From 05b69c4d00eca872e0de26253f3a7ab4b5190f0a Mon Sep 17 00:00:00 2001 From: Thomas Nolan Date: Mon, 28 Nov 2016 12:45:14 +0000 Subject: [PATCH] FH-v3.15.0 Release --- .eslintrc.json | 31 + .gitignore | 28 + .jshintrc | 28 + Gruntfile.js | 29 + LICENSE | 190 + README.md | 45 + config/dev.json | 175 + config/ose-placeholders.js | 41 + docker/.dockerignore | 1 + docker/Dockerfile | 27 + docs/CrashMonitor.md | 26 + docs/api/analytics/metrics.yaml | 84 + docs/api/appforms/dataexport.yaml | 161 + docs/api/appforms/submissions.yaml | 396 + docs/api/mbaas/appdata.yaml | 144 + docs/api/storage.yaml | 37 + fh-mbaas.js | 341 + fh-mbaas.yml | 238 + lib/appdata/import/appDataImport.js | 37 + lib/appdata/import/appDataImportRunner.js | 121 + lib/appdata/import/preparationSteps.js | 161 + lib/appdata/shared/commandUtils.js | 25 + lib/appdata/shared/common.js | 164 + lib/appdata/shared/constants.js | 3 + .../shared/mongowrapper/mongoCommand.js | 77 + .../shared/mongowrapper/mongowrapper.js | 23 + .../shared/mongowrapper/restoreCommand.js | 197 + .../shared/tarwrapper/extractCommand.js | 242 + lib/appdata/shared/tarwrapper/tarwrapper.js | 12 + lib/constants/configProperties.js | 5 + lib/constants/index.js | 4 + lib/dataSourceUpdater/index.js | 10 + lib/dataSourceUpdater/lib/handlers/index.js | 7 + .../lib/handlers/processEnvDataSources.js | 181 + .../lib/handlers/requestEndpointData.js | 50 + .../handlers/updateAllEnvDataSourceCache.js | 66 + .../lib/handlers/updateDataSourceCache.js | 50 + .../lib/handlers/updateSingleDataSource.js | 60 + lib/dataSourceUpdater/lib/logger/index.js | 10 + lib/export/AppDataExportRunner.js | 127 + lib/export/agenda.js | 37 + lib/export/appDataExport.js | 72 + lib/export/appDataExportController.js | 112 + .../cleaner/appDataExportCleanerRunner.js | 176 + lib/export/commonJobFunctions/cleanUp.js | 49 + lib/export/commonJobFunctions/index.js | 7 + .../commonJobFunctions/registerStorage.js | 23 + .../updateModelWithStorageData.js | 21 + .../connectToDatabase.js | 26 + .../commonPreparationSteps/createOutputDir.js | 26 + lib/export/commonPreparationSteps/index.js | 7 + .../commonPreparationSteps/reserveSpace.js | 87 + .../retrieveCollectionSize.js | 37 + lib/export/constants.js | 3 + lib/export/index.js | 4 + lib/export/mongoExportFunctions/constants.js | 2 + .../createExportArchive.js | 75 + .../getSecondaryReplSetHost.js | 46 + lib/export/mongoExportFunctions/index.js | 6 + .../mongoExportWrapper.js | 78 + lib/export/preparationSteps.js | 173 + .../submissions/SubmissionExportRunner.js | 117 + lib/export/submissions/preparationSteps.js | 75 + .../submissions/submissionDataExport.js | 41 + lib/formsUpdater/index.js | 4 + lib/formsUpdater/lib/agenda_scheduler.js | 85 + .../lib/jobs/data_source_update.js | 25 + lib/handlers/analytics/messaging.js | 38 + lib/handlers/analytics/metrics.js | 44 + lib/handlers/analytics/metricsRouter.js | 38 + lib/handlers/api.js | 102 + lib/handlers/app.js | 37 + lib/handlers/app/data.js | 79 + lib/handlers/app/db.js | 38 + lib/handlers/app/forms.js | 100 + lib/handlers/app/handlers/generateCSV.js | 88 + lib/handlers/app/handlers/generatePDF.js | 62 + lib/handlers/app/handlers/index.js | 9 + lib/handlers/forms.js | 56 + lib/handlers/forms/forms.js | 26 + lib/handlers/forms/projects.js | 22 + lib/handlers/forms/themes.js | 25 + lib/handlers/healthmonitor.js | 105 + lib/handlers/stats/stats_client.js | 39 + lib/handlers/stats/stats_router.js | 25 + lib/handlers/sys.js | 31 + lib/import/appDataImportController.js | 78 + 
lib/jobs/appDataExportCleanerJob.js | 55 + lib/jobs/appDataJob.js | 93 + lib/jobs/appDataRunnerJob.js | 97 + lib/jobs/appDataStalledJobsFinder.js | 71 + lib/jobs/context.js | 145 + lib/jobs/progressPersistor.js | 112 + lib/jobs/progressPublisher.js | 109 + lib/jobs/submissions/submissionExportJob.js | 114 + lib/jobs/taggedLogger.js | 39 + lib/messageHandlers/deployStatusHandler.js | 63 + lib/messageHandlers/migrationStatusHandler.js | 125 + lib/middleware/appdata.js | 31 + lib/middleware/appdata_import.js | 125 + lib/middleware/auth.js | 29 + lib/middleware/buildJobMiddleware.js | 119 + lib/middleware/events.js | 37 + lib/middleware/mbaasApp.js | 232 + lib/models/AppdataJobSchema.js | 7 + lib/models/BaseImportExportJobSchema.js | 218 + lib/models/SubmissionDataJobSchema.js | 5 + lib/models/appEnv.js | 118 + lib/models/index.js | 16 + lib/routes/forms/dataSources/handlers.js | 446 ++ lib/routes/forms/dataSources/router.js | 27 + .../forms/submissions/handlers/export.js | 55 + .../submissions/handlers/exportCSVAsync.js | 202 + .../forms/submissions/handlers/exportPdf.js | 58 + .../forms/submissions/handlers/filter.js | 33 + .../handlers/getExportCSVStatus.js | 14 + .../forms/submissions/handlers/index.js | 11 + lib/routes/forms/submissions/handlers/list.js | 30 + .../submissions/handlers/resetExportCSV.js | 14 + .../forms/submissions/handlers/search.js | 31 + lib/routes/forms/submissions/router.js | 76 + lib/routes/services/handlers.js | 104 + lib/routes/services/router.js | 30 + lib/services/appForms/dataSources/index.js | 5 + .../appForms/dataSources/listForUpdate.js | 13 + lib/services/appmbaas/getDeployedService.js | 29 + lib/services/appmbaas/listDeployedApps.js | 15 + lib/services/appmbaas/listDeployedServices.js | 28 + lib/services/appmbaas/removeAppDb.js | 45 + .../environment/deleteEnvironmentData.js | 51 + lib/services/services/index.js | 120 + lib/storage/functional_test.js | 20 + lib/storage/impl/router.js | 230 + lib/storage/index.js | 243 + lib/storage/models/FileSchema.js | 74 + lib/storage/models/TokenSchema.js | 20 + lib/util/amqp.js | 57 + lib/util/common.js | 201 + lib/util/configvalidation.js | 78 + lib/util/dfutils.js | 122 + lib/util/ditchhelper.js | 104 + lib/util/logger.js | 33 + lib/util/mongo.js | 74 + lib/util/requiredvalidation.js | 58 + lib/util/supercoreApiClient.js | 57 + lib/util/validation.js | 22 + npm-shrinkwrap.json | 6791 +++++++++++++++++ package.json | 72 + scripts/fh-mbaas | 247 + scripts/fh-mbaas-launcher.sh | 7 + scripts/install.sh | 19 + scripts/postinstall.sh | 19 + sonar-project.properties | 10 + test/accept/common.js | 47 + test/accept/server.js | 274 + test/accept/test-api.js | 199 + test/accept/test-dataSourceUpdater.js | 94 + test/accept/test-sys.js | 31 + test/accept/test_api_app.js | 160 + test/fixtures/appdata/export/app1db.json | 10 + test/fixtures/appdata/export/exportjobs.json | 64 + test/fixtures/appdata/export/filestore.json | 633 ++ test/fixtures/appdata/import/export.tar | Bin 0 -> 10240 bytes test/fixtures/appdata/index.js | 27 + test/fixtures/config/index.js | 89 + test/fixtures/forms/dataSources.js | 89 + test/fixtures/forms/index.js | 5 + test/fixtures/forms/submissions.js | 103 + test/fixtures/index.js | 28 + test/fixtures/mock_readStream.js | 22 + test/fixtures/mock_writeStream.js | 16 + test/fixtures/services/index.js | 21 + test/setup.js | 93 + .../stubs/dataSourceUpdater/handlers/index.js | 117 + test/stubs/dataSourceUpdater/index.js | 4 + test/stubs/fhForms/index.js | 291 + test/stubs/fhServiceAuth/index.js | 143 + 
test/stubs/index.js | 9 + test/stubs/mbaasMiddleware/index.js | 11 + test/stubs/mongo/mongoMocks.js | 33 + .../services/appForms/dataSources/index.js | 22 + test/stubs/services/appForms/index.js | 4 + test/stubs/services/appmbaas/index.js | 31 + test/stubs/services/index.js | 6 + test/unit/appdata/import/common.js | 44 + .../import/test-appDataImportRunner.js | 126 + .../appdata/import/test-import-middleware.js | 74 + .../appdata/import/test-preparationSteps.js | 124 + .../handlers/test-processEnvDataSources.js | 92 + .../handlers/test-requestEndpointData.js | 66 + .../handlers/test-updateDataSourceCache.js | 91 + .../testCreateOutputDir.js | 66 + .../submissions/testPreparationSteps.js | 112 + .../submissions/testSubmissionExportJob.js | 21 + .../submissions/testSubmissionsDataExport.js | 99 + .../testSubmissionsExportRunner.js | 184 + .../export/testAppDataExportCleanerRunner.js | 196 + .../testAppDataExportPreparationSteps.js | 187 + test/unit/export/testAppDataExportRunner.js | 148 + test/unit/export/testFormatCollectionName.js | 22 + .../export/testGetSecondaryReplSetHost.js | 150 + .../unit/handlers/analytics/test_messaging.js | 51 + test/unit/handlers/analytics/test_metrics.js | 83 + test/unit/handlers/app/test_db.js | 89 + test/unit/handlers/app/test_forms.js | 172 + test/unit/handlers/stats/test_stats.js | 82 + test/unit/handlers/test_healthmonitor.js | 51 + test/unit/jobs/testProgressPersistor.js | 67 + test/unit/jobs/testProgressPublisher.js | 141 + test/unit/jobs/test_appDataJob.js | 123 + test/unit/jobs/test_appDataRunnerJob.js | 83 + .../jobs/test_appDataStalledJobsFinder.js | 59 + .../test-deployStatusHandler.js | 58 + test/unit/middleware/test_appdata.js | 207 + test/unit/middleware/test_mbaas_app.js | 215 + test/unit/models/test-appEnv.js | 129 + test/unit/models/test-appmbaas.js | 416 + .../unit/models/test-submission-export-job.js | 78 + .../routes/forms/test-datasource-router.js | 646 ++ test/unit/routes/forms/test-export-router.js | 94 + .../routes/forms/test-submissions-router.js | 297 + .../routes/services/test-services-routes.js | 190 + .../services/test-deleteEnvironmentData.js | 92 + test/unit/services/test-removeAppDb.js | 83 + test/unit/storage/test_router.js | 121 + test/unit/storage/test_storage.js | 96 + test/unit/util/test-common.js | 132 + test/unit/util/test-dfutils.js | 52 + test/unit/util/test-ditchhelper.js | 63 + 229 files changed, 26513 insertions(+) create mode 100644 .eslintrc.json create mode 100755 .gitignore create mode 100755 .jshintrc create mode 100755 Gruntfile.js create mode 100644 LICENSE create mode 100755 README.md create mode 100755 config/dev.json create mode 100755 config/ose-placeholders.js create mode 100644 docker/.dockerignore create mode 100644 docker/Dockerfile create mode 100644 docs/CrashMonitor.md create mode 100644 docs/api/analytics/metrics.yaml create mode 100644 docs/api/appforms/dataexport.yaml create mode 100644 docs/api/appforms/submissions.yaml create mode 100644 docs/api/mbaas/appdata.yaml create mode 100644 docs/api/storage.yaml create mode 100755 fh-mbaas.js create mode 100755 fh-mbaas.yml create mode 100644 lib/appdata/import/appDataImport.js create mode 100644 lib/appdata/import/appDataImportRunner.js create mode 100644 lib/appdata/import/preparationSteps.js create mode 100644 lib/appdata/shared/commandUtils.js create mode 100644 lib/appdata/shared/common.js create mode 100644 lib/appdata/shared/constants.js create mode 100644 lib/appdata/shared/mongowrapper/mongoCommand.js create mode 100644 
lib/appdata/shared/mongowrapper/mongowrapper.js create mode 100644 lib/appdata/shared/mongowrapper/restoreCommand.js create mode 100644 lib/appdata/shared/tarwrapper/extractCommand.js create mode 100644 lib/appdata/shared/tarwrapper/tarwrapper.js create mode 100644 lib/constants/configProperties.js create mode 100644 lib/constants/index.js create mode 100644 lib/dataSourceUpdater/index.js create mode 100644 lib/dataSourceUpdater/lib/handlers/index.js create mode 100644 lib/dataSourceUpdater/lib/handlers/processEnvDataSources.js create mode 100644 lib/dataSourceUpdater/lib/handlers/requestEndpointData.js create mode 100644 lib/dataSourceUpdater/lib/handlers/updateAllEnvDataSourceCache.js create mode 100644 lib/dataSourceUpdater/lib/handlers/updateDataSourceCache.js create mode 100644 lib/dataSourceUpdater/lib/handlers/updateSingleDataSource.js create mode 100644 lib/dataSourceUpdater/lib/logger/index.js create mode 100644 lib/export/AppDataExportRunner.js create mode 100644 lib/export/agenda.js create mode 100644 lib/export/appDataExport.js create mode 100644 lib/export/appDataExportController.js create mode 100644 lib/export/cleaner/appDataExportCleanerRunner.js create mode 100644 lib/export/commonJobFunctions/cleanUp.js create mode 100644 lib/export/commonJobFunctions/index.js create mode 100644 lib/export/commonJobFunctions/registerStorage.js create mode 100644 lib/export/commonJobFunctions/updateModelWithStorageData.js create mode 100644 lib/export/commonPreparationSteps/connectToDatabase.js create mode 100644 lib/export/commonPreparationSteps/createOutputDir.js create mode 100644 lib/export/commonPreparationSteps/index.js create mode 100644 lib/export/commonPreparationSteps/reserveSpace.js create mode 100644 lib/export/commonPreparationSteps/retrieveCollectionSize.js create mode 100644 lib/export/constants.js create mode 100644 lib/export/index.js create mode 100644 lib/export/mongoExportFunctions/constants.js create mode 100644 lib/export/mongoExportFunctions/createExportArchive.js create mode 100644 lib/export/mongoExportFunctions/getSecondaryReplSetHost.js create mode 100644 lib/export/mongoExportFunctions/index.js create mode 100644 lib/export/mongoExportFunctions/mongoExportWrapper.js create mode 100644 lib/export/preparationSteps.js create mode 100644 lib/export/submissions/SubmissionExportRunner.js create mode 100644 lib/export/submissions/preparationSteps.js create mode 100644 lib/export/submissions/submissionDataExport.js create mode 100644 lib/formsUpdater/index.js create mode 100644 lib/formsUpdater/lib/agenda_scheduler.js create mode 100644 lib/formsUpdater/lib/jobs/data_source_update.js create mode 100644 lib/handlers/analytics/messaging.js create mode 100644 lib/handlers/analytics/metrics.js create mode 100644 lib/handlers/analytics/metricsRouter.js create mode 100644 lib/handlers/api.js create mode 100644 lib/handlers/app.js create mode 100644 lib/handlers/app/data.js create mode 100644 lib/handlers/app/db.js create mode 100644 lib/handlers/app/forms.js create mode 100644 lib/handlers/app/handlers/generateCSV.js create mode 100644 lib/handlers/app/handlers/generatePDF.js create mode 100644 lib/handlers/app/handlers/index.js create mode 100644 lib/handlers/forms.js create mode 100644 lib/handlers/forms/forms.js create mode 100644 lib/handlers/forms/projects.js create mode 100644 lib/handlers/forms/themes.js create mode 100755 lib/handlers/healthmonitor.js create mode 100644 lib/handlers/stats/stats_client.js create mode 100644 lib/handlers/stats/stats_router.js create 
mode 100755 lib/handlers/sys.js create mode 100644 lib/import/appDataImportController.js create mode 100644 lib/jobs/appDataExportCleanerJob.js create mode 100644 lib/jobs/appDataJob.js create mode 100644 lib/jobs/appDataRunnerJob.js create mode 100644 lib/jobs/appDataStalledJobsFinder.js create mode 100644 lib/jobs/context.js create mode 100644 lib/jobs/progressPersistor.js create mode 100644 lib/jobs/progressPublisher.js create mode 100644 lib/jobs/submissions/submissionExportJob.js create mode 100644 lib/jobs/taggedLogger.js create mode 100644 lib/messageHandlers/deployStatusHandler.js create mode 100644 lib/messageHandlers/migrationStatusHandler.js create mode 100644 lib/middleware/appdata.js create mode 100644 lib/middleware/appdata_import.js create mode 100644 lib/middleware/auth.js create mode 100644 lib/middleware/buildJobMiddleware.js create mode 100644 lib/middleware/events.js create mode 100644 lib/middleware/mbaasApp.js create mode 100644 lib/models/AppdataJobSchema.js create mode 100644 lib/models/BaseImportExportJobSchema.js create mode 100644 lib/models/SubmissionDataJobSchema.js create mode 100644 lib/models/appEnv.js create mode 100644 lib/models/index.js create mode 100644 lib/routes/forms/dataSources/handlers.js create mode 100644 lib/routes/forms/dataSources/router.js create mode 100644 lib/routes/forms/submissions/handlers/export.js create mode 100644 lib/routes/forms/submissions/handlers/exportCSVAsync.js create mode 100644 lib/routes/forms/submissions/handlers/exportPdf.js create mode 100644 lib/routes/forms/submissions/handlers/filter.js create mode 100644 lib/routes/forms/submissions/handlers/getExportCSVStatus.js create mode 100644 lib/routes/forms/submissions/handlers/index.js create mode 100644 lib/routes/forms/submissions/handlers/list.js create mode 100644 lib/routes/forms/submissions/handlers/resetExportCSV.js create mode 100644 lib/routes/forms/submissions/handlers/search.js create mode 100644 lib/routes/forms/submissions/router.js create mode 100644 lib/routes/services/handlers.js create mode 100644 lib/routes/services/router.js create mode 100644 lib/services/appForms/dataSources/index.js create mode 100644 lib/services/appForms/dataSources/listForUpdate.js create mode 100644 lib/services/appmbaas/getDeployedService.js create mode 100644 lib/services/appmbaas/listDeployedApps.js create mode 100644 lib/services/appmbaas/listDeployedServices.js create mode 100644 lib/services/appmbaas/removeAppDb.js create mode 100644 lib/services/environment/deleteEnvironmentData.js create mode 100644 lib/services/services/index.js create mode 100644 lib/storage/functional_test.js create mode 100644 lib/storage/impl/router.js create mode 100644 lib/storage/index.js create mode 100644 lib/storage/models/FileSchema.js create mode 100644 lib/storage/models/TokenSchema.js create mode 100644 lib/util/amqp.js create mode 100755 lib/util/common.js create mode 100755 lib/util/configvalidation.js create mode 100644 lib/util/dfutils.js create mode 100644 lib/util/ditchhelper.js create mode 100644 lib/util/logger.js create mode 100644 lib/util/mongo.js create mode 100755 lib/util/requiredvalidation.js create mode 100644 lib/util/supercoreApiClient.js create mode 100755 lib/util/validation.js create mode 100644 npm-shrinkwrap.json create mode 100644 package.json create mode 100755 scripts/fh-mbaas create mode 100755 scripts/fh-mbaas-launcher.sh create mode 100755 scripts/install.sh create mode 100755 scripts/postinstall.sh create mode 100644 sonar-project.properties create mode 100755 
test/accept/common.js create mode 100755 test/accept/server.js create mode 100755 test/accept/test-api.js create mode 100644 test/accept/test-dataSourceUpdater.js create mode 100755 test/accept/test-sys.js create mode 100644 test/accept/test_api_app.js create mode 100644 test/fixtures/appdata/export/app1db.json create mode 100644 test/fixtures/appdata/export/exportjobs.json create mode 100644 test/fixtures/appdata/export/filestore.json create mode 100644 test/fixtures/appdata/import/export.tar create mode 100644 test/fixtures/appdata/index.js create mode 100644 test/fixtures/config/index.js create mode 100644 test/fixtures/forms/dataSources.js create mode 100644 test/fixtures/forms/index.js create mode 100644 test/fixtures/forms/submissions.js create mode 100644 test/fixtures/index.js create mode 100644 test/fixtures/mock_readStream.js create mode 100644 test/fixtures/mock_writeStream.js create mode 100644 test/fixtures/services/index.js create mode 100644 test/setup.js create mode 100644 test/stubs/dataSourceUpdater/handlers/index.js create mode 100644 test/stubs/dataSourceUpdater/index.js create mode 100644 test/stubs/fhForms/index.js create mode 100644 test/stubs/fhServiceAuth/index.js create mode 100644 test/stubs/index.js create mode 100644 test/stubs/mbaasMiddleware/index.js create mode 100644 test/stubs/mongo/mongoMocks.js create mode 100644 test/stubs/services/appForms/dataSources/index.js create mode 100644 test/stubs/services/appForms/index.js create mode 100644 test/stubs/services/appmbaas/index.js create mode 100644 test/stubs/services/index.js create mode 100644 test/unit/appdata/import/common.js create mode 100644 test/unit/appdata/import/test-appDataImportRunner.js create mode 100644 test/unit/appdata/import/test-import-middleware.js create mode 100644 test/unit/appdata/import/test-preparationSteps.js create mode 100644 test/unit/dataSourceUpdater/handlers/test-processEnvDataSources.js create mode 100644 test/unit/dataSourceUpdater/handlers/test-requestEndpointData.js create mode 100644 test/unit/dataSourceUpdater/handlers/test-updateDataSourceCache.js create mode 100644 test/unit/export/commonPreparationSteps/testCreateOutputDir.js create mode 100644 test/unit/export/submissions/testPreparationSteps.js create mode 100644 test/unit/export/submissions/testSubmissionExportJob.js create mode 100644 test/unit/export/submissions/testSubmissionsDataExport.js create mode 100644 test/unit/export/submissions/testSubmissionsExportRunner.js create mode 100644 test/unit/export/testAppDataExportCleanerRunner.js create mode 100644 test/unit/export/testAppDataExportPreparationSteps.js create mode 100644 test/unit/export/testAppDataExportRunner.js create mode 100644 test/unit/export/testFormatCollectionName.js create mode 100644 test/unit/export/testGetSecondaryReplSetHost.js create mode 100644 test/unit/handlers/analytics/test_messaging.js create mode 100644 test/unit/handlers/analytics/test_metrics.js create mode 100644 test/unit/handlers/app/test_db.js create mode 100644 test/unit/handlers/app/test_forms.js create mode 100644 test/unit/handlers/stats/test_stats.js create mode 100644 test/unit/handlers/test_healthmonitor.js create mode 100644 test/unit/jobs/testProgressPersistor.js create mode 100644 test/unit/jobs/testProgressPublisher.js create mode 100644 test/unit/jobs/test_appDataJob.js create mode 100644 test/unit/jobs/test_appDataRunnerJob.js create mode 100644 test/unit/jobs/test_appDataStalledJobsFinder.js create mode 100644 test/unit/messageHandlers/test-deployStatusHandler.js 
create mode 100644 test/unit/middleware/test_appdata.js create mode 100644 test/unit/middleware/test_mbaas_app.js create mode 100644 test/unit/models/test-appEnv.js create mode 100644 test/unit/models/test-appmbaas.js create mode 100644 test/unit/models/test-submission-export-job.js create mode 100644 test/unit/routes/forms/test-datasource-router.js create mode 100644 test/unit/routes/forms/test-export-router.js create mode 100644 test/unit/routes/forms/test-submissions-router.js create mode 100644 test/unit/routes/services/test-services-routes.js create mode 100644 test/unit/services/test-deleteEnvironmentData.js create mode 100644 test/unit/services/test-removeAppDb.js create mode 100644 test/unit/storage/test_router.js create mode 100644 test/unit/storage/test_storage.js create mode 100644 test/unit/util/test-common.js create mode 100644 test/unit/util/test-dfutils.js create mode 100644 test/unit/util/test-ditchhelper.js diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..fe3bf5b --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,31 @@ +{ + "env": { + "node": true, + "es6": true, + "mocha": true + }, + "extends": "eslint:recommended", + "rules": { + "array-callback-return": "warn", + "brace-style": ["error", "1tbs"], + "complexity": ["warn", 20], + "eqeqeq": "error", + "guard-for-in": "error", + "indent": ["error", 2], + "linebreak-style": ["error", "unix"], + "no-array-constructor": "error", + "no-console": "warn", + "no-lonely-if": "warn", + "no-loop-func": "warn", + "no-mixed-spaces-and-tabs": ["error"], + "no-nested-ternary": "error", + "no-spaced-func": "error", + "no-trailing-spaces": "error", + "semi": ["error", "always"], + "space-before-blocks": "error", + "space-before-function-paren": ["error", "never"], + "keyword-spacing": ["error"], + "curly": ["error", "all"], + "no-unused-vars": ["error", {"argsIgnorePattern": "\\bnext\\b"}] + } +} diff --git a/.gitignore b/.gitignore new file mode 100755 index 0000000..e2c6ca1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,28 @@ +lib-cov/ +coverage/ +.project +.settings/ +dist/ +output/ +man/ +node_modules/ +*~ +nohup.out +TAGS +dump.rdb +hosts +resolv.conf +.idea/ +npm*.log +fhc-debug.log +dfc-debug.log +.DS_Store +._.DS_Store +a.out +fh-log +*.o +cov-* +plato +.idea +conf-docker.json +*.swp diff --git a/.jshintrc b/.jshintrc new file mode 100755 index 0000000..b71c2df --- /dev/null +++ b/.jshintrc @@ -0,0 +1,28 @@ +{ + "predef": [ "describe", "it", "before", "after", "beforeEach"], + "asi" : true, + "camelcase" : false, + "bitwise" : false, + "unused" : true, + "laxbreak" : true, + "laxcomma" : true, + "curly" : false, + "eqeqeq" : true, + "evil" : true, + "forin" : false, + "immed" : true, + "latedef" : false, + "newcap" : false, + "noarg" : true, + "noempty" : true, + "nonew" : true, + "plusplus" : false, + "regexp" : true, + "undef" : true, + "strict" : false, + "sub" : true, + "trailing" : true, + "node" : true, + "maxerr" : 100, + "indent" : 2 +} diff --git a/Gruntfile.js b/Gruntfile.js new file mode 100755 index 0000000..c5c5b9b --- /dev/null +++ b/Gruntfile.js @@ -0,0 +1,29 @@ +module.exports = function(grunt) { + 'use strict'; + + grunt.initConfig({ + + _test_runner: '_mocha', + + _unit_args: '-b -A -u exports -t 10000', + optional: ['mocha -A -u exports --recursive -t 10000 test/accept/test-backoff.js'], + + unit: ['echo $NODE_PATH', '<%= _test_runner %> <%= _unit_args %> --recursive ./test/unit/**/test*.js'], + + // use `grunt fh:testfile:{{unit_test_filename}}` to run a single test file + unit_single: 
['<%= _test_runner %> <%= _unit_args %> <%= unit_test_filename %>'], + + accept: ['turbo --series=true --setUp=test/accept/server.js --tearDown=test/accept/server.js test/accept/test-sys.js test/accept/test-api.js test/accept/test-dataSourceUpdater.js'], + + unit_cover: ['istanbul cover --dir cov-unit -- node_modules/.bin/_mocha <%= _unit_args %> --recursive ./test/unit/**/test*.js'], + + accept_cover: [ + 'istanbul cover --dir cov-accept ./node_modules/.bin/turbo -- --series=true --setUp=test/accept/server.js --tearDown=test/accept/server.js test/accept/test-sys.js test/accept/test-api.js' + ] + }); + + grunt.loadNpmTasks('grunt-fh-build'); + grunt.registerTask('default', ['eslint', 'fh:dist']); + + grunt.registerTask('coverage', ['fh:coverage']); +}; diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..d554517 --- /dev/null +++ b/LICENSE @@ -0,0 +1,190 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2016 Red Hat, Inc. 
+ + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100755 index 0000000..a1ab6d2 --- /dev/null +++ b/README.md @@ -0,0 +1,45 @@ +## fh-mbaas - FeedHenry MBaaS Management Service + +### Development guide + +Use the following steps to develop and debug the fh-mbaas component: + +* Be sure the source code of `fh-mbaas` is mounted via `nfs` to your Vagrant machine. The `fh-mbaas` directory should by default be located at `/mnt/src/fh-mbaas` in the Vagrant machine. You will be prompted to configure the folder which contains the source code on your host machine when you run `./setup.sh` in `fhcap` for the first time. Before doing `vagrant up`, include the following configuration snippet into `roles/dev.json` in the `fhcap` repository as a child of the `override_attributes` field: + +``` + "fh-mbaas": { + "install_method": "source" + } +``` + +* After mounting, you see the sources located on your host machine in the context of the Vagrant box. You can develop this component using your favorite IDE on your host machine and the files stay in sync with the Vagrant machine. +* Installation of the `fh-mbaas` component is done by running `npm install`. Be sure you execute this as the superuser since it installs configuration files and init scripts to `/usr` and `/etc` respectively. You should stop the already running `fh-mbaas` component with `sudo service fh-mbaas stop` and start it again after installation is done. +* Logs of `fh-mbaas` are stored in `/var/log/feedhenry/fh-mbaas` +* To start the service in "developer" mode, run `grunt serve`. This effectively uses the configuration file in `config/dev.json` +* The testing has been split due to the core middleware functionality moving to fh-mbaas-middleware - only 'grunt accept' is supported +* Unit tests have been moved to the fh-mbaas-middleware module +* If you want to be able to debug a running `fh-mbaas` service, you have to start it by triggering the debug task in Grunt by running `grunt debug`. Be sure that port `8080` in the Vagrant machine is not occupied by another service, otherwise `grunt debug` may fail to start. You can find the PID of the process which already listens on this port by executing `netstat -tupln | grep 8080` as the root user. You may have to stop the Tomcat server with `/etc/init.d/tomcat6 stop`. Once this is done, open node's debug console at `http://127.0.0.1:8080/debug?port=5858`. The `127.0.0.1` is a little misleading: if you open this address from your host's browser, it will not see any console running on the host's `127.0.0.1`. You have to replace it with the address that the Vagrant machine uses internally, e.g. '192.168.33.10'. This address is visible from your host computer and, since it represents the IP of the Vagrant machine, the debugger console will open. +* Plato statistics are generated via `grunt analysis`. Note that in order to see the generated analysis in your web browser, you have to open the statistics in `plato/index.html`.
Run `grunt analysis` either on your host or on your Vagrant machine as the sources are the same. View it on the host machine as there is no web browser installed on the Vagrant machine. + + +### Email Configuration + +To configure email sending from `fh-mbaas`, the `email.transport` configuration property must be set to: + +* `""` - to use the default transport (sendmail) +* `"sendgrid"` - to use sendgrid. The following config must also be set on the `email.sendgrid` object: + "auth": { + "api_user": "SENDGRID_USER", + "api_key": "SENDGRID_PASSWORD" + } +* `"smtp"` - to use SMTP. An SMTP connection URL must also be set on the `email.smtp` string: + "smtps://user:password@smtp.example.com" + +#### OSE3 +For OSE3 images, fh-mbaas will use `smtp` by default. An administrator must add an environment variable to configure `fh-mbaas` to use a local SMTP relay/server - the name of this environment variable is `FH_EMAIL_SMTP` and it should be set to an SMTP URL, e.g. `smtps://user:password@smtp.example.com` + +### Testing (grunt fh:test) + +* Modules published to npm.skunkhenry.com + + * The module turbo-test-runner has been updated (heapdump and memwatch needed upgrading); due to permissions on npmjs, this has been published to npm.skunkhenry.com diff --git a/config/dev.json b/config/dev.json new file mode 100755 index 0000000..02bbe03 --- /dev/null +++ b/config/dev.json @@ -0,0 +1,175 @@ +{ + "fhmbaas": { + "port": 8819, + "key": "ADD_YOUR_KEY", + "maxpayloadsize": "100mb", + "temp_forms_files_dest" : "/tmp/fh-mbaas", + "maxConcurrentPhantomPerWorker": 1, + "pdfExportDir": "/tmp/fh-mbaas", + "code_coverage_enabled": false, + "mbaasid":"development", + "dsMinsPerBackOffIndex": 1, + "pagination": { + "maxLimit": 1000, + "defaultLimit": 10 + }, + "appdataexport" : { + "output_dir" : "/var/feedhenry/data", + "schedule_time": 180000, + "default_lock_lifetime":120000, + "cleaner": { + "frequency": "*/30 * * * *", + "grace_time": 10 + } + }, + "appdata_jobs" : { + "upload_dir" : "/var/feedhenry/upload", + "scheduler": { + "concurrency": 1, + "frequency": "30 seconds" + }, + "stalled_job_finder": { + "frequency": "1 minute" + } + } + }, + "logger": { + "name": "mbaas", + "streams": [{ + "type": "stream", + "src": true, + "level": "trace", + "stream": "process.stdout" + }, { + "type": "raw", + "src": true, + "level": "trace", + "stream": "ringBuffer" + }] + }, + "mongo": { + "enabled": true, + "name": "fh-mbaas", + "host": "localhost", + "port": 27017, + "replicaset_name": null, + "auth": { + "enabled": false, + "user": "", + "pass": "" + }, + "admin_auth": { + "user": "u-mbaas", + "pass": "password" + } + }, + "fhditch": { + "host": "localhost", + "port": 8802, + "protocol": "http", + "service_key": "test" + }, + "fhdfc": { + "dynofarm": "http://localhost:9000", + "username":"DYNOFARM_USERNAME", + "_password": "DYNOFARM_PASSWORD", + "loglevel": "warn", + "cache_timeout": 300000 + }, + "fhamqp": { + "app": { + "enabled": true + }, + "enabled": true, + "max_connection_retry": 10, + "ssl": false, + "nodes": "node1:5672", + "vhosts": { + "events": { + "clusterNodes": [ + "amqp://amqpuser:amqppassword@node1.feedhenry.local:5672/fhevents" + ] + }, + "internal":{ + "clusterNodes": [ + "amqp://amqpuser:amqppassword@node1.feedhenry.local:5672/fhinternal" + ], + "heartbeat": 20 + } + } + }, + "fhmetrics" :{ + "host":"127.0.0.1", + "port":"8813", + "protocol":"http", + "apikey":"ADD_YOUR_KEY" + }, + "fhmessaging":{ + "enabled": true, + "host":"localhost", + "protocol":"http", + "port":8803, + "path":"msg/TOPIC",
"cluster":"development", + "realtime": true, + "apikey":"secretkey", + "files":{ + "recovery_file":"../messages/recovery.log", + "backup_file":"../messages/backup.log" + } + }, + "fhstats":{ + "enabled": true, + "host":"localhost", + "port": 8804, + "protocol": "http", + "apikey": "ADD_YOUR_KEY" + }, + "fhredis":{ + "host": "127.0.0.1", + "port": 6379, + "password":"FHREDIS_PASSWORD" + }, + "crash_monitor":{ + "enabled":true, + "min_num_crashes":10, + "max_num_crashes":40, + "tolerance":1, + "base_time_seconds":60, + "sample_time_hrs":1 + }, + "email": { + "transport": "", + "sendgrid": { + "auth": { + "api_user": "SENDGRID_USER", + "api_key": "SENDGRID_PASSWORD" + } + }, + "smtp": "smtps://user:password@smtp.example.com", + "alert_email_from": "no-reply@feedhenry.com", + "alert_email_bcc": "" + }, + "auto_create_app_dbs":["openshift3"], + "openshift3": false, + "agenda": { + "enabled": true, + "preferredWorkerId": 1, + "jobs": { + "data_source_update": { + "schedule": "60 seconds" + } + } + }, + "component_metrics": { + "enabled": false, + "host": "192.168.33.1", + "port": 4444 + }, + "storage": { + "token_exp_time" : 600, + "base_url_protocol": "https", + "base_url_host":"files.feedhenry.me" + }, + "ops_env_file": "/etc/feedhenry/openv.json" +} diff --git a/config/ose-placeholders.js b/config/ose-placeholders.js new file mode 100755 index 0000000..9cd9b50 --- /dev/null +++ b/config/ose-placeholders.js @@ -0,0 +1,41 @@ +var placeholders = { + "openshift3": true, + "crash_monitor.enabled": false, + "fhamqp.enabled": false, + "fhmbaas.port": 8080, + "fhmbaas.key": "{{env.FHMBAAS_KEY}}", + "mongo.name": "{{env.MONGODB_FHMBAAS_DATABASE}}", + "mongo.host": "mongodb-1.{{env.MBAAS_NAMESPACE}}", + "mongo.replicaset_name" : "{{env.MONGODB_REPLICA_NAME}}", + "mongo.auth.enabled": true, + "mongo.auth.user": "{{env.MONGODB_FHMBAAS_USER}}", + "mongo.auth.pass": "{{env.MONGODB_FHMBAAS_PASSWORD}}", + "mongo.admin_auth.user": "admin", + "mongo.admin_auth.pass": "{{env.MONGODB_ADMIN_PASSWORD}}", + "fhredis.host": "{{env.REDIS_SERVICE_SERVICE_HOST}}", + "fhredis.port": "{{env.REDIS_SERVICE_SERVICE_PORT}}", + "fhmetrics.host": "{{env.FH_METRICS_SERVICE_SERVICE_HOST}}", + "fhmetrics.port": "{{env.FH_METRICS_SERVICE_SERVICE_PORT}}", + "fhmetrics.apikey": "{{env.FH_METRICS_API_KEY}}", + "fhmessaging.host": "{{env.FH_MESSAGING_SERVICE_SERVICE_HOST}}", + "fhmessaging.port": "{{env.FH_MESSAGING_SERVICE_SERVICE_PORT}}", + "fhmessaging.apikey": "{{env.FH_MESSAGING_API_KEY}}", + "fhmessaging.realtime": true, + "fhmessaging.cluster":"{{env.FH_CLUSTER}}", + "fhstats.host": "{{env.FH_STATSD_SERVICE_SERVICE_HOST}}", + "fhstats.port": "{{env.FH_STATSD_SERVICE_SERVICE_PORT}}", + "fhstats.udp.port": "{{env.FH_STATSD_SERVICE_PORT_8081_UDP_PORT}}", + "fhstats.udp.protocol": "{{env.FH_STATSD_SERVICE_PORT_8081_UDP_PROTO}}", + "fhstats.apikey": "{{env.FH_STATSD_API_KEY}}", + "fhmbaas.mbaasid":"{{env.FH_MBAASID}}", + "email.smtp": "{{env.FH_EMAIL_SMTP}}", + "email.alert_email_from": "{{env.FH_EMAIL_ALERT_FROM}}", + "email.transport": "smtp", + "component_metrics.host":"{{env.FH_COMPONENT_METRICS_HOST}}", + "component_metrics.enabled":"{{env.FH_COMPONENT_METRICS_ENABLED}}", + "component_metrics.port":"{{env.FH_COMPONENT_METRICS_PORT}}", + "logger.streams[0].level": "{{env.FH_LOG_LEVEL}}", + "logger.streams[1].level": "{{env.FH_LOG_LEVEL}}" +}; + +module.exports = placeholders; diff --git a/docker/.dockerignore b/docker/.dockerignore new file mode 100644 index 0000000..9414382 --- /dev/null +++ b/docker/.dockerignore @@ -0,0 +1 @@ 
+Dockerfile diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..5b5ef3c --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,27 @@ +FROM registry.access.redhat.com/rhscl/nodejs-4-rhel7 + +EXPOSE 8080 + +USER root + +RUN mkdir -p /var/feedhenry/data && \ + mkdir -p /var/feedhenry/upload && \ + chmod -R 755 /var/feedhenry && \ + ln -sf /usr/share/zoneinfo/UTC /etc/localtime && \ + mkdir -p config && \ + chown -R default:root ./ + +# Installing fonts to be able to render PDFs for submissions +RUN yum install -y dejavu-sans-fonts +RUN yum install -y https://s3-eu-west-1.amazonaws.com/fhcap/phantomjs-1.9.7-3.el7map.x86_64.rpm + +USER default + +# Extract app to work dir and copy conf +COPY fh-*.tar.gz ./ +RUN tar -xf fh-*.tar.gz --strip 1 && \ + rm fh-*.tar.gz && \ + mv conf-docker.json config/conf.json && \ + chmod -R ug+rw ./ + +CMD ["bash", "-c", "node fh-mbaas.js config/conf.json --master-only"] diff --git a/docs/CrashMonitor.md b/docs/CrashMonitor.md new file mode 100644 index 0000000..bc0ad08 --- /dev/null +++ b/docs/CrashMonitor.md @@ -0,0 +1,26 @@ +The crash monitor listens for crash messages from apps and, based on configuration options, decides whether any action should be taken. + +Example configuration: + +``` +"crash_monitor":{ + "enabled":true, + "min_num_crashes":1, // the minimum number of crashes that have to take place before the crash monitor will take any action + "max_num_crashes":20, // the maximum number of crashes we will tolerate over the specified sample_time_hrs value. If this value is reached, the app will be stopped. This is more of a fail-safe option. + "tolerance":1, // the number of crashes we will tolerate within the base_time_seconds. So here we will tolerate 1 crash per 60 seconds up to a maximum of 20 + "base_time_seconds":60, // the length, in seconds, of the base window that the tolerance value applies to + "sample_time_hrs":1 //A rolling period of time that we sample. So here we would take up to the last hour of crashes. Once the elapsed time passes this value, a new test period is started. + } + +``` + + Each time a crash message is received, the average number of crashes that has occurred within the last sample_time_hrs is calculated. The sample time starts from the time of the first crash occurrence. Once an app has crashed the minimum number of times, we begin deciding whether to take action or not. + + E.g. if sample_time_hrs is 1 and the app started crashing 30 mins ago and has now crashed 31 times, we will have an average of roughly 1 crash per minute. + If our tolerance is 1 and base_time_seconds is 60, then our tolerance will have been met and the app is stopped. + + The reason for using an average is to cater for the fact that apps do not always crash at regular intervals. An app may crash 3 times in 3 seconds and then not crash for 10 minutes. + + A sample time is used to gain insight into the recent behaviour of the app rather than the behaviour of the app over a very long period of time. + + The max_num_crashes value is a fail-safe for the case where an app would otherwise manage to avoid being stopped.
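+
+For illustration only, here is a minimal sketch of the decision described above. It is not the actual fh-mbaas implementation; the function name, its signature and the clamping of the elapsed time are assumptions made for this example, and only the configuration field names come from the example configuration above.
+
+```js
+// Hypothetical sketch: decide whether an app should be stopped, given the
+// timestamps (in ms, ascending) of its crashes and a crash_monitor config.
+function shouldStopApp(crashTimesMs, cfg, nowMs) {
+  // Only crashes inside the rolling sample window are considered.
+  var windowStartMs = nowMs - cfg.sample_time_hrs * 60 * 60 * 1000;
+  var recent = crashTimesMs.filter(function(t) { return t >= windowStartMs; });
+
+  if (recent.length < cfg.min_num_crashes) {
+    return false; // too few crashes to act on yet
+  }
+  if (recent.length >= cfg.max_num_crashes) {
+    return true; // fail-safe: absolute crash count for the sample window reached
+  }
+
+  // Average number of crashes per base_time_seconds, measured from the first
+  // crash in the window (clamped so a very early burst is not over-weighted).
+  var elapsedSec = Math.max((nowMs - recent[0]) / 1000, cfg.base_time_seconds);
+  var avgPerBaseWindow = recent.length / (elapsedSec / cfg.base_time_seconds);
+  return avgPerBaseWindow > cfg.tolerance;
+}
+```
+
+With the example values above (tolerance 1, base_time_seconds 60), 31 crashes spread over the last 30 minutes give roughly 1.03 crashes per 60-second window, so the tolerance is exceeded and the app would be stopped.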
\ No newline at end of file diff --git a/docs/api/analytics/metrics.yaml b/docs/api/analytics/metrics.yaml new file mode 100644 index 0000000..2f536a5 --- /dev/null +++ b/docs/api/analytics/metrics.yaml @@ -0,0 +1,84 @@ +# sys endpoint routes +swagger: '2.0' +info: + title: Metrics Api + description: Endpoints For Retrieving Metrics from an mbaas + version: "1.0.0" + +schemes: + - http + +basePath: /api/metrics + +produces: + - application/json + +paths: + /: + get: + summary: Get Rolled up metrics from an mbaas + description: Returns the rolled up metrics from fh-metrics in the mbaas + tags: + - Metrics + responses: + 200: + description: Rolled up metrics retrieved successfully + schema: + $ref: '#/definitions/Metrics' + 500: + description: Error Getting Metrics + schema: + type: object + 400: + description: Bad request Getting Metrics + schema: + type: object + +#TODO Full FH-Supercore Config +definitions: + Metrics: + properties: + domainrequestsgeo: + type: array + domainstartupsdest: + type: array + domaininstallsgeo: + type: array + domainrequestsdest: + type: array + domainstartupsgeo: + type: array + domaintransactionsdest: + type: array + domaintransactionsgeo: + type: array + domaininstallsdest: + type: array + appinstallsdest: + type: array + apprequestsdest: + type: array + apprequestsgeo: + type: array + appstartupsdest: + type: array + apptransactionsgeo: + type: array + appstartupsgeo: + type: array + apptransactionsdest: + type: array + appinstallsgeo: + type: array + + + + + + + + + + + + diff --git a/docs/api/appforms/dataexport.yaml b/docs/api/appforms/dataexport.yaml new file mode 100644 index 0000000..f2461a2 --- /dev/null +++ b/docs/api/appforms/dataexport.yaml @@ -0,0 +1,161 @@ +# sys endpoint routes +swagger: '2.0' +info: + title: Appforms Submissions Admin API + description: Endpoints For Retrieving Metrics from an mbaas + version: "1.0.0" + +schemes: + - https + +basePath: /api + +produces: + - application/json + +parameters: + domain: + name: domain + in: path + description: the domain of the application + type: string + required: true + environment: + name: environment + in: path + description: the environment of the application + type: string + required: true + appid: + name: appid + in: path + description: the unique id of the application + type: string + required: true + job_id: + name: job_id + in: path + description: id of the export job to download the data from + type: string + required: true + +paths: + /{domain}/{environment}/appforms/data/export: + get: + tags: + - MbaaS + - Appforms + - Submissions + description: List export operations + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + - $ref: "#/parameters/appid" + responses: + 200: + description: List of export operations, includes both ongoing operations and historical data + schema: + type: array + items: + $ref: '#/definitions/ExportJob' + 401: + description: Authentication Invalid + post: + tags: + - MbaaS + - Appforms + - Submissions + description: Create a new export job + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + - $ref: "#/parameters/appid" + responses: + 200: + description: New job created successfully + schema: + $ref: '#/definitions/ExportJob' + 401: + description: Authentication Invalid + 500: + description: Error creating job + /{domain}/{environment}/appforms/data/export/{job_id}: + get: + tags: + - MbaaS + - Appforms + - Submissions + description: Details of an export operation + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + -
$ref: "#/parameters/appid" + - $ref: "#/parameters/job_id" + responses: + 200: + description: Details of a single export operation + schema: + $ref: '#/definitions/ExportJob' + 404: + description: job_id not found + 401: + description: Authentication Invalid + post: + tags: + - MbaaS + - Appforms + - Submissions + description: Create download token+url for generated file + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + - $ref: "#/parameters/appid" + - $ref: "#/parameters/job_id" + responses: + 200: + description: Token generation successful + schema: + properties: + url: string + description: Download URL for the related file, including single-use token + 401: + description: Authentication Invalid + 404: + description: + Job not found or in invalid state + (i.e. not finished successfully and with file available) +definitions: + ExportJob: + properties: + "_id": + type: string + environment: + type: string + domain: + type: string + status: + type: string + type: + type: string + step: + type: number + totalSteps: + type: number + fileSize: + type: number + fileDeleted: + type: string + fileId: + type: string + progress: + type: object + metadata: + type: object + logs: + type: array + items: + type: string + created: + type: date + modified: + type: date \ No newline at end of file diff --git a/docs/api/appforms/submissions.yaml b/docs/api/appforms/submissions.yaml new file mode 100644 index 0000000..95928f0 --- /dev/null +++ b/docs/api/appforms/submissions.yaml @@ -0,0 +1,396 @@ +# sys endpoint routes +swagger: '2.0' +info: + title: Appforms Submissions Admin API + description: Endpoints For Retrieving Metrics from an mbaas + version: "1.0.0" + +schemes: + - http + +basePath: /api + +produces: + - application/json + +paths: + /{domain}/{environment}/appforms/submissions: + get: + tags: + - MbaaS + - Appforms + - Submissions + summary: List All Submissions For An Environment + parameters: + - in: path + name: environment + description: Environment + type: string + required: true + - in: path + name: domain + description: Domain + type: string + required: true + - in: query + name: page + description: Page Number + type: string + required: true + - in: query + name: limit + description: Number of submissions per page + type: string + required: true + - in: query + name: filter + description: Optional filter for submission metadata. + type: string + required: false + responses: + 200: + description: List Of Submissions + schema: + properties: + pages: + type: number + description: Total pages that match the filter + total: + type: number + description: Total number of submissions that match the filter + submissions: + type: array + items: + $ref: '#/definitions/Submission' + 500: + description: Error getting Submissions + schema: + $ref: '#/definitions/Error' + /{domain}/{environment}/appforms/filter: + post: + tags: + - MbaaS + - Appforms + - Submissions + summary: List All Submissions For An Environment + parameters: + - in: path + name: environment + description: Environment + type: string + required: true + - in: path + name: domain + description: Domain + type: string + required: true + - in: query + name: page + description: Page Number + type: string + required: true + - in: query + name: limit + description: Number of submissions per page + type: string + required: true + - in: query + name: filter + description: Optional filter for submission metadata. 
+ type: string + required: false + responses: + 200: + description: List Of Submissions + schema: + properties: + pages: + type: number + description: Total pages that match the filter + total: + type: number + description: Total number of submissions that match the filter + submissions: + type: array + items: + $ref: '#/definitions/Submission' + 500: + description: Error getting Submissions + schema: + $ref: '#/definitions/Error' + /{domain}/{environment}/appforms/search: + post: + tags: + - MbaaS + - Appforms + - Submissions + summary: List All Submissions For An Environment + parameters: + - in: path + name: environment + description: Environment + type: string + required: true + - in: path + name: domain + description: Domain + type: string + required: true + - in: body + schema: + type: string + name: formId + description: ID Of A Form To Filter Submissions By + required: true + - in: body + schema: + type: string + enum: + - "and" + - "or" + name: clauseOperator + description: Operator To Combine Search Clauses + required: true + - in: body + schema: + type: array + items: + $ref: '#/definitions/SubmissionQueryFields' + name: queryFields + - in: body + schema: + type: array + items: + $ref: '#/definitions/SubmissionQueryMeta' + name: queryMeta + - in: body + name: page + description: Page Number + type: string + required: true + - in: body + name: limit + description: Number of submissions per page + type: string + required: true + responses: + 200: + description: List Of Submissions + schema: + properties: + pages: + type: number + description: Total pages that match the filter + total: + type: number + description: Total number of submissions that match the filter + submissions: + type: array + items: + $ref: '#/definitions/Submission' + 500: + description: Error getting Submissions + schema: + $ref: '#/definitions/Error' + +definitions: + Submission: + properties: + _id: + type: string + description: Submission ID + appClientId: + type: string + description: ID Of The Client App That Performed The Submission + appCloudName: + type: string + description: Full Name Of The Cloud App That Received The Submission + appEnvironment: + type: string + description: Environment Of The Cloud That Received The Submission + appId: + type: string + description: The GUID Of The Project The Submission Was Submitted Against + comments: + type: array + items: + type: object + properties: + madeBy: + type: string + madeOn: + type: string + format: dateTime + value: + type: string + description: Comment Made By User + deviceFormTimestamp: + type: string + format: dateTime + description: Timestamp Of The Form The Submission Was Made Against + deviceIPAddress: + type: string + description: Comma-Separated List Of IP Addressses The Submission Was Made Against + example: "10.0.2.2,127.0.0.1" + deviceId: + type: string + description: The ID Of The Device That Made The Submission + formId: + type: string + description: The ID Of The Form The Submission Was Made Against + masterFormTimestamp: + type: string + format: dateTime + description: The Last Updated Timestamp Of The Form Stored On The Cloud When The Submission Was Made + status: + type: string + description: Current Status Of The Submission + enum: + - complete + - pending + - error + submissionCompletedTimestamp: + type: string + format: dateTime + description: The Date & Time The Submission JSON & Files Were Validated And Completed + submissionStartedTimestamp: + type: string + format: dateTime + description: The Date & Time The Submission JSON Was 
Validated + timezoneOffset: + type: integer + description: The Offset, In Minutes, From UTC Of The Device That Performed The Submission + updatedTimestamp: + type: string + format: dateTime + description: The Last Time The Submission Was Updated From Studio + userId: + type: string + description: The ID Of The User That Performed The Submission (Not Used) + formName: + type: string + description: The name of the form that the submission was made against + formFields: + description: Field Values Made Against The Submission + type: array + items: + type: object + properties: + fieldId: + description: Full Field Definition Of The Field The Values Were Made Against + type: object + properties: + _id: + type: string + description: Field ID + type: + type: string + description: Field type + fieldValues: + description: Array Of Field Values Made Against This Field. Field Value Format Depends On The Type Of Field Submitted. + type: array + items: + $ref: '#/definitions/FieldValueObject' + FieldValueObject: + properties: + text: + type: string + textarea: + type: string + url: + type: string + emailAddress: + type: string + dropdown: + type: string + radio: + type: string + locationMap: + type: object + properties: + lat: + type: number + long: + type: number + location: + type: object + properties: + lat: + type: number + long: + type: number + checkboxes: + type: array + items: + type: string + photo: + $ref: '#/definitions/FileFieldObject' + file: + $ref: '#/definitions/FileFieldObject' + signature: + $ref: '#/definitions/FileFieldObject' + barcode: + type: object + properties: + text: + type: string + format: + type: string + number: + type: number + sliderNumber: + type: number + dateTime: + type: string + format: dateTime + FileFieldObject: + properties: + fileName: + type: string + fileType: + type: string + fileSize: + type: integer + groupId: + type: string + hashName: + type: string + url: + type: string + Error: + properties: + code: + type: integer + description: Error Code (Feedhenry) + userDetail: + type: string + description: Error Message To Present To The User + systemDetail: + type: string + description: More Detailed Message For Debugging (e.g. Stack Trace) + SubmissionQueryFields: + properties: + clauses: + type: array + items: + $ref: '#/definitions/SubmissionQueryFieldsClause' + SubmissionQueryMeta: + properties: + clauses: + type: array + items: + $ref: '#/definitions/SubmissionQueryMetaClause' + SubmissionQueryMetaClause: + properties: + fieldId: + type: string + description: ID Of The Field To Search + metaName: + type: string + description: ID of Meta Field To Search By + restriction: + type: string + description: Type Of Comparison. Depends On Field Type. 
+ value: + type: string + description: Value To Compare To diff --git a/docs/api/mbaas/appdata.yaml b/docs/api/mbaas/appdata.yaml new file mode 100644 index 0000000..73422a8 --- /dev/null +++ b/docs/api/mbaas/appdata.yaml @@ -0,0 +1,144 @@ +swagger: '2.0' +info: + title: MbaaS API + description: Endpoints For App Import and Export + version: "1.0.0" +schemes: + - https +basePath: /api/mbaas/ + +produces: + - application/json + +parameters: + domain: + name: domain + in: path + description: the domain of the application + type: string + required: true + environment: + name: environment + in: path + description: the environment of the application + type: string + required: true + appid: + name: appid + in: path + description: the unique id of the application + type: string + required: true + job_id: + name: job_id + in: path + description: id of the export job to download the data from + type: string + required: true +paths: + "/{domain}/{environment}/{appid}/data/export": + get: + description: List export operations + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + - $ref: "#/parameters/appid" + responses: + 200: + description: List of export operations, includes both ongoing operations and historical data + schema: + type: array + items: + $ref: '#/definitions/ExportJob' + 401: + description: Authentication Invalid + tags: + - export + post: + description: Create a new export job + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + - $ref: "#/parameters/appid" + responses: + 200: + description: New job created successfully + schema: + $ref: '#/definitions/ExportJob' + 401: + description: Authentication Invalid + 500: + description: Error creating job + + "/{domain}/{environment}/{appid}/data/export/{job_id}": + post: + description: Create download token+url for generated file + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + - $ref: "#/parameters/appid" + - $ref: "#/parameters/job_id" + responses: + 200: + description: Token generation successful + schema: + properties: + url: string + description: Download URL for the related file, including single-use token + 401: + description: Authentication Invalid + 404: + description: + Job not found or in invalid state + (i.e. 
not finished successfully and with file available) + tags: + - export + get: + description: Details of an export operation + parameters: + - $ref: "#/parameters/domain" + - $ref: "#/parameters/environment" + - $ref: "#/parameters/appid" + - $ref: "#/parameters/job_id" + responses: + 200: + description: Details of a single export operation + schema: + $ref: '#/definitions/ExportJob' + 404: + description: job_id not found + 401: + description: Authentication Invalid + tags: + - export +definitions: + ExportJob: + properties: + "_id": + type: string + appid: + type: string + environment: + type: string + domain: + type: string + status: + type: string + step: + type: number + totalSteps: + type: number + fileSize: + type: number + fileDeleted: + type: string + filePath: + type: string + fileId: + type: string + progress: + type: object + created: + type: date + modified: + type: date \ No newline at end of file diff --git a/docs/api/storage.yaml b/docs/api/storage.yaml new file mode 100644 index 0000000..a06c570 --- /dev/null +++ b/docs/api/storage.yaml @@ -0,0 +1,37 @@ +swagger: '2.0' +info: + title: File storage and download API + description: Client-facing file download service for files generated by other processes inside fh-mbaas + version: "1.0.0" +schemes: + - https +basePath: /api/ + +produces: + - application/json + +paths: + '/storage/{id}': + get: + description: download file by id with token authentication + parameters: + - name: id + in: path + required: true + type: string + description: File Id + - name: token + in: query + required: true + type: string + description: temporary token for file download + produces: + - application/json + - application/octet-stream + responses: + 200: + description: Binary file data, should support download resuming + 404: + description: File not found or already deleted + 401: + description: Invalid token diff --git a/fh-mbaas.js b/fh-mbaas.js new file mode 100755 index 0000000..5f07a42 --- /dev/null +++ b/fh-mbaas.js @@ -0,0 +1,341 @@ +#!/usr/bin/env node + +var TITLE = "fh-mbaas"; +process.env.component = TITLE; +process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"; +if (!process.env.conf_file) { + process.env.conf_file = process.argv[2]; +} + +var util = require('util'); +var args = require('optimist').argv; +var fs = require('fs'); +var path = require('path'); +var express = require('express'); +var cors = require('cors'); +var bodyParser = require('body-parser'); +var fhconfig = require('fh-config'); +var forms = require('fh-forms'); +var fhmbaasMiddleware = require('fh-mbaas-middleware'); +var fhServiceAuth = require('fh-service-auth'); +var requiredvalidation = require('./lib/util/requiredvalidation.js'); +var log = require('./lib/util/logger'); +var scheduler; +var logger; +var fhComponentMetrics = require('fh-component-metrics'); +var fhcluster = require('fh-cluster'); +var cluster = require('cluster'); +var formsUpdater = require('./lib/formsUpdater'); +var fhlogger = require('fh-logger'); +var amqp = require('./lib/util/amqp.js'); + +var async = require('async'); +var models = require('./lib/models'); +var mongoose = require('mongoose'); + +var mongooseConnection; + + + +var START_AGENDA = "startAgenda"; + +// args and usage +function usage() { + /* eslint-disable no-console */ + console.log("Usage: " + args.$0 + " [-d] (debug) --master-only --workers=[int] \n --master-only will override --workers so should not be used together"); + /* eslint-enable no-console */ + process.exit(0); +} + +if (args.h) { + usage(); +} + +if (args._.length < 1) 
{ + usage(); +} + +//Loading The Config First +loadConfig(function() { + if (args.d === true || args["master-only"] === true) { + + /* eslint-disable no-console */ + console.log("starting single master process"); + /* eslint-enable no-console */ + startWorker(); + initializeScheduler(); + } else { + var preferredWorkerId = fhconfig.value('agenda.preferredWorkerId'); + // Note: if required as a module, its up to the user to call start(); + if (require.main === module) { + var numWorkers = args["workers"]; + fhcluster(startWorker, numWorkers, undefined, [ + { + workerFunction: initializeScheduler, + startEventId: START_AGENDA, + preferredWorkerId: preferredWorkerId + } + ]); + } + } +}); + +/** + * Loading Module Config From File System + * @param cb + */ +function loadConfig(cb) { + // read our config file + var configFile = process.env.conf_file || args._[0]; + + fhconfig.init(configFile, requiredvalidation, function(err) { + if (err) { + /* eslint-disable no-console */ + console.error("Problems reading config file: " + configFile); + console.error(err); + /* eslint-enable no-console */ + process.exit(-1); + } + createAndSetLogger(); + cb(); + }); +} + +function createAndSetLogger() { + // We are overwriting any logger created in fhconfig.init and replacing + // it with an fh-logger instance to allow the underlying logger to be + // controlled (setting log levels for example) + logger = fhlogger.createLogger(fhconfig.getConfig().rawConfig.logger); + + log.setLogger(logger); + + //Setting up namespace for the logger. This allows the logger to track request IDs + //when mongoose queries have completed. + var clsMongoose = require('fh-cls-mongoose'); + var loggerNamespace = logger.getLoggerNamespace(); + clsMongoose(loggerNamespace, mongoose); + + fhconfig.setLogger(logger); + + //Setting logger for fh-forms + forms.init(logger); + + //Setting global forms config + logger.debug("minsPerBackOffIndex", fhconfig.int('fhmbaas.dsMinsPerBackOffIndex')); + forms.core.setConfig({ + minsPerBackOffIndex: fhconfig.int('fhmbaas.dsMinsPerBackOffIndex') + }); +} + +/** + * Initialising The Scheduler. This is bound to a single worker using fh-cluster. + * @param clusterWorker + */ +function initializeScheduler(clusterWorker) { + //Ensuring that the config is loaded. + initModules(clusterWorker, getMbaasMiddlewareConfig(), function() { + if (clusterWorker !== undefined) { + logger.info("Initialising scheduler ", clusterWorker.id, clusterWorker.process.pid); + } else { + logger.info("Initialising scheduler."); + } + scheduler = formsUpdater.scheduler(logger, fhconfig.getConfig().rawConfig, fhconfig.mongoConnectionString()); + logger.info("Initialised scheduler", scheduler); + var appDataExportAgenda = require('./lib/export'); + appDataExportAgenda.scheduler().start(); + }); +} + +function startWorker(clusterWorker) { + + // Note: location/order of these required for istanbul code coverage is important. 
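// istanbul-middleware instruments modules as they are require()d, so the
// coverage hook in the block below has to run before the application modules
// required later in this function are loaded; otherwise they are not instrumented.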
+ if (fhconfig.bool('fhmbaas.code_coverage_enabled')) { + var coverage = require('istanbul-middleware'); + coverage.hookLoader(__dirname); + } + + setupUncaughtExceptionHandler(logger, clusterWorker); + setupFhconfigReloadHandler(fhconfig); + + if (fhconfig.bool('component_metrics.enabled')) { + initComponentMetrics(fhconfig.value('component_metrics')); + } + + initModules(clusterWorker, getMbaasMiddlewareConfig(), startApp); +} + +function initComponentMetrics(metricsConf) { + var metrics = fhComponentMetrics(metricsConf); + + metrics.memory(TITLE, { interval: 2000 }, function(err) { + if (err) { + logger.warn(err); + } + }); + + metrics.cpu(TITLE, { interval: 1000 }, function(err) { + if (err) { + logger.warn(err); + } + }); +} +function getMbaasMiddlewareConfig() { + var conf = fhconfig.getConfig(); + var jsonConfig = { + mongoUrl: fhconfig.mongoConnectionString(), + mongo : { + host: conf.rawConfig.mongo.host, + port: conf.rawConfig.mongo.port, + name: conf.rawConfig.mongo.name, + admin_auth: { + user: conf.rawConfig.mongo.admin_auth.user, + pass: conf.rawConfig.mongo.admin_auth.pass + }, + replicaSet: conf.rawConfig.mongo.replicaset_name + }, + crash_monitor: conf.rawConfig.crash_monitor, + email: conf.rawConfig.email, + fhamqp: conf.rawConfig.fhamqp, + logger: logger + }; + logger.debug('JSON Config ', jsonConfig); + + return jsonConfig; +} + +function initModules(clusterWorker, jsonConfig, cb) { + + async.parallel([ + async.apply(async.waterfall, [ + async.constant(jsonConfig), + fhmbaasMiddleware.init + ]), + function initialiseModels(cb) { + //The models should not be initialised on the mongoose connection for fh-mbaas-middleware. The document instanceof checks will not + //pass and none of the returned models will have the correct schemas attached. 
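// A minimal sketch of the pitfall described above (the connection URLs and the
// 'Thing' schema are placeholders, not part of fh-mbaas):
//
//   var connA = mongoose.createConnection('mongodb://127.0.0.1/a');
//   var connB = mongoose.createConnection('mongodb://127.0.0.1/b');
//   var schema = new mongoose.Schema({name: String});
//   var ThingA = connA.model('Thing', schema);
//   var ThingB = connB.model('Thing', schema);
//   new ThingA({name: 'x'}) instanceof ThingB; // false
//
// which is why the fh-mbaas models get their own dedicated connection below.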
+ mongooseConnection = mongoose.createConnection(fhconfig.mongoConnectionString()); + handleMongoConnectionEvents(mongooseConnection); + models.init(mongooseConnection, cb); + }, + async.apply(initAmqp, jsonConfig), + async.apply(fhServiceAuth.init, {logger: logger}) + ], function(err) { + if (!err) { + return cb(); + } + logger.error(err); + if (clusterWorker) { + clusterWorker.kill(); + } else { + process.exit(1); + } + }); +} + +function initAmqp(config, cb) { + var migrationStatusHandler = require('./lib/messageHandlers/migrationStatusHandler.js'); + var deployStatusHandler = require('./lib/messageHandlers/deployStatusHandler.js'); + + var amqpConnection = amqp.connect(config); + deployStatusHandler.listenToDeployStatus(amqpConnection, config, function() { + migrationStatusHandler.listenToMigrationStatus(amqpConnection, config); + cb(); + }); +} + +function startApp( ) { + var app = express(); + + app.use(logger.requestIdMiddleware); + + if (fhconfig.bool('component_metrics.enabled')) { + app.use(fhComponentMetrics.timingMiddleware(TITLE, fhconfig.value('component_metrics'))); + } + // Enable CORS for all requests + app.use(cors()); + + // Request logging + app.use(require('express-bunyan-logger')({ logger: logger, parseUA: false })); + + // Parse application/x-www-form-urlencoded + app.use(bodyParser.urlencoded({ + extended: false + })); + + // Parse JSON payloads + app.use(bodyParser.json({limit: fhconfig.value('fhmbaas.maxpayloadsize') || "20mb"})); + + app.use('/sys', require('./lib/handlers/sys.js')()); + app.use('/api/mbaas', require('./lib/handlers/api.js')); + app.use('/api/app', require('./lib/handlers/app.js')); + app.use('/api/storage', require('./lib/storage').router); + + var port = fhconfig.int('fhmbaas.port'); + app.listen(port, function() { + // Get our version number from package.json + var pkg = JSON.parse(fs.readFileSync(path.join(__dirname, './package.json'), "utf8")); + /* eslint-disable no-console */ + console.log("Started " + TITLE + " version: " + pkg.version + " at: " + new Date() + " on port: " + port); + /* eslint-enable no-console */ + }); +} + +function setupFhconfigReloadHandler(fhconfig) { + process.on(fhconfig.RELOAD_CONFIG_SIGNAL, function() { + fhconfig.reload(cluster.workers, function(err) { + if (err) { + /* eslint-disable no-console */ + console.error("Config not reloaded"); + console.error(err); + console.error("Please fix and try again!!"); + /* eslint-enable no-console */ + } + createAndSetLogger(); + }); + }); +} + +//Closing a mongoose connection if needed. +function closeMongooseConnection() { + if (mongooseConnection) { + mongooseConnection.close(); + } +} + +function handleMongoConnectionEvents(conn) { + if (conn) { + conn.on('error', function(err) { + logger.error('Mongo connection error: ' + err); + throw err; + }); + + conn.on('disconnected', function() { + logger.error('Mongo connection lost. 
Socket closed');
+      throw new Error('Mongo close event emitted');
+    });
+  }
+}
+
+function setupUncaughtExceptionHandler(logger, worker) {
+  // handle uncaught exceptions
+  process.on('uncaughtException', function(err) {
+    logger.error("FATAL: UncaughtException, please report: " + util.inspect(err));
+    /* eslint-disable no-console */
+    console.error(new Date().toString() + " FATAL: UncaughtException, please report: " + util.inspect(err));
+    /* eslint-enable no-console */
+    if (err !== undefined && err.stack !== undefined) {
+      logger.error(util.inspect(err.stack));
+      /* eslint-disable no-console */
+      console.trace(err.stack);
+      /* eslint-enable no-console */
+    }
+    if (worker && worker.process) {
+      worker.process.exit(1);
+    } else {
+      process.exit(1);
+    }
+  });
+
+  // If the Node process ends, close the Mongoose connection
+  process.on('SIGINT', closeMongooseConnection).on('SIGTERM', closeMongooseConnection);
+}
diff --git a/fh-mbaas.yml b/fh-mbaas.yml
new file mode 100755
index 0000000..6e8f48d
--- /dev/null
+++ b/fh-mbaas.yml
@@ -0,0 +1,238 @@
+# mbaas endpoint routes
+swagger: '2.0'
+info:
+  title: MbaaS API
+  description: Endpoints For FH-MBaaS service
+  version: "1.0.2"
+
+schemes:
+  - https
+
+produces:
+  - application/json
+
+consumes:
+  - application/json
+
+paths:
+  /sys/info/ping:
+    get:
+      produces:
+        - text/plain
+      description: "tests if fh-mbaas service is up and running"
+      responses:
+        200:
+          description: "signals that service is up and running"
+      tags:
+        - sys
+  /sys/info/version:
+    get:
+      description: "retrieves version of the fh-mbaas service"
+      responses:
+        200:
+          description: "returned upon successful version retrieval"
+        500:
+          description: "returned when there was an error getting the version"
+      tags:
+        - sys
+  /sys/info/health:
+    get:
+      description: "checks the health of fh-mbaas by checking MongoDB and fh-ditch connections"
+      responses:
+        200:
+          description: "returns health status"
+          schema:
+            $ref: '#/definitions/Health'
+      tags:
+        - sys
+  /api/mbaas/{domain}/{environment}/db:
+    post:
+      description: "gets fh-mbaas instance and creates database on it"
+      parameters:
+        - in: path
+          name: domain
+          description: "the domain to create a database instance on"
+          type: string
+          required: true
+        - in: path
+          name: environment
+          description: "the environment in the domain to create a database instance on"
+          type: string
+          required: true
+      responses:
+        200:
+          description: "returned database connection URI when database instance was created on specified domain and environment"
+          schema:
+            $ref: '#/definitions/DbURI'
+        500:
+          description: "returned when it was not possible to get fh-mbaas instance or it was not possible to create a database on it"
+      tags:
+        - api
+        - mbaas
+  /api/mbaas/apps/{domain}/{environment}/{appname}/migratedb:
+    post:
+      description: "migrates a database of the application in some domain and environment"
+      parameters:
+        - in: path
+          name: domain
+          description: "the domain of an application to migrate the database of"
+          type: string
+          required: true
+        - in: path
+          name: environment
+          description: "the environment of an application to migrate the database of"
+          type: string
+          required: true
+        - in: path
+          name: appname
+          description: "the name of an application to migrate the database of"
+          type: string
+          required: true
+        - in: body
+          name: body
+          description: migration parameters
+          required: true
+          schema:
+            $ref: '#/definitions/AppMigrationParameters'
+      responses:
+        200:
+          description: "returned when the migration was successful"
+        400:
+          description:
"returned when the request did not contain the body with either \"cacheKey\" or \"appGuid\" parameter" + 423: + description: "returned when the migration is already started or completed" + 500: + description: "returned when it was not possible to create application mbaas instance or it was not possible to create database for the application or if stopping of the application was unsuccessful or there as an error while the migration was performed" + tags: + - api + - mbaas + /api/mbaas/apps/{domain}/{environment}/{appname}/migrateComplete: + post: + description: "completes the database migration of specified application in some domain and environment" + parameters: + - in: path + name: domain + description: "the domain of an application to complete the migration of the database of" + type: string + required: true + - in: path + name: environment + description: "the environment of an application to complete the migration the database of" + type: string + required: true + - in: path + name: appname + description: "the name of an application to complete the migration the database of" + type: string + required: true + - in: body + name: body + description: migration parameters + required: true + schema: + $ref: '#/definitions/AppMigrationParameters' + responses: + 200: + description: "returned when database migration is complete" + 400: + description: "returned when the request did not contain the body with either \"cacheKey\" or \"appGuid\" parameter or there was not app mbaas instance found for specified appname" + 500: + description: "returned when there was an error while looking for app mbaas instance or the database migration failed to complete properly" + /api/mbaas/apps/{domain}/{environment}/{appname}/db: + delete: + description: "deletes the database of specified application" + parameters: + - in: path + name: domain + description: "the domain of an application to complete the migration of the database of" + type: string + required: true + - in: path + name: environment + description: "the environment of an application to complete the migration the database of" + type: string + required: true + - in: path + name: appname + description: "the name of an application to complete the migration the database of" + type: string + required: true + responses: + 200: + description: "returned when database deletion was successful or there was not any database to delete" + 500: + description: "returned when there was an error to find the app mbaas instance or to remove the database for the specified application" + tags: + - api + - mbaas + /api/mbaas/apps/{domain}/{environment}/{appname}/env: + get: + description: "gets the environment information for an application in an environment for a domain" + parameters: + - in: path + name: domain + description: "the domain of an application to complete the migration of the database of" + type: string + required: true + - in: path + name: environment + description: "the environment of an application to complete the migration the database of" + type: string + required: true + - in: path + name: appname + description: "the name of an application to complete the migration the database of" + type: string + required: true + responses: + 200: + description: body with environment information + tags: + - api + - mbaas + - env + +definitions: + Health: + properties: + status: + type: string + description: "" + summary: + description: "" + type: string + enum: [ + "No issues to report. All tests passed without error", + "Some non-critical tests encountered issues. 
See the \"details\" object for specifics.",
+          "A critical test item encountered an error. Please investigate this. See the \"details\" object for specifics."
+        ]
+      details:
+        type: array
+        items:
+          $ref: '#/definitions/HealthDetail'
+  HealthDetail:
+    properties:
+      description:
+        description: "description of the health check"
+        type: string
+      test_status:
+        description: "status of the health check"
+        type: string
+        enum: [ "ok", "warn", "crit" ]
+      result:
+        description: "brief result of the check in a string form"
+        type: string
+      runtime:
+        description: "number indicating how long some particular health check lasted in milliseconds"
+        type: integer
+  DbURI:
+    properties:
+      uri:
+        type: string
+        description: "URI with mongodb schema"
+  AppMigrationParameters:
+    properties:
+      cacheKey:
+        type: string
+      appGuid:
+        type: string
\ No newline at end of file
diff --git a/lib/appdata/import/appDataImport.js b/lib/appdata/import/appDataImport.js
new file mode 100644
index 0000000..d5d4d93
--- /dev/null
+++ b/lib/appdata/import/appDataImport.js
@@ -0,0 +1,37 @@
+var MongoWrapper = require('../shared/mongowrapper/mongowrapper').MongoWrapper;
+
+/**
+ * Runs the import for the specified collection
+ * @param host mongo host
+ * @param port mongo port
+ * @param database mongo database
+ * @param filename collection bson file to be imported (full path)
+ * @param cb
+ */
+function mongoImport(host, port, database, filename, cb) {
+  var mongoWrapper = new MongoWrapper();
+
+  mongoWrapper.restore()
+    .withDatabase(database)
+    .withPath(filename)
+    .withHost(host)
+    .withPort(port)
+    .withDetectMaster()
+    .run(function(err, process) {
+      if (err) {
+        return cb(err);
+      }
+      process.on('close', function(code) {
+        if (code === 0) {
+          return cb();
+        }
+        return cb(new Error('Error executing import. Return code : ' + code));
+      })
+        .on('error', function(err) {
+          return cb(new Error(err));
+        });
+
+    });
+}
+
+module.exports.mongoImport = mongoImport;
diff --git a/lib/appdata/import/appDataImportRunner.js b/lib/appdata/import/appDataImportRunner.js
new file mode 100644
index 0000000..afc2c27
--- /dev/null
+++ b/lib/appdata/import/appDataImportRunner.js
@@ -0,0 +1,121 @@
+const async = require('async');
+const prepareForImport = require('./preparationSteps').prepareForImport;
+const mongoImport = require('./appDataImport').mongoImport;
+const EventEmitter = require('events').EventEmitter;
+const util = require('util');
+const path = require('path');
+
+const START_EVENT = require('../../jobs/progressPublisher').START_EVENT;
+const FINISH_EVENT = require('../../jobs/progressPublisher').FINISH_EVENT;
+const FAIL_EVENT = require('../../jobs/progressPublisher').FAIL_EVENT;
+const HEARTBEAT_EVENT = 'heartbeat';
+
+/**
+ * Creates an AppDataImportRunner instance
+ * @param context the context of the import, structured as follows:
+ * { logger: logger,
+ *   input: {
+ *     appData: {
+ *       guid: appguid       // REQUIRED TO START THE IMPORT
+ *       env: environment    // REQUIRED TO START THE IMPORT
+ *     },
+ *     path: filePath        // REQUIRED TO START THE IMPORT
+ *     folder: fileFolder    // COMPUTED AUTOMATICALLY
+ *   },
+ *   output: {
+ *     folder: outputFolder  // COMPUTED AUTOMATICALLY
+ *     files: []             // COMPUTED AUTOMATICALLY
+ *   },
+ *   importJob: jobModel     // REQUIRED TO START THE IMPORT: the appdata job model
+ * }
+ * @constructor
+ */
+function AppDataImportRunner(context, keepalive) {
+  this.keepalive = keepalive ? keepalive : 30000;
+  EventEmitter.call(this);
+  context.emitter = this;
+  this.context = context;
+}
+
+function performImport(context, file, cb) {
+  var dbConf = context.appInfo.dbConf;
+  var logger = context.logger;
+
+  var importFile = path.join(context.input.folder, file);
+
+  logger.info('Importing file', {path: importFile, db: { host: dbConf.host, port:dbConf.port, name: dbConf.name} });
+
+  mongoImport(dbConf.host, dbConf.port, dbConf.name, importFile, function(err) {
+    if (!err) {
+      context.progress.next();
+    }
+    cb(err, context);
+  });
+}
+
+function importFiles(context, cb) {
+  async.eachSeries(
+    context.output.files,
+    async.apply(performImport, context), function(err) {
+      cb(err, context);
+    });
+}
+
+util.inherits(AppDataImportRunner, EventEmitter);
+
+AppDataImportRunner.prototype.run = function() {
+  var self = this;
+  var logger = this.context.logger;
+  self.interval = this.heartbeat();
+
+  var hostName = require('os').hostname();
+
+  var jobModel = this.context.jobModel;
+
+  if (jobModel.metadata.shipHostname !== hostName) {
+    // File is not mine. Nothing to do.
+    logger.info('File was uploaded on another host. Ignoring.', {thisHost: hostName, fileHost: jobModel.metadata.shipHostname});
+    return;
+  }
+
+  logger.info('Application data import started');
+
+
+  self.emit(START_EVENT, self.context.appInfo, 'Import started');
+  async.waterfall([
+    async.apply(prepareForImport, self.context),
+    importFiles
+  ], function(err) {
+
+    // Stop heartbeat
+    if (self.interval) {
+      clearInterval(self.interval);
+    }
+
+    if (err) {
+      logger.error('Import failed', {err: err});
+      self.emit(FAIL_EVENT, err, self.context.appInfo);
+    } else {
+      logger.info('Import finished');
+      self.emit(FINISH_EVENT, self.context.appInfo, 'Import finished');
+    }
+  });
+};
+
+AppDataImportRunner.prototype.heartbeat = function() {
+  var self = this;
+  return setInterval(function() {
+    var importJob = self.context.jobModel;
+    importJob.markModified('modified');
+    importJob.save(function(err) {
+      if (err) {
+        self.context.logger.error('Failed to save job', {err: err});
+      }
+    });
+    self.emit(HEARTBEAT_EVENT);
+  }, self.keepalive);
+};
+
+module.exports.AppDataImportRunner = AppDataImportRunner;
+module.exports.HEARTBEAT_EVENT = HEARTBEAT_EVENT;
\ No newline at end of file
diff --git a/lib/appdata/import/preparationSteps.js b/lib/appdata/import/preparationSteps.js
new file mode 100644
index 0000000..15775ea
--- /dev/null
+++ b/lib/appdata/import/preparationSteps.js
@@ -0,0 +1,161 @@
+const path = require('path');
+const async = require('async');
+const fs = require('fs');
+const models = require('fh-mbaas-middleware').models;
+
+const extractTarFile = require('../shared/common').extractTarFile;
+const gunzip = require('../shared/common').gunzip;
+
+/**
+ * Checks if the app has been migrated.
+ *
+ * @param context
+ * @param cb
+ */
+function checkAppIsMigrated(context, cb) {
+
+  var logger = context.logger;
+
+  logger.info('Checking if app is migrated');
+
+  var AppMbaasModel = models.getModels().AppMbaas;
+  var appGuid = context.appInfo.guid;
+  var env = context.appInfo.environment;
+
+  AppMbaasModel.findOne({guid: appGuid, environment: env}, function(err, app) {
+    if (err) {
+      return cb(err, context);
+    }
+
+    if (!app) {
+      return cb('No app with guid "' + appGuid + '" and env "' + env + '" could be found');
+    }
+
+    if (!app.dbConf) {
+      // The app has not been upgraded yet
+      return cb(new Error('The app has not been migrated yet.
Import aborted'), context); + } + + context.appInfo = app; + cb(null, context); + }); +} + +/** + * Gets the list of extracted files from the disk + * @param context + * @param cb + */ +function getListOfFiles(context, cb) { + var logger = context.logger; + + logger.info('Getting list of extracted files'); + + context.output = {}; + fs.readdir(context.input.folder, function(err, items) { + if (err) { + return cb(new Error(err), context); + } + + var basename = path.basename(context.input.path); + + // Remove the tar file name from the list of files contained in the directory + var index = items.indexOf(basename); + if (index < 0) { + var error = new Error('The content of the directory has been changed (cannot find "' + context.input.path + '" anymore)'); + error.code = 500; + return cb(error); + } + items.splice(index, 1); + + context.output.files = items; + + /* To show the progress bar we need to know the total number of steps we need to execute. + All the steps, excluding the gunzip and import are fixed. + On the other hand, gunzip and import, depend on the number of files to be gunzipped and imported. + Since now we know how many files we are going to process, we can finally set the 'total' number of steps. + */ + + // Now we know how many files we are going to import, we can start sending progress events + + /* The files will be processed twice: + * First time they will be gunzipped + * Second time they will be imported + + That means that the number of steps to be executed is obtained by doubling the number of files + */ + context.progress.total = context.output.files.length * 2; + + cb(null, context); + }); +} + +function extractAndDelete(context, file, cb) { + + var logger = context.logger; + logger.info('Gunzipping received files'); + + cb = context.progress.wrappCallback(cb); + + var resultingFile; + + if (/\.gz$/.test(file)) { + async.series([ + function(callback) { + gunzip(context.output.folder, file, function(err, outFile) { + if (err) { + return callback(err); + } + resultingFile = outFile; + callback(null); + }); + }, + async.apply(fs.unlink, path.join(context.output.folder, file)) + ], function(err) { + if (!err) { + return cb(null, resultingFile); + } + return cb(new Error(err)); + }); + } else { + return cb(new Error('Extraneous file found in import directory'), context); + } +} + +/** + * Output: + * context.output.files : list of ungzipped files + * @param context + * @param cb + */ +function uncompressGZipFiles(context, cb) { + context.output.folder = context.input.folder; + + async.mapLimit(context.output.files, + 2, + function(file, cb) { + extractAndDelete(context, file, cb); + }, + function(err, resultingFiles) { + context.output.files = resultingFiles; + cb(err, context); + }); +} + +/** + * Performs all the preparation steps needed to be able to import the file + * @param context import process context + * @param cb the callback + */ +function prepareForImport(context, cb) { + async.waterfall([ + async.apply(checkAppIsMigrated, context), + extractTarFile, + getListOfFiles, + uncompressGZipFiles + ], function(err) { + cb(err, context); + }); +} + +module.exports.prepareForImport=prepareForImport; \ No newline at end of file diff --git a/lib/appdata/shared/commandUtils.js b/lib/appdata/shared/commandUtils.js new file mode 100644 index 0000000..8880c30 --- /dev/null +++ b/lib/appdata/shared/commandUtils.js @@ -0,0 +1,25 @@ +function addBooleanParam(ary, paramsMap, key, param, negate) { + var val = paramsMap[key]; + + if (val === undefined || val === null) { + // 
value not present + return; + } + + val = negate ? !val : val; + + if (val) { + ary.push(param); + } +} + +function addParam(ary, paramsMap, key, param) { + var val = paramsMap[key]; + + if (val) { + ary.push(param, val); + } +} + +module.exports.addParam = addParam; +module.exports.addBooleanParam = addBooleanParam; \ No newline at end of file diff --git a/lib/appdata/shared/common.js b/lib/appdata/shared/common.js new file mode 100644 index 0000000..cf35741 --- /dev/null +++ b/lib/appdata/shared/common.js @@ -0,0 +1,164 @@ +const path = require('path'); +const TarWrapper = require('./tarwrapper/tarwrapper').TarWrapper; +const fs = require('fs'); +const zlib = require('zlib'); + +const CONSTANTS = require('./constants'); + +const util = require('util'); + +const async = require('async'); + +/** + * Extract a tar file + * + * @param context the context of the operation. It must have at least following fields: + * context = { + * input: { + * folder: 'folder where extraction will be performed' + * path : 'path to the file to be extracted' + * } + * } + * @param cb + */ +function extractTarFile(context, cb) { + var filepath = context.input.path; + var logger = context.logger; + + context.input.folder = path.dirname(filepath); + + logger.info('Extracting file %s in %s folder', filepath, context.input.folder); + + new TarWrapper(filepath) + .extract() + .withCWD(context.input.folder) + .run() + .on('close', function(code) { + if (code === 0) { + return cb(null, context); + } + return cb(new Error('Error executing tar command. Return code : ' + code), context); + }) + .on('error', function(err) { + return cb(new Error(err), context); + }); +} + +/** + * GUnzip the specified file. + * As per standard gunzip behaviour, the '.gz' extension is stripped off to get the original file name. + * + * @param folder folder that contains the file to be gunzipped and will contain the destination file. 
+ * @param file file to be gunzipped, + * @param cb + */ +function gunzip(folder, file, cb) { + var inFile = path.join(folder, file); + fs.exists(inFile, function(exists) { + if (!exists) { + return cb(new Error('Unable to find file at "' + inFile + '"')); + } + var outFile = file.slice(0, -3); + var gzReadStream = fs.createReadStream(path.join(folder, file)); + var writeStream = fs.createWriteStream(path.join(folder, outFile)); + + var gunzip = zlib.createGunzip(); + gzReadStream.pipe(gunzip); + gunzip.pipe(writeStream); + + gunzip.on("end", function() { + return cb(null, outFile); + }).on("error", function(err) { + cb(err); + }); + }); +} + +/** + * Connect on the specified host and execute a query to get the host with the specified role + * @param host host to connect to + * @param port port + * @param role role to be searched + * @param cb + * @private + */ +function _getMongoHost(host, port,role, cb) { + const Mongo = require('./mongowrapper/mongoCommand').Mongo; + new Mongo() + .withQuiet() + .withHost(host) + .withPort(port) + .withEval(util.format("this.rs.status().members && this.rs.status().members.filter(function (x) { return x.stateStr === '%s' })[0].name", role)) + .run(function(err, process) { + if (err) { + return cb(err); + } + + var result = { + port: CONSTANTS.MONGO_DEFAULT_PORT + }; + + var stdout = ''; + process.stdout.on('data', function(data) { + stdout += data.toString(); + }); + + process.on('close', function(code) { + if (code === 0) { + // read host from stdout + if (stdout.trim().length === 0) { + return cb(null, {host: host, port: port}); + } + + if (stdout.indexOf(":") > 0) { + var parts = stdout.split(":"); + result.host = parts[0].trim(); + result.port = parts[1].trim(); + } else { + result.host = stdout.trim(); + } + return cb(null, result); + } + return cb(new Error(util.format('Unable to detect %s instance from host %s:%d', role, host, port))); + }) + .on('error', function(err) { + return cb(new Error(err)); + }); + }); +} + +/** + * Detect which server has the specified role + * @param hostList comma separated list of hosts + * @param port port + * @param role role we are searching for + * @param cb + */ +function getMongoHost(hostList, port,role, cb) { + var hosts = hostList.split(','); + + var detectedHost = undefined; + + async.everyLimit(hosts, 1, function(host, callback) { + + _getMongoHost(host, port, role, function(err, master) { + if (err) { + // server with specified role not found. Try next host. 
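// (async.everyLimit is used here as a sequential search: passing true moves on
// to the next host, while the callback(false) in the else branch stops the
// iteration as soon as a host reports the requested role; the overall result is
// therefore only true when no host matched, which is why the final callback
// names its argument 'notFound'.)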
+ return callback(true); + } else { + detectedHost = master; + // interrupt the loop + callback(false); + } + }); + }, function(notFound) { + if (notFound) { + return cb('Unable to detect ' + role + ' instance'); + } + + cb(null, detectedHost); + }); +} +module.exports.extractTarFile = extractTarFile; +module.exports.gunzip = gunzip; +module.exports.getMongoHost = getMongoHost; diff --git a/lib/appdata/shared/constants.js b/lib/appdata/shared/constants.js new file mode 100644 index 0000000..a89ae95 --- /dev/null +++ b/lib/appdata/shared/constants.js @@ -0,0 +1,3 @@ + +module.exports.MONGO_DEFAULT_PORT = 27017; +module.exports.MONGO_HOST_ROLE = { MASTER: 'PRIMARY', SLAVE: 'SECONDARY'}; \ No newline at end of file diff --git a/lib/appdata/shared/mongowrapper/mongoCommand.js b/lib/appdata/shared/mongowrapper/mongoCommand.js new file mode 100644 index 0000000..f5a9423 --- /dev/null +++ b/lib/appdata/shared/mongowrapper/mongoCommand.js @@ -0,0 +1,77 @@ +var child_process = require('child_process'); +const addParam = require('../commandUtils').addParam; +const addBooleanParam = require('../commandUtils').addBooleanParam; +const MONGO = 'mongo'; +const async = require('async'); + +function Mongo() { + this.paramsMap = {}; + + var self = this; + + // Fluent api to be used for configuring + this.withHost = function(host) { + self.paramsMap.host = host; + return self; + }; + + this.withPort = function(port) { + self.paramsMap.port = port; + return self; + }; + + this.withQuiet = function() { + self.paramsMap.quiet = true; + return self; + }; + + this.withEval = function(evalString) { + self.paramsMap.eval = evalString; + return self; + }; + +} + +/** + * Creates an array of parameters, as expected by spawn, based on how the object has been configured. + * + * @param paramsMap map of all the configured parameters + * @returns {Array} an array as expected by spawn + */ +function buildParamsAry(paramsMap, cb) { + var result = []; + + addBooleanParam(result, paramsMap, 'quiet', '--quiet'); + addParam(result, paramsMap, 'host', '--host'); + addParam(result, paramsMap, 'port', '--port'); + addParam(result, paramsMap, 'eval', '--eval'); + + cb(null, result); +} + +function validateParams(paramsMap, cb) { + if (!paramsMap.host) { + return cb('Database host is mandatory'); + } + cb(null, paramsMap); +} + +/** + * Spawns the restore command with the configured parameters + * @returns {*} + */ +Mongo.prototype.run = function(cb) { + + async.waterfall([ + async.apply(validateParams, this.paramsMap), + buildParamsAry + ], function(err, params) { + if (err) { + return cb(err); + } + + cb(null, child_process.spawn(MONGO, params)); + }); +}; + +module.exports.Mongo = Mongo; \ No newline at end of file diff --git a/lib/appdata/shared/mongowrapper/mongowrapper.js b/lib/appdata/shared/mongowrapper/mongowrapper.js new file mode 100644 index 0000000..258787b --- /dev/null +++ b/lib/appdata/shared/mongowrapper/mongowrapper.js @@ -0,0 +1,23 @@ +const Restore = require('./restoreCommand').Restore; +const Mongo = require('./mongoCommand').Mongo; + +/** + * Wrapper object for mongodump/mongorestore commands + * @constructor + */ +function MongoWrapper() { +} + +/** + * To be invoked to obtain an instance of the restore handler. + * That instance must be configured through the fluent API. 
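 *
 * A minimal usage sketch (the database name, path, host and port below are
 * placeholder values):
 *
 *   new MongoWrapper().restore()
 *     .withDatabase('my-app-db')
 *     .withPath('/tmp/appdata/my-collection.bson')
 *     .withHost('127.0.0.1')
 *     .withPort(27017)
 *     .withDetectMaster()
 *     .run(function(err, proc) {
 *       // proc is the spawned mongorestore child process
 *     });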
+ */ +MongoWrapper.prototype.restore = function() { + return new Restore(); +}; + +MongoWrapper.prototype.mongo = function() { + return new Mongo(); +}; + +module.exports.MongoWrapper = MongoWrapper; diff --git a/lib/appdata/shared/mongowrapper/restoreCommand.js b/lib/appdata/shared/mongowrapper/restoreCommand.js new file mode 100644 index 0000000..eafc6b7 --- /dev/null +++ b/lib/appdata/shared/mongowrapper/restoreCommand.js @@ -0,0 +1,197 @@ +var child_process = require('child_process'); +const addParam = require('../commandUtils').addParam; +const addBooleanParam = require('../commandUtils').addBooleanParam; +const CONSTANTS = require('../constants'); +const common = require('../common'); +const RESTORE = 'mongorestore'; +const async = require('async'); + +function Restore() { + this.paramsMap = {}; + + var self = this; + // Fluent api to be used for configuring + this.withHost = function(host) { + self.paramsMap.host = host; + return self; + }; + + this.withPort = function(port) { + self.paramsMap.port = port; + return self; + }; + + this.withVerbose = function(verbose) { + self.paramsMap.verbose = verbose; + return self; + }; + + this.withAuthenticationDatabase = function(authDatabase) { + self.paramsMap.authDatabase = authDatabase; + return self; + }; + + this.withAuthenticationMechanism = function(mechanism) { + self.paramsMap.mechanism = mechanism; + return self; + }; + + this.withDbPath = function(dbPath, journal) { + self.paramsMap.dbPath = dbPath; + self.paramsMap.journal = journal; + return self; + }; + + this.withDirectoryPerDb = function() { + self.paramsMap.directoryPerDB = true; + return self; + }; + + this.withDatabase = function(database) { + self.paramsMap.database = database; + return self; + }; + + this.withCollection = function(collection) { + self.paramsMap.collection = collection; + return self; + }; + + this.withObjcheck = function(objcheck) { + self.paramsMap.objcheck = objcheck; + return self; + }; + + this.withFilter = function(filter) { + self.paramsMap.filter = filter; + return self; + }; + + this.withDrop = function() { + self.paramsMap.drop = true; + return self; + }; + + this.withOpLogReplay = function() { + self.paramsMap.opLogReplay = true; + return self; + }; + + this.withOpLogLimit = function(limit) { + self.paramsMap.opLogLimit = limit; + return self; + }; + + this.withKeepIndexVersion = function() { + self.paramsMap.keepIndexVersion = true; + return self; + }; + + this.withNoOptionsRestore = function() { + self.paramsMap.noOptionsRestore = true; + return self; + }; + + this.withNoIndexRestore = function() { + self.paramsMap.noIndexRestore = true; + return self; + }; + + this.withMinimumNumberOfReplicaPerWrite = function(number) { + self.paramsMap.minimumNumberOfReplicasPerWrite = number; + return self; + }; + + this.withPath = function(path) { + self.paramsMap.path = path; + return self; + }; + + this.withDetectMaster = function() { + self.paramsMap.detectMaster = true; + return self; + }; +} + +function fillHostAndPort(paramsMap, cb) { + if (paramsMap.detectMaster) { + common.getMongoHost(paramsMap.host, paramsMap.port, CONSTANTS.MONGO_HOST_ROLE.MASTER, function(err, result) { + if (err) { + return cb(err); + } + paramsMap.host = result.host; + paramsMap.port = result.port; + + return cb(null, paramsMap); + }); + } else { + cb(null, paramsMap); + } +} + +/** + * Creates an array of parameters, as expected by spawn, based on how the object has been configured. 
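 *
 * For example (placeholder values), a paramsMap of
 *   { host: '127.0.0.1', port: 27017, drop: true, path: '/tmp/dump/my-app-db' }
 * produces
 *   ['--host', '127.0.0.1', '--port', 27017, '--drop', '/tmp/dump/my-app-db']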
+ * + * @param paramsMap map of all the configured parameters + * @returns {Array} an array as expected by spawn + */ +function buildParamsAry(paramsMap, cb) { + var result = []; + + addParam(result, paramsMap, 'host', '--host'); + addParam(result, paramsMap, 'port', '--port'); + addParam(result, paramsMap, 'username', '--username'); + addParam(result, paramsMap, 'password', '--password'); + addParam(result, paramsMap, 'authDatabase', '--authenticationDatabase'); + addParam(result, paramsMap, 'mechanism', '--authenticationMechanism'); + addParam(result, paramsMap, 'dbPath', '--dbpath'); + addBooleanParam(result, paramsMap, 'directoryPerDB', '--directoryPerDB'); + addBooleanParam(result, paramsMap, 'journal', '--journal'); + addParam(result, paramsMap, 'database', '--db'); + addParam(result, paramsMap, 'collection', '--collection'); + addBooleanParam(result, paramsMap, 'objcheck', '--objcheck'); + addBooleanParam(result, paramsMap, 'objcheck', '--noobjcheck', true); + addParam(result, paramsMap, 'filter', '--filter'); + addBooleanParam(result, paramsMap, 'drop', '--drop'); + addBooleanParam(result, paramsMap, 'opLogReplay', '--oplogReplay'); + addParam(result, paramsMap, 'opLogLimit', '--oplogLimit'); + addBooleanParam(result, paramsMap, 'keepIndexVersion', '--keepIndexVersion'); + addBooleanParam(result, paramsMap, 'noOptionsRestore', '--noOptionsRestore'); + addBooleanParam(result, paramsMap, 'noIndexRestore', '--noIndexRestore'); + addParam(result, paramsMap, 'minimumNumberOfReplicasPerWrite', '--w'); + + // this must be the last, together with directory (only one of them is allowed) + if (paramsMap.path) { + result.push(paramsMap.path); + } + + cb(null, result); +} + +function validateParams(paramsMap, cb) { + if (!paramsMap.path) { + return cb('Path to the import file (or containing directory) is mandatory'); + } + cb(null, paramsMap); +} + +/** + * Spawns the restore command with the configured parameters + * @returns {*} + */ +Restore.prototype.run = function(cb) { + + async.waterfall([ + async.apply(fillHostAndPort, this.paramsMap), + validateParams, + buildParamsAry + ], function(err, params) { + if (err) { + return cb(err); + } + + cb(null, child_process.spawn(RESTORE, params)); + }); +}; + +module.exports.Restore = Restore; \ No newline at end of file diff --git a/lib/appdata/shared/tarwrapper/extractCommand.js b/lib/appdata/shared/tarwrapper/extractCommand.js new file mode 100644 index 0000000..e508ca1 --- /dev/null +++ b/lib/appdata/shared/tarwrapper/extractCommand.js @@ -0,0 +1,242 @@ +var child_process = require('child_process'); +const addParam = require('../commandUtils').addParam; +const addBooleanParam = require('../commandUtils').addBooleanParam; +const TAR = 'tar'; + +function Extract() { + this.paramsMap = {}; + + var self = this; + + // Fluent api to be used for configuring + + /** + * Sets the tar file to be extracted + * @param path + * @returns {Extract} + */ + this.withFile = function(path) { + self.paramsMap.file = path; + return self; + }; + + /** + * Don't replace existing files when extracting + * @returns {Extract} + */ + this.withKeepOldFiles = function() { + self.paramsMap.keepOldFiles = true; + return self; + }; + + /** + * don't replace existing files that are newer than + * their archive copies + * @returns {Extract} + */ + this.withKeepNewerFiles = function() { + self.paramsMap.keepNewerFiles = true; + return self; + }; + + /** + * overwrite metadata of existing directories when + * extracting (default) + * @param overwrite + * @returns {Extract} + */ + 
this.withOverwriteDir = function(overwrite) { + self.paramsMap.overwriteDir = overwrite; + return self; + }; + + /** + * Overwrite existing files when extracting + * @returns {Extract} + */ + this.withOverwrite = function() { + self.paramsMap.overwrite = true; + return self; + }; + + /** + * Empty hierarchies prior to extracting directory + * @returns {Extract} + */ + this.withRecursiveUnlink = function() { + self.paramsMap.recursiveUnlink = true; + return self; + }; + + /** + * remove each file prior to extracting over it + * @returns {Extract} + */ + this.withUnlinkFirst = function() { + self.paramsMap.unlinkFirst = true; + return self; + }; + + /** + * ignore exit codes of children + * @param ignore + * @returns {Extract} + */ + this.withIgnoreCommandError = function(ignore) { + self.paramsMap.ignoreCommandError = ignore; + return self; + }; + + /** + * extract files to standard output + * @returns {Extract} + */ + this.withOutToStdOut = function() { + self.paramsMap.stdout = true; + return self; + }; + + /** + * pipe extracted files to another program + * @param toCommand + * @returns {Extract} + */ + this.withToCommand = function(toCommand) { + self.paramsMap.toCommand = toCommand; + return self; + }; + + /** + * preserve access times on dumped files, either + * by restoring the times after reading (if atimeFilePreserve == true or 'replace'; default) + * or by not setting the times in the first place (atimeFilePreserve='system') + * @param atimeFilePreserve + * @returns {Extract} + */ + this.withATimeFilePreserve = function(atimeFilePreserve) { + self.paramsMap.atimeFilePreserve = atimeFilePreserve; + return self; + }; + + /** + * Delay setting modification times and + * permissions of extracted directories until the end + * of extraction + * @param delayDirectoryRestore + * @returns {Extract} + */ + this.withDelayDirectoryRestore = function(delayDirectoryRestore) { + self.paramsMap.delayDirectoryRestore = delayDirectoryRestore; + return self; + }; + + /** + * don't extract file modified time + * @returns {Extract} + */ + this.withTouch = function() { + self.paramsMap.touch = true; + return self; + }; + + /** + * try extracting files with the same ownership as + * exists in the archive (default for superuser) + * @param sameOwner + * @returns {Extract} + */ + this.withSameOwner = function(sameOwner) { + self.paramsMap.noSameOwner = sameOwner; + return self; + }; + + /** + * Preserve the permission (if samePermission == true), otherwise + * apply the user's umask when extracting permissions + * from the archive (default for ordinary users) + * @param samePermission + * @returns {Extract} + */ + this.withSamePermissions = function(samePermission) { + self.paramsMap.noSamePermissions = samePermission; + return self; + }; + + /** + * always use numbers for user/group names + * @returns {Extract} + */ + this.withNumericOwner = function() { + self.paramsMap.numericOwner = true; + return self; + }; + + /** + * Current working directory + * @param cwd + * @returns {Extract} + */ + this.withCWD = function(cwd) { + self.paramsMap.cwd = cwd; + return self; + }; +} + +function addParamAutoDetect(result, paramsMap, key, param, negate) { + if (paramsMap[key] === true) { + addBooleanParam(result, paramsMap, key, param, negate); + } else if (paramsMap[key]) { + addParam(result, paramsMap, key, param); + } +} + +function buildParamsAry(paramsMap) { + var result = ['--extract']; + + addParamAutoDetect(result, paramsMap, 'keepOldFiles', '--keep-old-files'); + addParamAutoDetect(result, paramsMap, 
'keepNewerFiles', '--keep-newer-files'); + addParamAutoDetect(result, paramsMap, 'overwriteDir', '--overwrite-dir'); + addParamAutoDetect(result, paramsMap, 'overwriteDir', '--no-overwrite-dir', true); + addParamAutoDetect(result, paramsMap, 'overwrite', '--overwrite'); + addParamAutoDetect(result, paramsMap, 'recursiveUnlink', '--recursive-unlink'); + addParamAutoDetect(result, paramsMap, 'unlinkFirst', '--unlink-first'); + addParamAutoDetect(result, paramsMap, 'ignoreCommandError', '--ignore-command-error'); + addParamAutoDetect(result, paramsMap, 'stdout', '--to-stdout'); + addParamAutoDetect(result, paramsMap, 'toCommand', '--to-command'); + addParamAutoDetect(result, paramsMap, 'atimeFilePreserve', '--atime-preserve'); + addParamAutoDetect(result, paramsMap, 'delayDirectoryRestore', '--delay-directory-restore'); + addParamAutoDetect(result, paramsMap, 'touch', '--touch'); + addParamAutoDetect(result, paramsMap, 'sameOwner', '--same-owner'); + addParamAutoDetect(result, paramsMap, 'sameOwner', '--no-same-owner', true); + + addParamAutoDetect(result, paramsMap, 'samePermissions', '--same-permissions'); + addParamAutoDetect(result, paramsMap, 'samePermissions', '--no-same-permissions', true); + + addParamAutoDetect(result, paramsMap, 'numericOwner', '--numeric-owner'); + addParamAutoDetect(result, paramsMap, 'owner', '--owner'); + + addParamAutoDetect(result, paramsMap, 'file', '--file'); + + return result; +} + +/** + * Spawns the restore command with the configured parameters + * @returns {*} + */ +Extract.prototype.run = function() { + + if (!this.paramsMap.file) { + throw new Error('Path to the file to untar is mandatory'); + } + + var params = buildParamsAry(this.paramsMap); + + if (this.paramsMap.cwd) { + return child_process.spawn(TAR, params, {cwd: this.paramsMap.cwd}); + } else { + return child_process.spawn(TAR, params); + } +}; + +module.exports.Extract = Extract; \ No newline at end of file diff --git a/lib/appdata/shared/tarwrapper/tarwrapper.js b/lib/appdata/shared/tarwrapper/tarwrapper.js new file mode 100644 index 0000000..5189690 --- /dev/null +++ b/lib/appdata/shared/tarwrapper/tarwrapper.js @@ -0,0 +1,12 @@ +const Extract = require('./extractCommand').Extract; + +function TarWrapper(file) { + this.file = file; +} + +TarWrapper.prototype.extract = function() { + return new Extract() + .withFile(this.file); +}; + +module.exports.TarWrapper = TarWrapper; diff --git a/lib/constants/configProperties.js b/lib/constants/configProperties.js new file mode 100644 index 0000000..5692673 --- /dev/null +++ b/lib/constants/configProperties.js @@ -0,0 +1,5 @@ + +module.exports = { + PAGINATION_MAX_LIMIT_KEY: "fhmbaas.pagination.maxLimit", + PAGINATION_DEFAULT_LIMIT_KEY: "fhmbaas.pagination.defaultLimit" +}; diff --git a/lib/constants/index.js b/lib/constants/index.js new file mode 100644 index 0000000..432a096 --- /dev/null +++ b/lib/constants/index.js @@ -0,0 +1,4 @@ + +module.exports = { + CONFIG_PROPERTIES: require('./configProperties') +}; diff --git a/lib/dataSourceUpdater/index.js b/lib/dataSourceUpdater/index.js new file mode 100644 index 0000000..7680fa6 --- /dev/null +++ b/lib/dataSourceUpdater/index.js @@ -0,0 +1,10 @@ +var handlers = require('./lib/handlers'); +var log = require('./lib/logger'); + +module.exports = function(logger) { + log.setLogger(logger); + + return { + handlers: handlers + }; +}; \ No newline at end of file diff --git a/lib/dataSourceUpdater/lib/handlers/index.js b/lib/dataSourceUpdater/lib/handlers/index.js new file mode 100644 index 0000000..f6f733f 
--- /dev/null +++ b/lib/dataSourceUpdater/lib/handlers/index.js @@ -0,0 +1,7 @@ + + +module.exports = { + updateAllEnvDataSourceCache: require('./updateAllEnvDataSourceCache').updateEnvDataSourceCache, + updateSingleDataSource: require('./updateSingleDataSource'), + requestEndpointData: require('./requestEndpointData') +}; \ No newline at end of file diff --git a/lib/dataSourceUpdater/lib/handlers/processEnvDataSources.js b/lib/dataSourceUpdater/lib/handlers/processEnvDataSources.js new file mode 100644 index 0000000..72e83c1 --- /dev/null +++ b/lib/dataSourceUpdater/lib/handlers/processEnvDataSources.js @@ -0,0 +1,181 @@ +var async = require('async'); +var log = require('../logger').getLogger(); +var _ = require('underscore'); +var url = require('url'); +var dataSourceServices = require('../../../services/appForms/dataSources'); +var listDeployedServices = require('../../../services/appmbaas/listDeployedServices'); +var updateSingleDataSource = require('./updateSingleDataSource'); +var mongoDbUri = require('mongodb-uri'); + + +function formatDbUri(dbConf) { + + var config = { + username: dbConf.user, + password: dbConf.pass, + database: dbConf.name, + hosts: [] + }; + + //The mongo config.host for fh-mbaas is a string of comma-separated values. + dbConf.host = dbConf.host || ""; + var hosts = dbConf.host.split(','); + config.hosts = _.map(hosts, function(host, index) { + var hostEntry = { + host: host + }; + + if (_.isArray(dbConf.port)) { + hostEntry.port = dbConf.port[index] || 27017; + } else { + hostEntry.port = dbConf.port; + } + + return hostEntry; + }); + + //Adding the replica set option if required + if (_.isString(dbConf.replicaset_name)) { + config.options = config.options || {}; + config.options.replicaSet = dbConf.replicaset_name; + } + + return mongoDbUri.format(config); +} + +/** + * Processing Data Sources For A Single Environment + * @param params + * - currentTime: The time to compare data sources with + * - envConfigEntry: Environment Configuration + * - domain: + * - env: Environment ID + * - dbConf: Mongo Connection Details + * - user: Username + * - pass: Password + * - host: Mongo Connection Host + * - port: Mongo Connection Port + * - name: Env DB Name + * @param callback + * @returns {*} + */ +function processEnvDataSources(params, callback) { + //3.a.a Get Env DB Config + //3.a.b Connect To Env Db + //3.a.c List Data Sources + //3.a.d List Deployed Services + + log.logger.debug("processEnvDataSources", params); + + var currentTime = params.currentTime; + var envConfigEntry = params.envConfigEntry; + + var dbConf = envConfigEntry.dbConf; + if (!dbConf) { + log.logger.error("No Db Config For Environment ", envConfigEntry); + return callback(new Error("No Database Config Available For Environment " + envConfigEntry.env)); + } + + envConfigEntry.dbConf.mongoUrl = formatDbUri(dbConf); + + log.logger.debug("processEnvDataSources", envConfigEntry); + + async.waterfall([ + function getEnvDataSourcesAndServices(envDataCb) { + + log.logger.debug("getEnvDataSourcesAndServices", envConfigEntry); + + async.parallel({ + dataSources: function listDataSources(dsCb) { + log.logger.debug("listDataSources", { + mongoUrl: envConfigEntry.dbConf.mongoUrl, + currentTime: currentTime + }); + //Only Want Data Sources That Need To Be Updated. I.e. 
lastUpdated + interval < currentTime + dataSourceServices.listForUpdate({ + mongoUrl: envConfigEntry.dbConf.mongoUrl, + currentTime: currentTime + }, dsCb); + }, + deployedServices: function listEnvDeployedServices(servCb) { + log.logger.debug("listEnvDeployedServices", { + domain: envConfigEntry.domain, + environment: envConfigEntry.environment + }); + listDeployedServices({ + domain: envConfigEntry.domain, + environment: envConfigEntry.environment + }, servCb); + } + }, function(err, dataSourcesAndServices) { + //If An Error Happens Here, Can't update the Data Source with the error. + if (err) { + log.logger.error("Error Getting Data Sources And Services ", {error: err, params: params}); + } else { + log.logger.debug("getEnvDataSourcesAndServices", dataSourcesAndServices); + } + return envDataCb(err, { + envConfigEntry: envConfigEntry, + dataSources: dataSourcesAndServices.dataSources, + deployedServices: dataSourcesAndServices.deployedServices + }); + }); + }, + function updateAllEnvDataSources(envDataSourcesAndServices, updateCb) { + //3.a.e For Each Data Source + //3.a.e.a Get Service Host + + log.logger.debug("updateAllEnvDataSources", envDataSourcesAndServices); + + async.each(envDataSourcesAndServices.dataSources, function(dataSource, cb) { + + log.logger.debug("updateAllEnvDataSources", dataSource); + var error; + var serviceGuid = dataSource.serviceGuid; + var deployedService = _.findWhere(envDataSourcesAndServices.deployedServices, {guid: serviceGuid}); + + var fullUrl; + //If there is no deployed service, update the data source to note that the service has not been deployed. + if (!deployedService) { + deployedService = {}; + error = { + userDetail: "Service is not deployed.", + systemDetail: "The Service associated with this Data Source has not been deployed to this environment.", + code: "DS_SERVICE_NOT_DEPLOYED" + }; + } else { + var serviceHost = deployedService.url; + var path = dataSource.endpoint; + fullUrl = url.resolve(serviceHost, path); + } + + log.logger.debug("updateSingleDataSource ", { + mongoUrl: envConfigEntry.dbConf.mongoUrl, + error: error, + fullUrl: fullUrl, + accessKey: deployedService.serviceAccessKey, + dataSourceId: dataSource._id + }); + + updateSingleDataSource({ + currentTime: currentTime, + mongoUrl: envConfigEntry.dbConf.mongoUrl, + error: error, + fullUrl: fullUrl, + accessKey: deployedService.serviceAccessKey, + dataSourceId: dataSource._id + }, function(err) { + if (err) { + log.logger.debug("Error Updating A Data Source ", err); + } + return cb(); + }); + + }, updateCb); + } + ], callback); + + +} + +module.exports = processEnvDataSources; diff --git a/lib/dataSourceUpdater/lib/handlers/requestEndpointData.js b/lib/dataSourceUpdater/lib/handlers/requestEndpointData.js new file mode 100644 index 0000000..7b0d5ab --- /dev/null +++ b/lib/dataSourceUpdater/lib/handlers/requestEndpointData.js @@ -0,0 +1,50 @@ +var request = require('request'); +var log = require('../logger').getLogger(); +var _ = require('underscore'); + +/** + * Calling A Service Endpoint + * @param params + * - accessKey: The Access Key Needed To Call The Service + * - fullUrl: The Full URL Needed To Call The Service + * @param cb + */ +module.exports = function requstEndpointData(params, cb) { + + log.logger.debug("requstEndpointData", params); + + request.get({ + url: params.fullUrl, + headers: { + 'X-FH-SERVICE-ACCESS-KEY': params.accessKey + }, + json: true + }, function(err, httpResponse, body) { + if (err) { + log.logger.debug("Error Getting Endpoint Data ", {error: err, 
params: params}); + return cb(err); + } + + var error; + if (httpResponse.statusCode > 204) { + error = { + userDetail: "Invalid HTTP Response: " + httpResponse.statusCode, + systemDetail: body, + code: "INVALID_HTTP_RESPONSE" + }; + log.logger.debug("Error Getting Endpoint Data ", error, params); + } else { + //The body of the response should at least be an array + if (!_.isArray(body)) { + error = { + userDetail: "Invalid data type response.", + systemDetail: "Expected an Array but got " + typeof body, + code: "INVALID_DATA" + }; + } + log.logger.debug("Finished Getting Endpoint Data ", {body: body, params: params}); + } + + cb(error, body); + }); +}; diff --git a/lib/dataSourceUpdater/lib/handlers/updateAllEnvDataSourceCache.js b/lib/dataSourceUpdater/lib/handlers/updateAllEnvDataSourceCache.js new file mode 100644 index 0000000..be74e26 --- /dev/null +++ b/lib/dataSourceUpdater/lib/handlers/updateAllEnvDataSourceCache.js @@ -0,0 +1,66 @@ +var async = require('async'); +var fhMbaasMiddleware = require('fh-mbaas-middleware'); +var log = require('../logger').getLogger(); +var processEnvDataSources = require('./processEnvDataSources'); + + +/** + * Function To Update A Set Of Data Sources For All Environments. + * + * @param params + * @param callback + */ +function updateEnvDataSourceCache(params, callback) { + //1. List All Environments And Domains + //2. Group Envs By Domain + + var currentTime = new Date(); + + log.logger.debug("updateEnvDataSourceCache ", { + currentTime: currentTime + }); + + async.waterfall([ + function listAllDomainEnvironments(cb) { + + fhMbaasMiddleware.mbaas().find({}, function(err, envConfigEntries) { + if (err) { + log.logger.error("Error Getting Mbaas Configs ", {error: err}); + } else { + log.logger.debug("listAllDomainEnvironments ", { + currentTime: currentTime, + envConfigEntries: envConfigEntries + }); + } + + cb(err, envConfigEntries || []); + }); + }, + function processsEnvData(envConfigEntries, envCb) { + //3. For Each Domain + //3.a For Each Env In Domain + log.logger.debug("processsEnvData ", { + currentTime: currentTime, + envConfigEntries: envConfigEntries + }); + + async.map(envConfigEntries, function(envConfigEntry, cb) { + + log.logger.debug("processsEnvData Single Config", { + currentTime: currentTime, + envConfigEntry: envConfigEntry + }); + processEnvDataSources({ + envConfigEntry: envConfigEntry, + currentTime: currentTime + }, cb); + }, envCb); + } + ], function(err, result) { + callback(err, result); + }); +} + +module.exports = { + updateEnvDataSourceCache: updateEnvDataSourceCache +}; \ No newline at end of file diff --git a/lib/dataSourceUpdater/lib/handlers/updateDataSourceCache.js b/lib/dataSourceUpdater/lib/handlers/updateDataSourceCache.js new file mode 100644 index 0000000..ae3529f --- /dev/null +++ b/lib/dataSourceUpdater/lib/handlers/updateDataSourceCache.js @@ -0,0 +1,50 @@ +var fhForms = require('fh-forms'); +var log = require('../logger').getLogger(); +var _ = require('underscore'); + + +/** + * Updating The Cache For A Single Data Source + * @param params + * - currentTime: + * - mongoUrl: + * - dataSourceId: + * - data: [optional], + * - error: [optional] + * @param cb + */ +function updateDataSource(params, cb) { + + log.logger.debug("updateDataSource ", { + uri: params.mongoUrl + }, [{ + _id: params.dataSourceId, + data: params.data, + error: params.error + }]); + + var dsData = { + _id: params.dataSourceId + }; + + dsData.dataError = params.error; + dsData.data = params.error ? 
[] : params.data; + + //Need To Update The Data Source Cache + fhForms.core.dataSources.updateCache({ + uri: params.mongoUrl + }, [dsData], { + currentTime: params.currentTime + }, function(err, dsUpdateResult) { + dsUpdateResult = dsUpdateResult || {}; + if (err) { + log.logger.error("Error Updating Data Source ", {error: err, params: params}); + } else { + log.logger.debug("Data Source Updated Successully ", dsUpdateResult.validDataSourceUpdates[0]); + } + + //Cache Updated - moving on. + return cb(undefined, _.first(dsUpdateResult.validDataSourceUpdates)); + }); +} +module.exports = updateDataSource; \ No newline at end of file diff --git a/lib/dataSourceUpdater/lib/handlers/updateSingleDataSource.js b/lib/dataSourceUpdater/lib/handlers/updateSingleDataSource.js new file mode 100644 index 0000000..f50cfe1 --- /dev/null +++ b/lib/dataSourceUpdater/lib/handlers/updateSingleDataSource.js @@ -0,0 +1,60 @@ +var async = require('async'); +var requestEndpointData = require('./requestEndpointData'); +var updateDataSourceCache = require('./updateDataSourceCache'); +var log = require('../logger').getLogger(); + +/** + * Calling A Service Endpoint And Updating A Data Source + * @param params + * - currentTime: + * - fullUrl: + * - accessKey: + * - mongoUrl: + * - dataSourceId: + * - error: [optional] + * @param cb + */ +module.exports = function(params, cb) { + //3.a.e.b Call Endpoint + //3.a.e.c Call Update Data Source Cache + + log.logger.debug("updateSingleDataSource", params); + + async.waterfall([ + function getServiceData(cb) { + + //If there is an error, don't call the service. Just Update The Data Source With The Error + if (params.error) { + return cb(undefined, { + error: params.error + }); + } + + requestEndpointData({ + fullUrl: params.fullUrl, + accessKey: params.accessKey + }, function(err, returnedData) { + return cb(undefined, { + error: err, + data: returnedData + }); + }); + }, + function updateDSCache(endpointParams, cb) { + updateDataSourceCache({ + currentTime: params.currentTime, + mongoUrl: params.mongoUrl, + dataSourceId: params.dataSourceId, + data: endpointParams.data, + error: endpointParams.error + }, function(err) { + if (err) { + log.logger.error("Error Updating Data Source ", err); + } + + return cb(err); + }); + } + + ], cb); +}; \ No newline at end of file diff --git a/lib/dataSourceUpdater/lib/logger/index.js b/lib/dataSourceUpdater/lib/logger/index.js new file mode 100644 index 0000000..0b90273 --- /dev/null +++ b/lib/dataSourceUpdater/lib/logger/index.js @@ -0,0 +1,10 @@ +var log = {}; + +module.exports = { + setLogger: function(logger) { + log.logger = logger; + }, + getLogger: function() { + return log; + } +}; \ No newline at end of file diff --git a/lib/export/AppDataExportRunner.js b/lib/export/AppDataExportRunner.js new file mode 100644 index 0000000..c39e61f --- /dev/null +++ b/lib/export/AppDataExportRunner.js @@ -0,0 +1,127 @@ +var EventEmitter = require('events').EventEmitter; +var util = require('util'); + +var AppdataJobSchema = require('../models/AppdataJobSchema'); +var constants = AppdataJobSchema.statuses; +var async = require('async'); + +var preparation=require('./preparationSteps'); +var appDataExport=require('./appDataExport'); + +var CONSTANTS = require('./constants'); +const START_EVENT = require('../jobs/progressPublisher').START_EVENT; +const PROGRESS_EVENT = require('../jobs/progressPublisher').PROGRESS_EVENT; +const STATUS_EVENT = require('../jobs/progressPublisher').STATUS_EVENT; +const FINISH_EVENT = 
require('../jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('../jobs/progressPublisher').FAIL_EVENT; +const HEARTBEAT_EVENT = CONSTANTS.HEARTBEAT_EVENT; + +var commonJobFunctions = require('./commonJobFunctions'); + +/** + * This object represent the runner that will be in charge of exporting the application collection from the mongo + * database to the destination archive. + * + * The produced archive will be a tar file containing one bson.gz file for each collection. + * + * @param context the context of the export + * @param keepalive the keepalive timeout + * @constructor + */ +function AppDataExportRunner(context, keepalive) { + EventEmitter.call(this); + var self = this; + self.keepalive = keepalive || 30000; + + context.emitter = self; + self.context = context; +} + +util.inherits(AppDataExportRunner, EventEmitter); + +AppDataExportRunner.prototype.run = function() { + + var logger = this.context.logger; + logger.info('Application export started'); + + if (!this.context.appInfo) { + logger.warn('Application not found. Aborting'); + + this.emit(FAIL_EVENT, 'Export failed. Application not found'); + return; + } + + if (this.status === constants.FINISHED || this.status === constants.FAILED) { + logger.info('[APPDATAEXPORT] Application export finished'); + this.emit(STATUS_EVENT, this.appInfo, this.status); + return; + } + if (this.status === constants.INPROGRESS) { + logger.info('[APPDATAEXPORT] Export already in progress, aborting'); + + this.emit(FAIL_EVENT, 'Export failed due to export already in progress'); + return; + } + + startExport.bind(this)(); +}; + + +/** + * Starts the application data export process. + * The method pass a 'context' object to the function composing the export flow. Such context will be shared among all + * the functions, and each function will be able to enrich it with new informations. + */ +function startExport() { + var self = this; + self.interval = this.heartbeat(); + var context = self.context; + + var logger = context.logger; + + self.emit(START_EVENT, context.appInfo, 'Export started'); + async.waterfall([ + async.apply(preparation.prepare, context), + // Real export + appDataExport.exportData.bind(self), + commonJobFunctions.registerStorage, + commonJobFunctions.updateModelWithStorageData + ], function(err) { + if (self.interval) { + clearInterval(self.interval); + } + if (err) { + logger.error('Export failed', {err: err}); + self.emit(FAIL_EVENT, err, context.appInfo); + } else { + logger.info('Export finished'); + self.emit(FINISH_EVENT, context.appInfo, 'Export finished'); + + // Make sure to update the `progress` field on success. 
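+        // context.progress.total is computed during the preparation steps, so emitting
+        // (total, total) here reports the export job as 100% complete.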
+ self.emit(PROGRESS_EVENT, constants.FINISHED, context.progress.total, context.progress.total); + } + + commonJobFunctions.cleanUp(context, function(cleanUpError) { + if (cleanUpError) { + logger.error('Error cleaning up after exporting', {err: cleanUpError}); + } else { + logger.info('Cleanup executed'); + } + }); + }); +} + +AppDataExportRunner.prototype.heartbeat = function() { + var self = this; + return setInterval(function() { + self.context.jobModel.markModified('modified'); + self.context.jobModel.save(function(err) { + if (err) { + self.context.logger.error('Failed to save job', {err: err}); + } + }); + self.emit(HEARTBEAT_EVENT); + }, self.keepalive); +}; + +module.exports.AppExportRunner = AppDataExportRunner; diff --git a/lib/export/agenda.js b/lib/export/agenda.js new file mode 100644 index 0000000..8541c8b --- /dev/null +++ b/lib/export/agenda.js @@ -0,0 +1,37 @@ +var fhconfig = require('fh-config'); +var logger = require('../util/logger').getLogger(); +var os = require('os'); +var Agenda = require('fh-agenda'); + +var cleanerJob = require('../jobs/appDataExportCleanerJob').cleanerJob; +var appdataScheduler = require('../jobs/appDataRunnerJob'); +var stalledJobsFinder = require('../jobs/appDataStalledJobsFinder'); + +var agenda; + +function getAgendaInstance() { + if (!agenda) { + var mongoConn = fhconfig.mongoConnectionString('mongo'); + logger.debug('agenda mongo connection = ' + mongoConn); + + //Even though the collection is called appDataExportJobs , the submission export jobs are maintained there also. + agenda = new Agenda({ + db: {address: mongoConn, collection: 'appDataExportImportJobs-' + os.hostname()}, + name: 'AppDataExportAgenda-' + os.hostname() + '-' + process.pid, + defaultConcurrency: 1 + }, function() { + require('../jobs/submissions/submissionExportJob')(agenda); + + appdataScheduler(agenda, fhconfig.value('fhmbaas.appdata_jobs.scheduler')); + stalledJobsFinder(agenda, fhconfig.value('fhmbaas.appdata_jobs.stalled_job_finder')); + cleanerJob(agenda, fhconfig.value('fhmbaas.appdataexport.cleaner')); + + agenda.on('fail', function(err, job) { + logger.warn("app data job failure", {err: err, job: job}); + }); + }); + } + return agenda; +} + +module.exports=getAgendaInstance; \ No newline at end of file diff --git a/lib/export/appDataExport.js b/lib/export/appDataExport.js new file mode 100644 index 0000000..efdcb53 --- /dev/null +++ b/lib/export/appDataExport.js @@ -0,0 +1,72 @@ +var async = require('async'); + +const mongoExportFunctions = require('./mongoExportFunctions'); + +/** + * Strip off the ditch prefix as it does not contain any information that is needed + * to import the app. + * + * The ditch prefix consinst of + * (0) fh + * (1) -- + * (2) collection name + * + * and the separator is underscore. We only want (2). However the collecion name can also contain + * underscore, so whe can't just split and take the last item. + * + * @param collectionName Original collection name + */ +function formatCollectionName(collectionName) { + if (collectionName && collectionName.indexOf("_") >= 0) { + var parts = collectionName.split("_"); + + // At least two prefix items and one collection name + // There can be more items if the collection name contains underscores + if (parts.length >= 3) { + return parts.splice(2).join("_"); + } + + return collectionName; + } else { + return collectionName; + } +} + +/** + * This function triggers the export. 
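+ * Each collection is dumped to a '<collectionName>.bson.gz' file (with the ditch prefix stripped
+ * from the name) and the resulting files are then bundled into a single tar archive.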
+ * + * @param context the applicaton data export context + * @param cb + */ +function performExport(context, cb) { + var logger = context.logger; + + // Wrap the callback so that progress is automatically sent + cb = context.progress.wrappCallback(cb); + + context.archive = { + gzipFiles: [] + }; + + async.eachSeries(context.collections, function(collectionName, callback) { + context.progress.next(); + var targetFileName = formatCollectionName(collectionName) + '.bson.gz'; + mongoExportFunctions.mongoExportWrapper(context, collectionName, targetFileName, function(err) { + context.progress.next(); + callback(err); + }); + + }, function(err) { + if (!err) { + mongoExportFunctions.createExportArchive(context, function(err) { + cb(err, context); + }); + } else { + logger.error('Error exporting from mongo', err); + return cb(err, context); + } + }); +} + +module.exports.exportData = performExport; +module.exports.formatCollectionName = formatCollectionName; diff --git a/lib/export/appDataExportController.js b/lib/export/appDataExportController.js new file mode 100644 index 0000000..1932478 --- /dev/null +++ b/lib/export/appDataExportController.js @@ -0,0 +1,112 @@ +var logger = require('../util/logger').getLogger(); +var models = require('fh-mbaas-middleware').models; +var async = require('async'); + +var AppdataJobSchema = require('../models/AppdataJobSchema'); +var JOB_TYPES = require('../models/BaseImportExportJobSchema').types; +var AppdataJob = require('../models').AppdataJob; + +var status = AppdataJobSchema.statuses; +/** + * Checks that required parameters are passed om + * @param params the parameters + * @returns {*} The error message or null if no errors + */ +function validateParams(params) { + if (!params.domain) { + return 'No domain specified'; + } + if (!params.environment) { + return 'No environment specified'; + } + if (!params.appid) { + return 'No app guid specified'; + } + + return null; +} + +function createExportJob(params, cb) { + var job = new AppdataJob(); + job.jobType = AppdataJobSchema.types.EXPORT; + job.domain = params.domain; + job.environment = params.environment; + job.appid = params.appid; + job.metadata = { + fileSize: 0, + fileDeleted: null, + filePath: null, + fileId: null, + + // Store the value here so that we can stop the app when + // the job gets actually started + stopApp: params.stopApp + }; + + job.save(function(err) { + return cb(err, job); + }); +} + +function checkAppExists(appId, env, cb) { + var AppMbaasModel = models.getModels().AppMbaas; + AppMbaasModel.findOne({guid: appId, environment: env}, function(err, app) { + if (err) { + return cb(err); + } + + if (!app) { + return cb('No application with id "' + appId + '" could be found'); + } else { + return cb(); + } + }); +} + +module.exports = { + startExport: function(params, callback) { + logger.info('Got export request', params); + var valid_error = validateParams(params); + if (valid_error) { + logger.info('Parameter validation failed', valid_error); + return callback({code: 400, message: valid_error}); + } + var appId = params.appid; + var env = params.environment; + + async.waterfall([ + async.apply(checkAppExists, appId, env), + async.apply(AppdataJob.findOne.bind(AppdataJob), { + jobType: JOB_TYPES.EXPORT, + appid: appId, + status: { + $in: [status.QUEUED, status.INPROGRESS] + } + }) + ], function(err, task) { + if (err) { + logger.error('[APPDATAEXPORT] Error searching existing tasks', err); + return callback(err); + } + + logger.debug('task : ' + task); + + if (task) { + 
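+        // An EXPORT job for this app is already queued or in progress, so reject the
+        // request with a 409 Conflict instead of scheduling a duplicate export.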
logger.warn('[%s] export is already in progress', appId); + return callback({code: 409, message: 'Export already in progress'}); + } + + logger.debug('[%s] Creating export task db model', appId, params); + createExportJob(params, function(err, task) { + if (err) { + logger.error('[APPDATAEXPORT] Error creating task', err); + return callback(err); + } + logger.debug('[%s] model saved. Run agenda task now', appId); + + logger.debug('[%s] export task created. jobId = %s', appId, task._id.toString()); + return callback(null, task.toJSON()); + }); + }); + } +}; diff --git a/lib/export/cleaner/appDataExportCleanerRunner.js b/lib/export/cleaner/appDataExportCleanerRunner.js new file mode 100644 index 0000000..0b13b2d --- /dev/null +++ b/lib/export/cleaner/appDataExportCleanerRunner.js @@ -0,0 +1,176 @@ +var fs = require('fs'); +var util = require('util'); +var EventEmitter = require('events').EventEmitter; +var async = require('async'); +var fhConfig = require('fh-config'); +var log = require('../../util/logger'); +var storage = require('../../storage/index'); +var path = require('path'); +var rimraf = require('rimraf'); + +const FINISH_EVENT = require('../../jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('../../jobs/progressPublisher').FAIL_EVENT; +const PROGRESS_EVENT = require('../../jobs/progressPublisher').PROGRESS_EVENT; + +var ExportJob = require('../../models/index').AppdataJob; + +var DEFAULT_GRACE_TIME = 10; + +var GRACE_TIME = fhConfig.value('fhmbaas.appdataexport.cleaner.grace_time') || DEFAULT_GRACE_TIME; + +/** + * Updates the file deletion status into the database + * + * @param doc the document to be updated + * @param cb + */ +function updateStatus(context, doc, cb) { + doc.updateMetadata('fileDeleted', true); + doc.save(function(err) { + if (err) { + context.logger.error('Error updating delete status', {err: err, path: doc.metadata.filePath, id: doc.id}); + } + cb(err); + }); +} + +/** + * Deletes the file associated with the document from the filestore + * + * @param doc the document + * @param cb + * @returns {*} + */ +function deleteFileStore(doc, cb) { + var self = this; + + if (!doc.metadata.fileId) { + // No filestore to be deleted... + self.context.logger.warn('FileID not set into the exportjob object'); + return cb(); + } + + self.emit(PROGRESS_EVENT, 'Deleting filestore ' + doc.metadata.fileId); + + storage.deleteFile(doc.metadata.fileId, function(err) { + if (err) { + self.context.logger.error('Error deleting filestore', {err: err, path: doc.metadata.filePath, id: doc.id}); + } + cb(err); + }); +} + +function deletePathRecursive(context, doc, cb) { + + if (!doc.metadata.filePath) { + context.logger.warn('Filepath is null', {doc: doc}); + + // Not returning any error: we want the fileDeleted flag to be changed to 'true' to stop the cleaner + // working on this document. + return cb(); + } + + var parent = path.dirname(doc.metadata.filePath); + + context.logger.info('Deleting export directory', {path: parent}); + + async.series([ + async.apply(fs.stat, parent), + async.apply(rimraf, parent) + ], function(err) { + if (err) { + context.logger.error('Error deleting directory', {path: parent, err: err}); + } + cb(err); + }); +} + +/** + * Orchestrate the file deletion flow for a single document. + * + * @param doc the document attached to the file to be deleted. 
+ * @param cb + */ +function deleteFile(doc, cb) { + + var self = this; + + self.context.logger.info('Deleting expired file', {id: doc.id, path: doc.metadata.filePath, date: doc.created}); + + self.emit(PROGRESS_EVENT, 'Deleting file ' + doc.metadata.filePath); + + async.series([ + async.apply(deletePathRecursive, self.context, doc), + async.apply(updateStatus.bind(self), self.context, doc), + async.apply(deleteFileStore.bind(self), doc) + ], function(err) { + if (err) { + self.context.logger.warn('Error deleting file', {err:err, id: doc.id, path: doc.metadata.filePath, date: doc.created}); + } + + // do not send back the error: we want the deletion flow to keep on with the other files. + cb(); + }); +} + +/** + * Cleaning job constructor. + * + * @param context the context of the execution. It can contains a custom logger and a custom query to be used to + * retrieve the documents. + * + * @constructor + */ +function AppDataExportCleanerRunner(context) { + this.context = context; + if (!this.context.logger) { + this.context.logger = log.getLogger(); + } +} + +util.inherits(AppDataExportCleanerRunner, EventEmitter); + +/** + * Runs the cleaning job. + */ +AppDataExportCleanerRunner.prototype.run = function() { + + var self = this; + + var olderThanDate = new Date(); + olderThanDate.setDate(olderThanDate.getDate() - GRACE_TIME); + + var statusFilter = { $or: [ {status: 'complete'}, {status: 'failed'}] }; + var fileDeletedFilter = { $or: [ { 'metadata.fileDeleted': { $exists: false}}, { 'metadata.fileDeleted': { $ne: true}} ] }; + + var query = self.context.query || { + created: {$lte: olderThanDate }, + $and: [statusFilter, fileDeletedFilter] + }; + + ExportJob.find(query, function(err, docs) { + if (err) { + self.context.logger.error('Error executing ExportJob query', err); + return self.emit(FAIL_EVENT, err); + } + + if (!docs || docs.length === 0) { + self.context.logger.info('No expired documents found'); + return self.emit(FINISH_EVENT); + } + + // Limit the number of files to delete at the same time + async.eachLimit(docs, + 10, + deleteFile.bind(self), + function(err) { + if (err) { + self.emit(FAIL_EVENT, err); + } else { + self.emit(FINISH_EVENT, err); + } + }); + }); +}; + +module.exports.AppDataExportCleanerRunner = AppDataExportCleanerRunner; \ No newline at end of file diff --git a/lib/export/commonJobFunctions/cleanUp.js b/lib/export/commonJobFunctions/cleanUp.js new file mode 100644 index 0000000..d8b752d --- /dev/null +++ b/lib/export/commonJobFunctions/cleanUp.js @@ -0,0 +1,49 @@ +var async = require('async'); +var path = require('path'); +var exec = require('child_process').exec; + +/** + * Remove all the temporary files. + * + * @param context the context of the current export process. + * @param cb + * @returns {*} + */ +function removeTempFiles(context, cb) { + if (!context.path) { + // Nothing to clean + return cb(); + } + + var command = 'rm -f ' + path.join(context.path, '*.gz'); + exec(command, cb); +} + +/** + * + * Generic cleanup module for export jobs. 
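+ * When a database connection was opened it is closed and any temporary *.gz files in
+ * context.path are removed; otherwise there is nothing to clean.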
+ * + * @param context + * @param cb + * @returns {*} + */ +module.exports = function cleanUp(context, cb) { + var logger = context.logger; + logger.info('Cleaning up'); + if (context.db) { + async.parallel( + [ + context.db.close.bind(context.db), + async.apply(removeTempFiles, context) + ], function(err) { + if (err) { + logger.error('Error cleaning up', {err: err}); + } + return cb(err); + } + ); + } else { + // If the DB is not connected there is nothing to clean up + return cb(); + } +}; \ No newline at end of file diff --git a/lib/export/commonJobFunctions/index.js b/lib/export/commonJobFunctions/index.js new file mode 100644 index 0000000..7142efa --- /dev/null +++ b/lib/export/commonJobFunctions/index.js @@ -0,0 +1,7 @@ + + +module.exports = { + cleanUp: require('./cleanUp'), + updateModelWithStorageData: require('./updateModelWithStorageData'), + registerStorage: require('./registerStorage') +}; \ No newline at end of file diff --git a/lib/export/commonJobFunctions/registerStorage.js b/lib/export/commonJobFunctions/registerStorage.js new file mode 100644 index 0000000..c7b6568 --- /dev/null +++ b/lib/export/commonJobFunctions/registerStorage.js @@ -0,0 +1,23 @@ +var storage = require('../../storage'); +/** + * Common function to register a file for export + * @param context + * @param cb + */ +module.exports = function registerStorage(context, cb) { + + // Wrap the callback so that progress is automatically sent + cb = context.progress.wrappCallback(cb); + + var logger = context.logger; + logger.info('Registering file into the storage', {path: context.archive.path}); + storage.registerFile(context.archive.path, function(err, fileModel) { + if (err) { + logger.error('Failed registering into the storage', {path: context.archive.path, err: err}); + return cb(err); + } + + context.archive.fileId = fileModel.id; + return cb(null, context); + }); +}; \ No newline at end of file diff --git a/lib/export/commonJobFunctions/updateModelWithStorageData.js b/lib/export/commonJobFunctions/updateModelWithStorageData.js new file mode 100644 index 0000000..a9c65f0 --- /dev/null +++ b/lib/export/commonJobFunctions/updateModelWithStorageData.js @@ -0,0 +1,21 @@ +/** + * Common module to update storage data associated with an export + * @param context + * @param cb + */ +module.exports = function updateModelWithStorageData(context, cb) { + // Wrap the callback so that progress is automatically sent + cb = context.progress.wrappCallback(cb); + + var logger = context.logger; + logger.info('Storing storage pointer inside the job model', {fileId: context.archive.fileId}); + var exportJob = context.jobModel; + exportJob.updateMetadata("fileId", context.archive.fileId); + exportJob.save(function(err) { + if (err) { + logger.error('Failed storing storage pointer inside the job model', {path: context.archive.fileId, err: err}); + } + + return cb(err); + }); +}; \ No newline at end of file diff --git a/lib/export/commonPreparationSteps/connectToDatabase.js b/lib/export/commonPreparationSteps/connectToDatabase.js new file mode 100644 index 0000000..fc290a6 --- /dev/null +++ b/lib/export/commonPreparationSteps/connectToDatabase.js @@ -0,0 +1,26 @@ +'use strict'; + +const MongoClient = require('mongodb').MongoClient; + + +/** + * Connects to the database. + * + * @param context an object containing the details of the collections to be exported: + * - mongoUri : the URI to be used to connect to the mongo db + * - collections : the name of the collections. 
+ * - exportJob : a database object to be used to persist export info + * INPUT: context {exportJob:exportJob, uri: mongoUri, collections: collections} + * OUTPUT: err, context {exportJob: exportJob, uri: mongoUri, collections: collections, db: mongoDbConnection } + * @param cb the callback. + */ +function connectToDb(context, cb) { + const logger = context.logger; + logger.info('Connecting to app database', {uri: context.uri}); + MongoClient.connect(context.uri, function(err, db) { + context.db = db; + cb(err, context); + }); +} + +module.exports = connectToDb; diff --git a/lib/export/commonPreparationSteps/createOutputDir.js b/lib/export/commonPreparationSteps/createOutputDir.js new file mode 100644 index 0000000..5cbfcfb --- /dev/null +++ b/lib/export/commonPreparationSteps/createOutputDir.js @@ -0,0 +1,26 @@ +var mkdirp = require('mkdirp'); + +/** + * Creates the export directory to be used to save the exported files. + * + * @param context + * @param cb the callback + */ +function createOutputDir(context, cb) { + + // Wrap the callback so that progress is automatically sent + cb = context.progress.wrappCallback(cb); + + var logger = context.logger; + logger.info('Creating output directory', context.outputPath); + mkdirp(context.outputPath, function(err) { + if (err) { + return cb(err); + } + + context.path = context.outputPath; + return cb(err, context); + }); +} + +module.exports = createOutputDir; diff --git a/lib/export/commonPreparationSteps/index.js b/lib/export/commonPreparationSteps/index.js new file mode 100644 index 0000000..b7fbe5c --- /dev/null +++ b/lib/export/commonPreparationSteps/index.js @@ -0,0 +1,7 @@ + +module.exports = { + connectToDatabase: require('./connectToDatabase'), + retrieveCollectionsSize: require('./retrieveCollectionSize'), + reserveSpaceIfAvailable: require('./reserveSpace'), + createOutputDir: require('./createOutputDir') +}; diff --git a/lib/export/commonPreparationSteps/reserveSpace.js b/lib/export/commonPreparationSteps/reserveSpace.js new file mode 100644 index 0000000..f9de44d --- /dev/null +++ b/lib/export/commonPreparationSteps/reserveSpace.js @@ -0,0 +1,87 @@ +var async = require('async'); +var diskspace = require('diskspace'); +var baseJobSchema = require("../../models/BaseImportExportJobSchema")(); +var jobStates = baseJobSchema.statuses; +var jobTypes = baseJobSchema.types; + +function retrieveTotalReservedSpace(context, job, cb) { + var logger = context.logger; + logger.info('Retrieving already allocated space'); + job.aggregate([{ + $match: { + $and: [{ + $or: [{status: jobStates.INPROGRESS}, {status: jobStates.QUEUED}] + }, { + jobType: jobTypes.EXPORT + }] + } + }, { + $group: { + _id: null, + total: {$sum: "$metadata.fileSize"} + } + }], function(err, result) { + if (err) { + return cb(err); + } + + return cb(null, result[0] ? result[0].total : 0); + }); +} + +/** + * Reserve the space for the export by setting the size attribute inside the task object. + * + * @param job the Job type (App or Submission) + * @param context an object containing the details of the app to be exported: + * - db : connection to the app mongo database + * - collections : the names of the collections owned by the application. 
+ * - size : total size in byte of the application collections + * INPUT: context {exportJob:exportJob,db: mongoConnection, collections: appCollections, size: totalSize} + * OUTPUT: err, context {exportJob:exportJob,db: mongoConnection, collections: appCollections, size: totalSize } + * @param cb the callback + */ +function reserveSpaceIfAvailable(job, context, cb) { + + // Wrap the callback so that progress is automatically sent + cb = context.progress.wrappCallback(cb); + + var logger = context.logger; + logger.info('Reserving space'); + var outDir = context.outputDir; + + async.waterfall( + [ + async.apply(retrieveTotalReservedSpace, context, job), + function(totalReservedSpace, callback) { + diskspace.check(outDir, function(err, total, free) { + if (err) { + return callback(err); + } + callback(null, free - totalReservedSpace); + }); + } + ], function(err, freeSpace) { + if (err) { + logger.error('Error detecting free space', err); + return cb(err); + } + if (freeSpace < context.size) { + return cb('No enough free space. Required: ' + context.size + ' available : ' + freeSpace); + } + // Reserve free space... + var exportJob = context.jobModel; + + exportJob.updateMetadata("fileSize", context.size); + exportJob.save(function(err) { + if (err) { + logger.error('Error updating export size to the database', {err: err}); + } + return cb(err, context); + }); + + } + ); +} + +module.exports = reserveSpaceIfAvailable; diff --git a/lib/export/commonPreparationSteps/retrieveCollectionSize.js b/lib/export/commonPreparationSteps/retrieveCollectionSize.js new file mode 100644 index 0000000..58eedde --- /dev/null +++ b/lib/export/commonPreparationSteps/retrieveCollectionSize.js @@ -0,0 +1,37 @@ +var async = require('async'); + +/** + * Computes the total size of collections (in bytes). + * + * @param context an object containing the details of the app to be exported: + * - db : connection to the app mongo database + * - collections : the names of the collections owned by the application. 
+ * - exportJob : a database object to be used to persist export info + * INPUT: context {exportJob: exportJob, db: mongoConnection, collections: appCollections} + * OUTPUT: err, context {exportJob:exportJob, db: mongoConnection, collections: collections, size: totalSize } + * @param cb the callback + */ +function retrieveCollectionsSize(context, cb) { + // Wrap the callback so that progress is automatically sent + cb = context.progress.wrappCallback(cb); + + var logger = context.logger; + logger.info('Retrieving collections size'); + context.size = 0; + + async.eachLimit(context.collections, 10, function(collectionName, callback) { + context.db.collection(collectionName).stats(function(err, st) { + if (err) { + logger.error('Failure getting collection size', {collectionName: collectionName, err: err}); + } else { + context.size += st.size; + } + callback(err); + }); + }, function(err) { + logger.info('Estimated export size', {size: context.size}); + return cb(err, context); + }); +} + +module.exports = retrieveCollectionsSize; diff --git a/lib/export/constants.js b/lib/export/constants.js new file mode 100644 index 0000000..1eb5740 --- /dev/null +++ b/lib/export/constants.js @@ -0,0 +1,3 @@ + +module.exports.MONGO_DEFAULT_PORT = 27017; +module.exports.HEARTBEAT_EVENT = 'heartbeat'; \ No newline at end of file diff --git a/lib/export/index.js b/lib/export/index.js new file mode 100644 index 0000000..6501f23 --- /dev/null +++ b/lib/export/index.js @@ -0,0 +1,4 @@ + +module.exports = { + scheduler: require('./agenda') +}; \ No newline at end of file diff --git a/lib/export/mongoExportFunctions/constants.js b/lib/export/mongoExportFunctions/constants.js new file mode 100644 index 0000000..ac417b7 --- /dev/null +++ b/lib/export/mongoExportFunctions/constants.js @@ -0,0 +1,2 @@ + +module.exports.MONGO_DEFAULT_PORT = 27017; \ No newline at end of file diff --git a/lib/export/mongoExportFunctions/createExportArchive.js b/lib/export/mongoExportFunctions/createExportArchive.js new file mode 100644 index 0000000..feadb71 --- /dev/null +++ b/lib/export/mongoExportFunctions/createExportArchive.js @@ -0,0 +1,75 @@ +var async = require('async'); +var child_process = require('child_process'); +var path = require('path'); +const fs = require('fs'); + +/** + * This function spawns the 'tar' command to put all the produced bson.gz files inside one single tar file. + * + * @param context + * @param context.path the 'current working directory'. It must be the directory containing the bson.gz files. + * @param context.archive.collectionFiles The lis of files to be put inside the tar file. + * @param outputFileName the file to be produced. + * @param cb + */ +function tar(context, outputFileName, cb) { + var logger = context.logger; + + var cwd = context.path; + var collectionFiles = context.archive.gzipFiles; + + var tarParams = ['cf', outputFileName].concat(collectionFiles); + + var tarProcess = child_process.spawn('tar', tarParams, {cwd: cwd}); + + tarProcess.on('close', function(code) { + if (code === 0) { + logger.info('TAR file created', {cwd: cwd, filename: outputFileName}); + return cb(null); + } else { + logger.info('Error preparing TAR file', {cwd: cwd, filename: outputFileName, code: code}); + return cb('Error preparing TAR file. 
Code: ' + code); + } + }).on('error', function(err) { + logger.error('Error preparing TAR file', {cwd: cwd, filename: outputFileName, err: err}); + return cb(err); + }); +} + +/** + * Takes from the context the list of created GZ files containing the app collections and put them inside the + * destination tar file. + * @param context the context + * @param cb + */ +module.exports = function createExportArchive(context, cb) { + var logger = context.logger; + + logger.info('Creating export archive', {outDir: context.path, fileName: 'export.tar'}); + async.series([ + async.apply(tar, context, 'export.tar'), + async.apply(fs.stat, path.join(context.path, 'export.tar')) + ], function(err, stats) { + if (!err) { + context.archive = { + path: path.join(context.path, 'export.tar'), + size: stats[1].size + }; + + // updating job data + var exportJob = context.jobModel; + exportJob.updateMetadata("filePath", context.archive.path); + exportJob.updateMetadata("fileSize", context.archive.size); + exportJob.save(function(err) { + if (err) { + logger.error('Error updating file and size in mongo', {err: err}); + } + + return cb(err, context); + }); + } else { + logger.error('Error creating TAR archive', err); + return cb(err, context); + } + }); +}; \ No newline at end of file diff --git a/lib/export/mongoExportFunctions/getSecondaryReplSetHost.js b/lib/export/mongoExportFunctions/getSecondaryReplSetHost.js new file mode 100644 index 0000000..375d400 --- /dev/null +++ b/lib/export/mongoExportFunctions/getSecondaryReplSetHost.js @@ -0,0 +1,46 @@ +const CONSTANTS = require('./constants'); +const exec = require('child_process').exec; + +/** + * Return the host name of the secondary replica set (if possible). It is ok + * to use `child_process#exec` here because the expected output is either an + * error message or very small. + * + * The command that is run on mongo is using the `rs#status` method to get + * replica set data. It will first check if the `members` property is present. + * If not then this setup does not have replica sets. + */ +module.exports = function getSecondaryReplSetHost(cb) { + var command = [ + 'mongo --quiet --eval', + ' "this.rs.status().members && this.rs.status().members.filter(function (x) { return x.state === 2 })[0].name"' + ].join(''); + + exec(command, function(err, stdout, stderr) { + if (err) { + return cb(err); + } + + // If the mongo db does not have replica sets, an error will + // be written to stderr + if (stderr || !stdout) { + return cb(null, null); + } + + var result = { + port: CONSTANTS.MONGO_DEFAULT_PORT + }; + + // URL can also contain the port. Can't use `url#parse` here because its + // not a complete URL. 
+ if (stdout.indexOf(":") > 0) { + var parts = stdout.split(":"); + result.host = parts[0].trim(); + result.port = parts[1].trim(); + } else { + result.host = stdout.trim(); + } + + return cb(null, result); + }); +}; diff --git a/lib/export/mongoExportFunctions/index.js b/lib/export/mongoExportFunctions/index.js new file mode 100644 index 0000000..aba29b2 --- /dev/null +++ b/lib/export/mongoExportFunctions/index.js @@ -0,0 +1,6 @@ + +module.exports = { + createExportArchive: require('./createExportArchive'), + getSecondaryReplSetHost: require('./getSecondaryReplSetHost'), + mongoExportWrapper: require('./mongoExportWrapper') +}; \ No newline at end of file diff --git a/lib/export/mongoExportFunctions/mongoExportWrapper.js b/lib/export/mongoExportFunctions/mongoExportWrapper.js new file mode 100644 index 0000000..5490894 --- /dev/null +++ b/lib/export/mongoExportFunctions/mongoExportWrapper.js @@ -0,0 +1,78 @@ +var mongoDbUri = require('mongodb-uri'); +const zlib = require('zlib'); +var child_process = require('child_process'); +var path = require('path'); +const fs = require('fs'); + +var getSecondaryReplSetHost = require('./getSecondaryReplSetHost'); + +const CONSTANTS = require('./constants'); + +/** + * This function spawns the mongodump command to export each single collection composing the application data. + * @param context the export operation context + * @param collectionName the name of the collection to be exported + * @param targetFileName the file to export to + * @param cb + */ +module.exports = function mongoExportWrapper(context, collectionName, targetFileName, cb) { + var logger = context.logger; + + var uriObject = mongoDbUri.parse(context.uri); + + logger.info('Exporting collection', {name: collectionName}); + + getSecondaryReplSetHost(function(err, result) { + if (err) { + logger.error('Error querying replica sets', {err: err}); + return cb(err); + } + + // If we are in a setup with replica sets the host variable will be set to + // the host of the secondary instance. We prefer to run mongodump there. If + // it is not set we just use the host from the URL. + var dumpHost = (result && result.host) || uriObject.hosts[0].host; + var dumpPort = (result && result.port) || uriObject.hosts[0].port || CONSTANTS.MONGO_DEFAULT_PORT; + + logger.info('Using host ' + dumpHost + ':' + dumpPort + ' to run mongodump'); + + var mongodump = child_process.spawn('mongodump', + [ + '--host', dumpHost, + + // Use the Mongodb default port if the uri does not contain any port information + // This is in line with the Mongodb connection string format: + // https://docs.mongodb.com/manual/reference/connection-string/ + '--port', dumpPort, + '-u', uriObject.username, + '-p', uriObject.password, + '-d', uriObject.database, + '-c', collectionName, + '-o', '-' + ]); + + var targetFilePath = path.join(context.path, targetFileName); + var outStream = fs.createWriteStream(targetFilePath); + + var gzip = zlib.createGzip(); + + //Listening for the close event on the file stream, as the pipe chain can only be considered complete when all files are written to disk + //If an attempt to tar the gz files as they are changing, it will cause error code 1 (See https://www.gnu.org/software/tar/manual/html_section/tar_19.html) + outStream.on('close', function() { + logger.info('Collection exported', {collectionName: collectionName}); + + //Flush may be redundant here as the stream has completed all the way to disk and the file handle has closed. 
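+    //The produced file name is recorded in context.archive.gzipFiles so that createExportArchive
+    //can later add it to the final tar file.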
+ gzip.flush(function() { + context.archive.gzipFiles.push(targetFileName); + return cb(null); + }); + + }).on('error', function(err) { + logger.error('Error exporting collection', {collectionName: collectionName, err: err}); + return cb(err); + }); + + //Piping after registering events in case of an immediate emittion of an error. + mongodump.stdout.pipe(gzip).pipe(outStream); + }); +}; diff --git a/lib/export/preparationSteps.js b/lib/export/preparationSteps.js new file mode 100644 index 0000000..423123d --- /dev/null +++ b/lib/export/preparationSteps.js @@ -0,0 +1,173 @@ +var async = require('async'); +var ditchhelper = require('../util/ditchhelper'); +var AppdataJob = require('../models').AppdataJob; +var path = require('path'); +var common = require('../util/common'); +var dfutils = require('../util/dfutils'); +var _ = require('underscore'); +var commonPreparationSteps = require('./commonPreparationSteps'); + +/** + * This function retrieves the connection data for a given application. + * If the application has been already migrated, the connection data is inside the appInfo object, otherwise + * a fh-ditch endpoint must be called. + * INPUT: {appInfo: appInfo, exportJob: exportJob} + * OUTPUT: {appInfo: appInfo, exportJob: exportJob, uri: mongoURI, collections: appCollections} + * @param context the context of the execution: {appInfo: appInfo} + * @param cb the callback + * @returns {*} + */ +function retrieveConnectionData(context, cb) { + var appInfo = context.appInfo; + var logger = context.logger; + + logger.info('Retrieving database connection data'); + + // getting database connection info + if (appInfo.dbConf) { + context.uri = common.formatDbUri(appInfo.dbConf); + return cb(null, context); + } else { + logger.info("App Not migrated. Invoking fh-ditch"); + return ditchhelper.getAppInfo(appInfo.name, function(err, data) { + if (err) { + logger.error('Error invoking ditch', {err: err}); + return cb(err, context); + } + logger.debug('Answer from ditch received', {data: data}); + context.uri = data.uri; + context.collections = data.collections; + return cb(null, context); + }); + } +} + +/** + * Retrieve, if needed (migrated apps) the names of the callections composing the app. + * + * @param context an object containing the details of the app to be exported: + * - appInfo: the application info object + * - db : connection to the app mongo database + * - collections : the names of the collections owned by the application. + * - exportJob : a database object to be used to persist export info + * INPUT: context {appInfo: self.appInfo, exportJob:exportJob, db: mongoConnection, collections: appCollections} + * OUTPUT: err, context {appInfo: self.appInfo, exportJob: exportJob, db: mongoConnection, collections: appCollections } + * @param cb the callback. 
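+ * System collections (names starting with 'system.') are filtered out when listing from the database.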
+ */ +function retrieveCollectionsNames(context, cb) { + var logger = context.logger; + logger.info('Retrieving collections names'); + if (!context.appInfo.migrated) { + + // If the app is not migrated, the list of collections has already been returned by ditch (#retrieveConnectionData) + if (context.collections.length === 0) { + return cb('No collection found for app' + context.appInfo.name, context); + } else { + return cb(null, context); + } + } + + var db = context.db; + db.listCollections().toArray(function(err, collections) { + if (err) { + return cb(err); + } + + context.collections = []; + + _.each(collections, function(obj) { + if (obj.name.indexOf('system.') !== 0) { + context.collections.push(obj.name); + } + }); + + logger.debug('Collections retrieved', {collections: context.collections}); + + if (context.collections.length === 0) { + cb('No collection found for app' + context.appInfo.name, context); + } else { + cb(null, context); + } + }); +} + +function initializeProgress(context, cb) { + // We send a progress event before and after each collection: context.collections.length * 2 + // We still have 4 preparation steps and 3 steps after the export + context.progress.total = context.collections.length * 2 + 7; + + cb(null, context); +} + +/** + * Creates a path specific to app exports and adds the path to the context + */ +function addOutputPathToContext(context, cb) { + var appInfo = context.appInfo; + var parent = context.outputDir; + context.outputPath = path.join(parent, appInfo.guid, appInfo.environment, context.jobID); + cb(null, context); +} + +/** + * Check the value of the `stopApp` parameter and if it is set to `true` + * use dfc to stop the app before running the export. This is to prevent + * users from adding new data while the export is running. + * + * @param context + * @param cb + */ +function stopApp(context, cb) { + var exportJob = context.jobModel; + + // wrap the callback so that the progress is sent automatically + cb = context.progress.wrappCallback(cb); + + var stopApp = exportJob.metadata.stopApp + , domain = exportJob.domain + , env = exportJob.environment + , appid = exportJob.appid; + + if (stopApp) { + // dfc expects the app name to be the combination of + // -- + var appName = [domain, appid, env].join("-"); + + context.logger.info('Stopping ' + appName + ' before running export'); + + dfutils.stopApp(domain, env, appName, function(err) { + if (err) { + return cb(err); + } + + return cb(null, context); + }); + } else { + + return cb(null, context); + } +} + +/** + * Implements the preparation steps flow. 
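+ * The waterfall runs retrieveConnectionData -> connectToDatabase -> retrieveCollectionsNames ->
+ * initializeProgress -> retrieveCollectionsSize -> reserveSpaceIfAvailable -> addOutputPathToContext ->
+ * createOutputDir -> stopApp, with each step enriching the shared context object.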
+ * + * @param context the application data export context + * @param cb + */ +function prepare(context, cb) { + async.waterfall([ + async.apply(retrieveConnectionData, context), + commonPreparationSteps.connectToDatabase, + retrieveCollectionsNames, + initializeProgress, + commonPreparationSteps.retrieveCollectionsSize, + async.apply(commonPreparationSteps.reserveSpaceIfAvailable, AppdataJob), + addOutputPathToContext, + commonPreparationSteps.createOutputDir, + stopApp + ], function(err) { + return cb(err, context); + }); +} + +module.exports.prepare = prepare; diff --git a/lib/export/submissions/SubmissionExportRunner.js b/lib/export/submissions/SubmissionExportRunner.js new file mode 100644 index 0000000..7179c49 --- /dev/null +++ b/lib/export/submissions/SubmissionExportRunner.js @@ -0,0 +1,117 @@ +var EventEmitter = require('events').EventEmitter; +var util = require('util'); + +var SubmissionExportJobSchema = require('../../models/SubmissionDataJobSchema'); +var constants = SubmissionExportJobSchema.statuses; +var async = require('async'); + +var preparation=require('./preparationSteps'); +var submissionDataExport=require('./submissionDataExport'); +var commonJobFunctions = require('../commonJobFunctions'); + +var progressPublisher = require('../../../lib/jobs/progressPublisher'); +const PROGRESS_EVENT = progressPublisher.PROGRESS_EVENT; +const STATUS_EVENT = progressPublisher.STATUS_EVENT; +const FINISH_EVENT = progressPublisher.FINISH_EVENT; +const FAIL_EVENT = progressPublisher.FAIL_EVENT; +const HEARTBEAT_EVENT = 'heartbeat'; + + +/** + * This object represent the runner that will be in charge of exporting the collections containing submissions + * + * The produced archive will be a tar file containing one bson.gz file for each collection. + * + * @param context + * @param context.jobID the agenda job ID + * @param context.exportJob the mongo object that will contain the progress of the export + * @param context.outputDir the destination directory. Here is where the export tree will be produced. + * @param keepalive the keepalive timeout + * @constructor + */ +function SubmissionExportRunner(context, keepalive) { + EventEmitter.call(this); + this.keepalive = keepalive ? keepalive : 30000; + + context.emitter = this; + this.context = context; +} + +util.inherits(SubmissionExportRunner, EventEmitter); + +SubmissionExportRunner.prototype.run = function() { + + var logger = this.context.logger; + logger.info('Submission export started'); + + if (this.status === constants.FINISHED || this.status === constants.FAILED) { + logger.info('[SUBMISSIONEXPORT] Submission export finished'); + this.emit(STATUS_EVENT, this.status); + return; + } + if (this.status === constants.INPROGRESS) { + logger.info('[SUBMISSIONEXPORT] Export already in progress, aborting'); + + this.emit(FAIL_EVENT, 'Export failed due to export already in progress'); + return; + } + + startExport.bind(this)(); +}; + + + +/** + * Starts the submission data export process. + * The method pass a 'context' object to the function composing the export flow. Such context will be shared among all + * the functions, and each function will be able to enrich it with new informations. 
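+ * The flow mirrors the app data export: prepare, export the submission collections, register the
+ * archive in storage and persist the file id on the job model, followed by a final cleanup step.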
+ */ +function startExport() { + var self = this; + self.interval = this.heartbeat(); + var context = self.context; + + var logger = context.logger; + + async.waterfall([ + //Preparing for the submission export + async.apply(preparation.prepare, context), + // Export all submission collections + submissionDataExport.exportData.bind(self), + commonJobFunctions.registerStorage, + commonJobFunctions.updateModelWithStorageData + ], function(err) { + if (self.interval) { + clearInterval(self.interval); + } + if (err) { + logger.error('Export failed', {err: err}); + self.emit(FAIL_EVENT, err); + } else { + logger.info('Export finished'); + self.emit(FINISH_EVENT, 'Export finished'); + + // Make sure to update the `progress` field on success. Number of exported + // collections equals number of total collections now + self.emit(PROGRESS_EVENT, constants.FINISHED, context.collections.length, context.collections.length); + } + + commonJobFunctions.cleanUp(context, function(cleanUpError) { + if (cleanUpError) { + logger.error('Error cleaning up after exporting', {err: cleanUpError}); + } else { + logger.info('Cleanup executed'); + } + }); + }); +} + +SubmissionExportRunner.prototype.heartbeat = function() { + var self = this; + return setInterval(function() { + self.emit(HEARTBEAT_EVENT); + }, self.keepalive); +}; + +module.exports.SubmissionExportRunner = SubmissionExportRunner; +module.exports.HEARTBEAT_EVENT = HEARTBEAT_EVENT; diff --git a/lib/export/submissions/preparationSteps.js b/lib/export/submissions/preparationSteps.js new file mode 100644 index 0000000..4e4f44b --- /dev/null +++ b/lib/export/submissions/preparationSteps.js @@ -0,0 +1,75 @@ +var async = require('async'); +var fhMbaasMiddleware = require('fh-mbaas-middleware'); +var commonPreparationSteps = require('../commonPreparationSteps'); +var common = require('../../util/common'); +var path = require('path'); +var SubmissionExportJob = require('../../models').SubmissionExportJob; + +/** + * + * Getting the mongo config for the environment database + * + * @param context + * @param context.exportJob + * @param context.logger + * @param cb + */ +function getEnvDbConf(context, cb) { + var logger = context.logger; + var subExportJob = context.exportJob; + + //fh-mbaas-middleware is responsible for storing all of the environment database configuration + var models = fhMbaasMiddleware.models.getModels(); + models.Mbaas.findOne({ + domain: subExportJob.domain, + environment: subExportJob.environment + }, function(err, envDb) { + if (err) { + logger.warn("Error getting environment database", err); + return cb("Error getting environment database: " + err); + } + + //If there is no environment database, there can be no submissions. No point in exporting then. + if (!envDb) { + return cb("No Environment Database available for environment " + subExportJob.environment + " and domain " + subExportJob.domain); + } + + //The mongo connection string for the environment database. 
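+      //Assuming common.formatDbUri behaves like the formatter in processEnvDataSources.js, a dbConf
+      //such as {user: 'u', pass: 'p', host: '127.0.0.1', port: 27017, name: 'env-db'} would yield
+      //'mongodb://u:p@127.0.0.1:27017/env-db' (illustrative values only).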
+ context.uri = common.formatDbUri(envDb.dbConf || {}); + + return cb(err, context); + }); +} + +/** + * Creates a path specific to submission exports and adds the + * path to the context + */ +function addOutputPathToContext(context, cb) { + var exportJob = context.exportJob; + context.outputPath = path.join(context.outputDir, exportJob.domain, exportJob.environment, exportJob.jobId); + cb(null, context); +} + +/** + * Preparation steps for submission export + * + * @param context + * @param cb + */ +function prepare(context, cb) { + + //For submission export, we will already know the collections that need to be exported + context.collections = ["formsubmissions", "fileStorage.files", "fileStorage.chunks"]; + + async.waterfall([ + async.apply(getEnvDbConf, context), + commonPreparationSteps.connectToDatabase, + commonPreparationSteps.retrieveCollectionsSize, + async.apply(commonPreparationSteps.reserveSpaceIfAvailable, SubmissionExportJob), + addOutputPathToContext, + commonPreparationSteps.createOutputDir + ], cb); +} + +module.exports.prepare = prepare; diff --git a/lib/export/submissions/submissionDataExport.js b/lib/export/submissions/submissionDataExport.js new file mode 100644 index 0000000..d3b0d6c --- /dev/null +++ b/lib/export/submissions/submissionDataExport.js @@ -0,0 +1,41 @@ +var async = require('async'); + +const mongoExportFunctions = require('../mongoExportFunctions'); + +const PROGRESS_EVENT = require('../../jobs/progressPublisher').PROGRESS_EVENT; +const STATUSES = require('../../models/SubmissionDataJobSchema').statuses; + + +/** + * This function triggers the export. + * + * @param context the applicaton data export context + * @param cb + */ +function exportSubmissions(context, cb) { + var self = this; + var logger = context.logger; + var index = 0; + var total = context.collections.length; + + context.archive = { + gzipFiles: [] + }; + + logger.info("Exporting Submissions Collections"); + + async.eachSeries(context.collections, function(collectionName, cb) { + self.emit(PROGRESS_EVENT, STATUSES.INPROGRESS, ++index, total); + var targetFileName = collectionName + '.bson.gz'; + mongoExportFunctions.mongoExportWrapper(self, context, collectionName, targetFileName, index, total, cb); + }, function(err) { + if (!err) { + mongoExportFunctions.createExportArchive(context, cb); + } else { + logger.error('Error exporting from mongo', err); + return cb(err, context); + } + }); +} + +module.exports.exportData = exportSubmissions; diff --git a/lib/formsUpdater/index.js b/lib/formsUpdater/index.js new file mode 100644 index 0000000..42731e6 --- /dev/null +++ b/lib/formsUpdater/index.js @@ -0,0 +1,4 @@ + +module.exports = { + scheduler: require('./lib/agenda_scheduler') +}; \ No newline at end of file diff --git a/lib/formsUpdater/lib/agenda_scheduler.js b/lib/formsUpdater/lib/agenda_scheduler.js new file mode 100644 index 0000000..6c55168 --- /dev/null +++ b/lib/formsUpdater/lib/agenda_scheduler.js @@ -0,0 +1,85 @@ +var Agenda = require('fh-agenda'); +var os = require('os'); +var _ = require('underscore'); + +/** + * Starts scheduled (agenda) jobs processing, + * + * agenda can be disabled in configuration, + * the list of jobs to be performed by given node is specified in configuration. + * + * Jobs are stored under ./jobs/ directory and referenced by job_name (corresponding to job_name in configuration). 
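+ * For example, a job_name of 'data_source_update' resolves to ./jobs/data_source_update.js (the
+ * data source update job added in this patch).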
+ * + * Sample configuration: + * + * { + * agenda: { + * enabled: true, + * jobs: { + * job_name: { + * // job configuration + * } + * } + * } + * } + * + * @param logger fh-logger instance + * @param config configuration for this fh-messaging process + * @param mongoConnectionString the MongoDB db connection that agenda jobs will be stored in + * @returns {{tearDown: Function}} + */ +module.exports = function( logger, config, mongoConnectionString ) { + var agenda; + + if (config.agenda && !config.agenda.enabled ) { + logger.info( 'Agenda is disabled, skipping' ); + } else { + var jobTypes = ( config.agenda && config.agenda.jobs ) ? Object.keys(config.agenda.jobs) : []; + + if ( !jobTypes.length ) { + logger.info( 'No Agenda jobs specified, skipping' ); + } else { + logger.info( 'Setting up Agenda', process.pid ); + agenda = new Agenda({ + db: {address: mongoConnectionString, collection: 'agendaJobs'}, + name: os.hostname() + '-' + process.pid, + defaultConcurrency: 1, + defaultLockLifetime: 10000 + }, function() { + jobTypes.forEach( function( type ) { + require( './jobs/' + type )( logger, config, agenda ); + }); + + agenda.on('fail', function(err, job) { + logger.warn("Job Fail ", {err: err, job: job}); + }); + + agenda.start(); + logger.info( 'Agenda set up' ); + }); + } + } + + // Public API + return { + /** + * Stops the job queue processing and unlocks currently running jobs. + * + * If agenda wasn't set up, calls callback immediately. + * + * @param callback called when agenda is stopped + */ + tearDown: function( callback ) { + callback = callback || _.noop; + if ( agenda && agenda.stop ) { + logger.info("Stopping Agenda"); + agenda.stop(function() { + logger.info("Agenda stopped"); + callback(); + }); + } else { + callback(); + } + } + }; +}; \ No newline at end of file diff --git a/lib/formsUpdater/lib/jobs/data_source_update.js b/lib/formsUpdater/lib/jobs/data_source_update.js new file mode 100644 index 0000000..a494daa --- /dev/null +++ b/lib/formsUpdater/lib/jobs/data_source_update.js @@ -0,0 +1,25 @@ +var dataSourceUpdater = require('../../../dataSourceUpdater'); + +module.exports = function(logger, config, agenda) { + + var dsUpdater = dataSourceUpdater(logger); + + logger.debug("Setting Up Job ", config.agenda); + + agenda.define('data_source_update', function(job, done) { + logger.info("#data_source_update Starting" ); + + dsUpdater.handlers.updateAllEnvDataSourceCache({}, function(err) { + if (err) { + logger.error("Error Updating Data Sources", {error: err}); + } + logger.info("#data_source_update Finished"); + + done(); + }); + + }); + + agenda.every( config.agenda.jobs.data_source_update.schedule, 'data_source_update' ); +}; + diff --git a/lib/handlers/analytics/messaging.js b/lib/handlers/analytics/messaging.js new file mode 100644 index 0000000..f4b1432 --- /dev/null +++ b/lib/handlers/analytics/messaging.js @@ -0,0 +1,38 @@ +/** + * handles sending app messages to fh-messaging from apps component in the mbaas. + */ +var logger = require('../../util/logger').getLogger(); +var _ = require('underscore'); + +module.exports = function(messagingClient) { + return { + "createAppMessage": function appMessage(req, res, next) { + var messages = req.body; + var topic = req.params.topic; + var sender = req.params.appid; + logger.debug(topic,messages); + if (! topic || ! 
messages) { + return next({"code":400,"message":"no topic"}); + } + res.statusCode = 201; + res.end(); + //pre process check the messages are for the same app as sent them + + logger.debug("app messaging: request ended. Doing post request processing"); + + messagingClient.createAppMessage(topic, validateAppMessages(sender,messages), function done(err) { + if (err) { + logger.warn("app messaging: error occurred sending message to messaging ",err); + } + }); + } + }; +}; + + +function validateAppMessages(sender,messages) { + var toValidate = Array.isArray(messages) ? messages : [messages]; + return _.filter(toValidate, function valid(m) { + return (m && m.guid && m.guid === sender); + }); +} diff --git a/lib/handlers/analytics/metrics.js b/lib/handlers/analytics/metrics.js new file mode 100644 index 0000000..5a6fd24 --- /dev/null +++ b/lib/handlers/analytics/metrics.js @@ -0,0 +1,44 @@ +/** + * handles retrieving metrics from the fh-metrics component in the mbaas. + */ + + +var fhconfig = require('fh-config'); +var metricsConfig = fhconfig.getConfig().rawConfig.fhmetrics; +var fhMetricsClient = require('fh-metrics-client')(metricsConfig); +var _ = require('underscore'); +var async = require('async'); + +module.exports = { + "getMetrics": function metrics(req,res,next) { + if (! req.query.from || ! req.query.to) { + return next({"code":400,message: "expected a to and from query param "}); + } + + var params = { + "from":req.query.from, + "to":req.query.to + }; + + async.parallel([ + function getDomain(callback) { + fhMetricsClient.getAllDomainMetrics(params, function(err, ok) { + callback(err,ok); + }); + }, + function getApp(callback) { + fhMetricsClient.getAllAppMetrics(params, function(err,ok) { + callback(err,ok); + }); + } + ], function done(err, ok) { + if (err) { + return next(err); + } + res.metrics = _.extend(ok[0],ok[1]); + next(undefined,res.metrics); + }); + + + } +}; diff --git a/lib/handlers/analytics/metricsRouter.js b/lib/handlers/analytics/metricsRouter.js new file mode 100644 index 0000000..73b0a48 --- /dev/null +++ b/lib/handlers/analytics/metricsRouter.js @@ -0,0 +1,38 @@ +/** + * + * @type {*|exports|module.exports} + * + */ + +var express = require('express'); +var auth = require('../../middleware/auth'); +var metrics = require('./metrics'); +var fhconfig = require('fh-config'); + +var router = express.Router({ + mergeParams: true +}); + +router.use(auth.admin(fhconfig)); + + +router.get("/",metrics.getMetrics); + +router.use(function analyticsErrorHandler(err, req, res, next) { + if (err.code) { + res.statusCode = err.code || 500; + } + res.json(err); +}); + +router.use(function(req,res) { + if (res.metrics) { + return res.json(res.metrics); + } else { + res.statusCode = 404; + res.json({"message":"no route found"}); + } +}); + +module.exports = router; + diff --git a/lib/handlers/api.js b/lib/handlers/api.js new file mode 100644 index 0000000..ab803ea --- /dev/null +++ b/lib/handlers/api.js @@ -0,0 +1,102 @@ +'use strict'; + +var express = require('express'); +var fhmbaasMiddleware = require('fh-mbaas-middleware'); +var fhconfig = require('fh-config'); +var auth = require('../middleware/auth.js'); +var middleware = require('../middleware/mbaasApp.js'); +var fhamqpjs = require('fh-amqp-js'); +var metricsRouter = require('./analytics/metricsRouter.js'); +var statsRouter = require('./stats/stats_router.js'); +var eventMiddleware = require('../middleware/events'); +var appdataRouter = require('./app/data.js'); +var deleteEnvironmentData = 
require('../services/environment/deleteEnvironmentData.js'); +var logger = require('../util/logger.js').getLogger(); + +var router = new express.Router({ + mergeParams: true +}); + +var DB_CREATION_APP_TYPES = fhconfig.value("auto_create_app_dbs"); + + +router.use(auth.admin(fhconfig)); + + +router.use('/metrics', metricsRouter); +router.use('/stats', statsRouter); + +//to avoid race conditions, we will only set the db conf values on model creation. Since we have a unique composite index added for domain and environment, once the first record is created, the second creation will fail. +//then we will only create the mongo db if the data creation is successful. If the mongo db creation fails for whatever reason, we will delete the model. +router.post('/:domain/:environment/db', fhmbaasMiddleware.envMongoDb.getOrCreateEnvironmentDatabase, function(req, res) { + return res.json({uri: req.mongoUrl}); +}); + +router.post('/apps/:domain/:environment/:appname/migratedb', fhmbaasMiddleware.app.findOrCreateMbaasApp, middleware.createDbMiddleware, middleware.stopAppMiddleware, middleware.migrateDbMiddleware, middleware.notifyAppDbMigration('start'), function(req, res) { + return res.json(req.createDbResult); +}); + +//delete app databases associated with this domain and environment. Delete the environment database and the environment db config +router["delete"]('/:domain/:environment', function deleteEnvironment(req, res,next) { + let domain = req.params.domain; + let environment = req.params.environment; + deleteEnvironmentData(domain,environment,function done(err){ + if (err){ + logger.error("error deleting environment data ",err); + return next(err); + } + res.json({"message":"environment data deleted"}); + }); +}); + +//Deleting The App From The MbaaS. +router['delete']('/apps/:domain/:environment/:appname',eventMiddleware.createAppEvent(fhamqpjs.EventTypes.CORE_APP_DELETE_REQUESTED, "delete requested core to mbaas"), + fhmbaasMiddleware.app.findMbaasApp, + middleware.removeDbMiddleware, function(req, res) { + return res.json(req.resultData); + }); + +router.get('/apps/:domain/:environment/:appname/env', fhmbaasMiddleware.app.findMbaasApp, middleware.modelsInfo, function(req, res) { + return res.json(req.resultData); +}); + +//This route stores deployment information to fhmbaas +router.post('/apps/:domain/:environment/:appname/deploy', fhmbaasMiddleware.app.findOrCreateMbaasApp, fhmbaasMiddleware.app.updateMbaasApp, middleware.createDbForAppTypes(DB_CREATION_APP_TYPES), function(req, res) { + //Finished and no errors.
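+    //Roughly what gets returned below, for illustration only (field names other than guid, url, dbConf and coreHost are assumptions):
+    //  { guid: "abc123", environment: "dev", domain: "acme", url: "https://myapp-dev.example.com", dbConf: { host: "...", port: 27017, name: "...", user: "...", pass: "..." } }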
+ res.json(req.appMbaasModel.toJSON()); +}); + +//Routes for dealing with services +router.use('/:domain/:environment/services', require('../routes/services/router.js')); + + +//All Of The Routes Required For Forms Operations For Each Mbaas Environment +router.use('/:domain/:environment/appforms', require('./forms.js')); + + +/** + * Error Handler For Admin API Requests + */ +router.use(function(err, req, res, next) { // jshint unused:false + return next(err,req,res); +}); + + +router.use('/:domain/:environment/:appid/data', appdataRouter); + +router.get("/:domain/:environment/apps/:guid/events", fhmbaasMiddleware.events.list,end); +router.post("/:domain/:environment/apps/:guid/events", fhmbaasMiddleware.events.create,end); +router.get('/:domain/:environment/apps/:guid/alerts', fhmbaasMiddleware.alerts.list,end); +router.post('/:domain/:environment/apps/:guid/alerts', fhmbaasMiddleware.alerts.create,end); +router.post('/:domain/:environment/apps/:guid/alerts/testEmails', fhmbaasMiddleware.alerts.testEmails, end); +router.put('/:domain/:environment/apps/:guid/alerts/:id', fhmbaasMiddleware.alerts.update,end); +router["delete"]('/:domain/:environment/apps/:guid/alerts/:id', fhmbaasMiddleware.alerts.del,end); +router.get('/:domain/:environment/apps/:guid/notifications', fhmbaasMiddleware.notifications.list,end); + +function end(req,res) { + return res.json(req.resultData); +} + + + +module.exports = router; diff --git a/lib/handlers/app.js b/lib/handlers/app.js new file mode 100644 index 0000000..dbc188d --- /dev/null +++ b/lib/handlers/app.js @@ -0,0 +1,37 @@ +var express = require('express'); +var fhmbaasMiddleware = require('fh-mbaas-middleware'); +var appMessageHandler = require('../handlers/analytics/messaging'); +var fhconfig = require('fh-config'); +var messagingConfig = fhconfig.getConfig().rawConfig.fhmessaging; +var logger = require('../util/logger').getLogger(); +var util = require('util'); +var messagingClient = require('fh-messaging-client')(messagingConfig,logger); +var db = require('./app/db.js'); +var router = express.Router({ + mergeParams: true +}); + +/** + * NOTE fhmbaasMiddleware.auth.app expects that you have a url that specifies path params with exactly /:environment && /:domain && /:appid; you must follow this pattern, otherwise + * you will always receive a 401.
+ * It also requires the following headers 'x-fh-auth-app', 'x-fh-env-access-key' + * //todo auth.app maybe it should be clear about its expected api and do something like auth.app(domain,env,appid) this could then return configured middleware + */ + +router.use('/:domain/:environment/:projectid/:appid/appforms', require('./app/forms.js')); +router.post("/:domain/:environment/:projectid/:appid/events", fhmbaasMiddleware.auth.app, fhmbaasMiddleware.events.create,end); +router.post("/:domain/:environment/:projectid/:appid/message/:topic", fhmbaasMiddleware.auth.app, appMessageHandler(messagingClient).createAppMessage); +router.get("/:domain/:environment/:projectid/:appid/dbconnection", fhmbaasMiddleware.auth.app, db.getConnectionString, end); + +function end(req,res) { + return res.json(req.resultData); +} + +// eslint-disable-next-line no-unused-vars +router.use(function appRouteErrorHandler(err, req, res, next) { + res.statusCode = err.code || 500; + logger.error(util.inspect(err)); + res.json(err); +}); + +module.exports = router; diff --git a/lib/handlers/app/data.js b/lib/handlers/app/data.js new file mode 100644 index 0000000..477ca95 --- /dev/null +++ b/lib/handlers/app/data.js @@ -0,0 +1,79 @@ +var express = require('express'); +var auth = require('../../middleware/auth'); +var fhconfig = require('fh-config'); +var middleware = require('../../middleware/appdata'); +var import_middleware = require('../../middleware/appdata_import'); +var util = require('util'); +var logger = require('../../util/logger').getLogger(); + +/** + * Router for app data import and export + * to be mounted on /mbaas/:domain/:environment/:appid/data/ + * @see docs/api/appdata.yaml + * @type {express.Router} + */ +var router = express.Router({ + mergeParams: true +}); + +router.use(auth.admin(fhconfig)); + +router.param('job_id', middleware.find); + +// List export jobs +router.get('/export', middleware.filteredJobs, function(req, res) { + return res.send(req.jobs); +}); + +// List import jobs +router.get('/import', import_middleware.filteredJobs, function(req, res) { + return res.send(req.jobs); +}); + +// Start export job +router.post('/export', middleware.fillStopApp, middleware.create, function(req, res) { + res.send(req.job); +}); + +// Start import job +router.post('/import', + import_middleware.fillBody, + import_middleware.ensureMigrated, + import_middleware.registerUpload, + import_middleware.create, + import_middleware.generateURLForUpload, function(req, res) { + // fhc is only interested in jobId and url + res.send({ + jobId: req.job._id, + url: req.fileUrl.url + }); + }); + +// Read export job +router.get('/export/:job_id', function(req, res) { + return res.send(req.job); +}); + +// Read import job +router.get('/import/:job_id', function(req, res) { + return res.send(req.job); +}); + +router.post('/export/:job_id', + middleware.ensureFinishedAndRegistered, + middleware.generateURL, + function(req, res) { + res.send(req.fileUrl); + }); + + +// eslint-disable-next-line no-unused-vars +router.use(function appDataErrorHandler(err, req, res, next) { + res.statusCode = err.code || 500; + logger.error(util.inspect(err)); + + // fhc expexts a string here + res.send((err && err.message) || err); +}); + +module.exports = router; \ No newline at end of file diff --git a/lib/handlers/app/db.js b/lib/handlers/app/db.js new file mode 100644 index 0000000..fbaacad --- /dev/null +++ b/lib/handlers/app/db.js @@ -0,0 +1,38 @@ +var fhconfig = require('fh-config'); +var logger = 
require('../../util/logger').getLogger(); +var exec = require('child_process').exec; + +exports.getConnectionString = function(req, res, next) { + + logger.info({appMbaasModel: req.appMbaasModel}); + + var appModel = req.appMbaasModel; + if (! appModel || ! appModel.dbConf) { + return next({"code":404, "message": "no db conf for app"}); + } + + fhconfig.reload([], function reloaded(err) { + if (err) { + return next(err); + } + + var mongoHost = fhconfig.value('mongo.host'); + var replicaSet = fhconfig.value('mongo.replicaset_name'); + + logger.info({mongoHost: mongoHost}); + req.resultData = { + url: buildConnectionString(appModel.dbConf, mongoHost, replicaSet) + }; + return next(); + + }); +}; + +function buildConnectionString(dbConf, mongoHost, replicaSet) { + var connString = "mongodb://" + dbConf.user + ":" + dbConf.pass + "@" + mongoHost + ":" + dbConf.port + "/" + dbConf.name; + if (replicaSet) { + connString += "?replicaSet=" + replicaSet; + } + + return connString; +} diff --git a/lib/handlers/app/forms.js b/lib/handlers/app/forms.js new file mode 100644 index 0000000..be7cd60 --- /dev/null +++ b/lib/handlers/app/forms.js @@ -0,0 +1,100 @@ +var express = require('express'); +var fhForms = require('fh-forms'); +var logger = require('../../util/logger').getLogger(); +var fhmbaasMiddleware = require('fh-mbaas-middleware'); +var handlers = require('./handlers'); +var fhConfig = require('fh-config'); +var multer = require('multer'); + +var router = express.Router({ + mergeParams: true +}); + +//Multipart Form Request Parser +router.use(multer({ + dest: fhConfig.value("fhmbaas.temp_forms_files_dest") +})); + +//Authentication for apps. +router.use(fhmbaasMiddleware.auth.app); + +//Getting The Relevant Environment Database. +router.use(fhmbaasMiddleware.envMongoDb.getEnvironmentDatabase); + +router.use(fhForms.middleware.parseMongoConnectionOptions); + + +//Get Forms Associated With The Project +//The association between projects and forms is stored in the core database. +router.get('/forms', fhForms.middleware.formProjects.getFormIds, fhForms.middleware.forms.listDeployedForms); + +//Get A Single Form +router.get('/forms/:id', fhForms.middleware.formProjects.getFormIds, fhmbaasMiddleware.appformsMiddleware.checkFormAssociation, fhForms.middleware.forms.get); + +//Searching For Forms +router.post('/forms/search', fhForms.middleware.formProjects.getFormIds, fhForms.middleware.forms.search); + +//Submit Form Data For A Project +router.post('/forms/:id/submitFormData', fhForms.middleware.forms.submitFormData); + +//Get Theme Associated With A Project +//At the moment, the full definition is in the core database. +//If themes get a lifecycle, the definition will be obtained from the environment database. +router.get('/themes', fhForms.middleware.formProjects.getFullTheme); + +//Get Config Associated With A Project +router.get('/config', fhForms.middleware.formProjects.getConfig); + +//Submit A Base64 File To A Submission +//This will decode the file to a binary string before streaming to the mongo database.
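+//Conceptually the decoding performed by fh-forms is equivalent to this sketch (helper name is hypothetical, the real streaming happens inside fh-forms):
+//  var binaryContent = new Buffer(base64EncodedFile, 'base64');
+//  streamToGridFS(binaryContent);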
+router.post('/submissions/:id/fields/:fieldid/files/:fileid/base64', fhForms.middleware.submissions.getRequestFileParameters, fhForms.middleware.submissions.submitFormFileBase64); + +//Submit A File To A Submission +router.post('/submissions/:id/fields/:fieldid/files/:fileid', fhForms.middleware.submissions.getRequestFileParameters, fhForms.middleware.submissions.submitFormFile); + +//Verify Submission And Mark As Complete +router.post('/submissions/:id/complete', fhForms.middleware.submissions.completeSubmission, fhmbaasMiddleware.appformsMiddleware.notifySubmissionComplete); + +//Get The Current Submission Status +router.get('/submissions/:id/status', fhForms.middleware.submissions.status); + +//Listing All Submissions +router.get('/submissions', fhForms.middleware.submissions.listProjectSubmissions); + +//Search For Submissions +router.post('/submissions/search', fhForms.middleware.submissions.listProjectSubmissions); + +//Get A Single Submission +router.get('/submissions/:id', fhForms.middleware.submissions.get); + +//Get A Single File Contained In A Submission. +//This will be streamed to the response. +router.get('/submissions/files/:fileId', fhForms.middleware.submissions.getSubmissionFile, fhForms.middleware.submissions.processFileResponse); + +//Export a submission as a PDF document +router.get('/submissions/:id/exportpdf', handlers.generatePDF); + +router.post('/submissions/export', handlers.exportCSV); + + +/** + * Error Handling Middleware + */ +router.use(function(err, req, res, next) {//jshint unused:false + logger.trace('Error Performing Forms Operation ', err); + res.status(500).json({err: err}); +}); + +/** + * Response Handler Middleware. + */ +router.use(function(req, res) { + req.appformsResultPayload = req.appformsResultPayload || {}; + if (req.appformsResultPayload.data) { + return res.status(200).json(req.appformsResultPayload.data); + } else { + return res.status(204); + } +}); + +module.exports = router; diff --git a/lib/handlers/app/handlers/generateCSV.js b/lib/handlers/app/handlers/generateCSV.js new file mode 100644 index 0000000..a4d5640 --- /dev/null +++ b/lib/handlers/app/handlers/generateCSV.js @@ -0,0 +1,88 @@ +var forms = require('fh-forms'); +var _ = require('underscore'); +var logger = require('../../../util/logger').getLogger(); +var archiver = require('archiver'); + +/** + * + * @param {object} submissionCsvValues + * @param {string} submissionCsvValues. 
CSV string for the form + * @param {object} res Response Object + * @param {function} next Middleware Next Function + */ +function processExportResponse(submissionCsvValues, res, next) { + var zip = archiver('zip'); + + // convert csv entries to in-memory zip file and stream response + + res.set({ + 'Content-type': 'application/zip', + 'Content-disposition': 'attachment; filename=submissions.zip' + }); + + logger.debug({submissionCsvValues: submissionCsvValues}, "processExportResponse"); + + zip.on('error', function(err) { + logger.error({error: err}, "_processExportResponse "); + if (err && !res.headersSent) { + return next(err); + } + }); + + zip.pipe(res); + + _.each(submissionCsvValues, function(csv, form) { + zip.append(csv, {name: form + '.csv'}); + }); + + zip.finalize(function(err) { + if (err && !res.headersSent) { + logger.error({error: err}, "processExportResponse finalize"); + return next(err); + } + + logger.debug("processExportResponse finalize finished"); + }); +} + +/** + * Export Submissions As CSV Files Contained In A Single Zip + * + * @param {object} req Request Object + * @param {object} res Response Object + * @param {function} next Middleware Next Function + */ +module.exports = function exportCSV(req, res, next) { + + //req.appMbaasModel will already have been defined from authentication. + //If it's not set, then you would never have gotten to this handler + var appMbaasModel = req.appMbaasModel || {}; + + //The file url will be the cloud app url. + //:id and :fileId will be filled in by fh-forms for each file in the submission. + var fileUrlTemplate = '/mbaas/forms/{appGuid}/submission/:id/file/:fileId'.replace('{appGuid}', appMbaasModel.guid); + + var cloudFileUrl = appMbaasModel.url + fileUrlTemplate; + + var params = { + "appId": req.body.projectId, + "subid": req.body.submissionId, + "formId": req.body.formId, + "fieldHeader": req.body.fieldHeader, + //The download url should be the url of the cloud app + "downloadUrl": cloudFileUrl + }; + + logger.debug({body: req.body, params: params}, "Middleware exportCSV "); + + forms.core.exportSubmissions(req.connectionOptions, params, function(err, submissionCsvValues) { + if (err) { + logger.error({error: err}, "Middleware Export Submissions "); + return next(err); + } + + logger.debug({submissionCsvValues: submissionCsvValues.length}, "Middleware exportCSV"); + + processExportResponse(submissionCsvValues, res, next); + }); +}; \ No newline at end of file diff --git a/lib/handlers/app/handlers/generatePDF.js b/lib/handlers/app/handlers/generatePDF.js new file mode 100644 index 0000000..dd9d084 --- /dev/null +++ b/lib/handlers/app/handlers/generatePDF.js @@ -0,0 +1,62 @@ +var forms = require('fh-forms'); +var _ = require('underscore'); +var fhConfig = require('fh-config'); +var fs = require("fs"); +var logger = require('../../../util/logger').getLogger(); + +function createRequestParams(req) { + + //req.appMbaasModel will already have been defined from authentication. + //If it's not set, then you would never have gotten to this handler + var appMbaasModel = req.appMbaasModel || {}; + + //The file url will be the cloud app url. + //:id and :fileId will be filled in by fh-forms for each file in the submission. 
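+  //For example, with an illustrative appGuid of "abc123" the template becomes
+  //  '/mbaas/forms/abc123/submission/:id/file/:fileId'
+  //and fh-forms substitutes :id and :fileId per file when it builds the download links.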
+ var fileUrlTemplate = '/mbaas/forms/{appGuid}/submission/:id/file/:fileId'.replace('{appGuid}', appMbaasModel.guid); + + return { + _id: req.params.id, + pdfExportDir: fhConfig.value('fhmbaas.pdfExportDir'), + filesAreRemote: false, + downloadUrl: appMbaasModel.url, + fileUriPath: fileUrlTemplate, + location: req.appMbaasModel.coreHost, + maxConcurrentPhantomPerWorker: fhConfig.value('fhmbaas.maxConcurrentPhantomPerWorker') + }; +} + +/** + * Handler for generating a PDF representation for an existing Submission + * + * @param req the HTTP request + * @param res the HTTP response + * @param next callback for next function in the express stack + */ +module.exports = function generatePDF(req, res, next) { + var params = createRequestParams(req); + logger.debug("Middleware generatePDF ", {params: params}); + + forms.core.generateSubmissionPdf(_.extend(params, req.connectionOptions), function(err, submissionPdfLocation) { + if (err) { + logger.error("Middleware generatePDF", {error: err}); + return next(err); + } + + logger.debug("Middleware generatePDF ", {submissionPdfLocation: submissionPdfLocation}); + + //Streaming the file as an attachment + res.download(submissionPdfLocation, '' + req.params.id + ".pdf", function(fileDownloadError) { + + //Download Complete, remove the cached file + fs.unlink(submissionPdfLocation, function() { + if (fileDownloadError) { + logger.error("Middleware generatePDF ", {error: fileDownloadError}); + //If the headers have not been sent to the client, can use the error handler + if (!res.headersSent) { + return next(fileDownloadError); + } + } + }); + }); + }); +}; \ No newline at end of file diff --git a/lib/handlers/app/handlers/index.js b/lib/handlers/app/handlers/index.js new file mode 100644 index 0000000..46bfe76 --- /dev/null +++ b/lib/handlers/app/handlers/index.js @@ -0,0 +1,9 @@ + + + + + +module.exports = { + generatePDF: require('./generatePDF'), + exportCSV: require('./generateCSV') +}; \ No newline at end of file diff --git a/lib/handlers/forms.js b/lib/handlers/forms.js new file mode 100644 index 0000000..4b0345e --- /dev/null +++ b/lib/handlers/forms.js @@ -0,0 +1,56 @@ +var express = require('express'); +var common = require('../util/common'); +var formsRouter = require('./forms/forms.js'); +var submissionsRouter = require('../routes/forms/submissions/router'); +var themesRouter = require('./forms/themes.js'); +var projectsRouter = require('./forms/projects.js'); +var dataSourcesRouter = require('../routes/forms/dataSources/router'); +var fhForms = require('fh-forms'); +var fhmbaasMiddleware = require('fh-mbaas-middleware'); +var logger = require('../util/logger').getLogger(); + +var router = express.Router({ + mergeParams: true +}); + +//Getting The Relevant Environment Database. +router.use(fhmbaasMiddleware.envMongoDb.getOrCreateEnvironmentDatabase); + +router.use(fhForms.middleware.parseMongoConnectionOptions); + +router.use("/forms", formsRouter); + +router.use("/submissions", submissionsRouter()); + +router.use("/themes", themesRouter); + +router.use("/apps", projectsRouter); + +router.use("/data_sources", dataSourcesRouter); + +//Response Handler For All Forms Routes +router.use(function(req, res) { + + logger.debug("Responding To Forms Request ", req.originalUrl); + req.appformsResultPayload = req.appformsResultPayload || {}; + var resultData = req.appformsResultPayload.data; + + //Nothing to respond with and no errors.
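+  //Upstream fh-forms middleware is expected to have populated the payload, e.g. (illustrative shape):
+  //  req.appformsResultPayload = { data: { forms: [ /* ... */ ] } };
+  //If data is present we answer 200 with that JSON, otherwise we fall through to the 204 below.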
+ if (resultData) { + res.status(200); + res.json(resultData); + } else { + res.status(204); + res.json({}); + } +}); + +//Error Handler for forms routes +//jshint unused:false +//express required 4 parameters for the error handler, DO NOT CHANGE! +router.use(function(err, req, res, next) { + logger.error("Error In Forms Request", err ); + return common.handleError(err, err.message || 'Forms Error', err.httpCode || 500, req, res); +}); + +module.exports = router; diff --git a/lib/handlers/forms/forms.js b/lib/handlers/forms/forms.js new file mode 100644 index 0000000..3d2a6c7 --- /dev/null +++ b/lib/handlers/forms/forms.js @@ -0,0 +1,26 @@ +var express = require('express'); + +var fhForms = require('fh-forms'); +var formsMiddleware = fhForms.middleware.forms; + +var router = express.Router({ + mergeParams: true +}); + +//List All Forms +router.get('/', formsMiddleware.list); + +//Get A Single Form +router.get('/:id', formsMiddleware.get); + +router.post('/:id/deploy', formsMiddleware.deploy); + +//Delete A Single Form. Also removes submission data +router.delete('/:id', formsMiddleware.remove); + +router.post('/:id/undeploy', formsMiddleware.undeploy); + +//Submissions Related To This Form. +router.get('/:id/submissions', formsMiddleware.submissions); + +module.exports = router; \ No newline at end of file diff --git a/lib/handlers/forms/projects.js b/lib/handlers/forms/projects.js new file mode 100644 index 0000000..62a3930 --- /dev/null +++ b/lib/handlers/forms/projects.js @@ -0,0 +1,22 @@ +var express = require('express'); +var fhForms = require('fh-forms'); +var formsProjectsMiddleware = fhForms.middleware.formProjects; + +var router = express.Router({mergeParams: true}); + +//Updating Forms To An Existing Project + +router.post('/', formsProjectsMiddleware.update, formsProjectsMiddleware.updateTheme); + +router.put('/:id', formsProjectsMiddleware.update, formsProjectsMiddleware.updateTheme); + +router.put('/:id/config', formsProjectsMiddleware.updateConfig); + +router.delete('/:id', formsProjectsMiddleware.remove); + +//Importing Form Project association Form The Core Platform +router.post('/import', formsProjectsMiddleware.importProjects); + +router.post('/config/import', formsProjectsMiddleware.importProjectConfig); + +module.exports = router; \ No newline at end of file diff --git a/lib/handlers/forms/themes.js b/lib/handlers/forms/themes.js new file mode 100644 index 0000000..3bf95da --- /dev/null +++ b/lib/handlers/forms/themes.js @@ -0,0 +1,25 @@ +var express = require('express'); +var fhForms = require('fh-forms'); +var themesMiddleware = fhForms.middleware.themes; + +var router = express.Router({ + mergeParams: true +}); + +//Deploying a theme to the mbaas. +router.post("/:id/deploy", themesMiddleware.deploy); + +//Deploying a theme to the mbaas. +router.post("/", themesMiddleware.create); + +//Updating A Theme That Already Exists. +router.put("/:id", themesMiddleware.update); + +//Deleting A Theme From The Mbaas. +router.delete("/:id", themesMiddleware.remove); + +//Importing Themes From The Core Platform. 
+router.post("/import", themesMiddleware.importThemes); + +module.exports = router; + diff --git a/lib/handlers/healthmonitor.js b/lib/handlers/healthmonitor.js new file mode 100755 index 0000000..717ac53 --- /dev/null +++ b/lib/handlers/healthmonitor.js @@ -0,0 +1,105 @@ +var express = require('express'); +var fhconfig = require('fh-config'); +var logger = require('../util/logger').getLogger(); +var MongoClient = require('mongodb').MongoClient; +var request = require('request'); +var ditchHelper = require('../util/ditchhelper.js'); +var util = require('util'); + +function result(id, status, error) { + return { + id: id, + status: status, + error: error + }; +} + +function checkMongoDB(callback) { + MongoClient.connect(fhconfig.mongoConnectionString(), function(err, db) { + if (db && db.close && typeof db.close === 'function') { + db.close(); + } + + if (err) { + logger.error({err: err}, 'health check error: can not connect to mongodb'); + return callback(result('mongodb', 'error', util.inspect(err))); + } + return callback(null, result('mongodb', 'OK', null)); + }); +} + +function checkDitch(callback) { + ditchHelper.checkStatus(function(err, ditchStatus) { + + if (err) { + return callback(result('fh-ditch', 'error', util.inspect(err))); + } + + if (ditchStatus.statusCode && ditchStatus.statusCode !== 200) { + return callback(result('fh-ditch', 'error', JSON.stringify(ditchStatus))); + } + return callback(null, result('fh-ditch', 'OK', null)); + }); +} + +function checkHttpService(name, url) { + return function(callback) { + request.get(url, function(err) { + if (err) { + return callback(result(name, 'error', util.inspect(err))); + } + return callback(null, result(name, 'OK', null)); + }); + }; +} + +function initFhHealth() { + var fhhealth = require('fh-health'); + fhhealth.init(); + fhhealth.setMaxRuntime(10000); + + function constructHealthUrl(host, port, part) { + return 'http://' + host + ':' + port + '/sys/info/' + part; + } + + var statsdUrl = constructHealthUrl(fhconfig.value('fhstats.host'), + fhconfig.value('fhstats.port'), 'ping'); + var metricsUrl = constructHealthUrl(fhconfig.value('fhmetrics.host'), + fhconfig.value('fhmetrics.port'), 'health'); + var messagingUrl = constructHealthUrl(fhconfig.value('fhmessaging.host'), + fhconfig.value('fhmessaging.port'), 'health'); + + var checkFhStats = checkHttpService('fh-statsd', statsdUrl); + var checkFhMetrics = checkHttpService('fh-metrics', metricsUrl); + var checkFhMessaging = checkHttpService('fh-messaging', messagingUrl); + + fhhealth.addCriticalTest('Check Mongodb connection', checkMongoDB); + + if (fhconfig.value('openshift3')) { + fhhealth.addCriticalTest('Check fh-statsd running', checkFhStats); + fhhealth.addCriticalTest('Check fh-metrics running', checkFhMetrics); + fhhealth.addCriticalTest('Check fh-messaging running', checkFhMessaging); + } else { + fhhealth.addCriticalTest('Check fh-ditch status', checkDitch); + } + + return fhhealth; +} + +function healthMonitorRoutes() { + var fhhealth = initFhHealth(); + var router = new express.Router(); + router.get('/', function(req, res) { + fhhealth.runTests(function(err, testResults) { + var jsonResults = testResults ? 
JSON.parse(testResults) : null; + if (err || !jsonResults || jsonResults.status !== 'ok') { + res.status(500); + } + res.end(testResults); + }); + }); + + return router; +} + +module.exports = healthMonitorRoutes; diff --git a/lib/handlers/stats/stats_client.js b/lib/handlers/stats/stats_client.js new file mode 100644 index 0000000..7b95598 --- /dev/null +++ b/lib/handlers/stats/stats_client.js @@ -0,0 +1,39 @@ +var logger = require('../../util/logger').getLogger(); +var request = require('request'); +var fhconfig = require('fh-config'); +var statsConfig = fhconfig.getConfig().rawConfig.fhstats; + +function callStats(body, cb) { + var statsHost = statsConfig.protocol + "://" + statsConfig.host + ":" + statsConfig.port; + var url = statsHost + '/stats/history'; + + var params = { + counter: body.counter, + f: body.f + }; + + logger.debug('Proxying to statsd', url, params); + + request.post({ + url: url, + body: params, + headers: { + 'x-feedhenry-statsapikey': statsConfig.apikey + }, + json: true + }, function(err, statsRes, statsBody) { + logger.trace('Proxied to statsd', err, statsRes, statsBody); + + if (err) { + return cb(err); + } + + if (statsRes.statusCode !== 200) { + return cb(new Error('Failed to call stats: ' + statsRes.statusCode)); + } + + return cb(null, statsBody); + }); +} + +module.exports = callStats; \ No newline at end of file diff --git a/lib/handlers/stats/stats_router.js b/lib/handlers/stats/stats_router.js new file mode 100644 index 0000000..57aa647 --- /dev/null +++ b/lib/handlers/stats/stats_router.js @@ -0,0 +1,25 @@ +var express = require('express'); +var logger = require('../../util/logger').getLogger(); +var auth = require('../../middleware/auth'); +var fhconfig = require('fh-config'); +var callStats = require('./stats_client'); + +var router = express.Router({ + mergeParams: true +}); + +router.use(auth.admin(fhconfig)); + +router.post(['/', '/history'], function(req, res) { + logger.trace('stats', req.body); + + callStats(req.body, function(err, statsRes) { + if (err) { + return res.status(500).json(err); + } + + return res.json(statsRes); + }); +}); + +module.exports = router; diff --git a/lib/handlers/sys.js b/lib/handlers/sys.js new file mode 100755 index 0000000..f6b317c --- /dev/null +++ b/lib/handlers/sys.js @@ -0,0 +1,31 @@ +var express = require('express'); +var cors = require('cors'); +var common = require('../util/common.js'); +var healthRoute = require('./healthmonitor.js'); + +function sysRoute() { + var sys = new express.Router(); + sys.use(cors()); + + sys.get('/info/ping', function(req, res) { + res.end("'OK'"); + }); + + + sys.get('/info/version', function(req, res) { + common.getVersion(function(err, version) { + if (err) { + return common.handleError(err, 'Error getting version', 500, req, res); + } + + common.setResponseHeaders(res); + res.end(JSON.stringify(version)); + }); + }); + + sys.use('/info/health', healthRoute()); + + return sys; +} + +module.exports = sysRoute; diff --git a/lib/import/appDataImportController.js b/lib/import/appDataImportController.js new file mode 100644 index 0000000..1a2738c --- /dev/null +++ b/lib/import/appDataImportController.js @@ -0,0 +1,78 @@ +"use strict"; + +var logger = require('../util/logger').getLogger(); +var models = require('fh-mbaas-middleware').models; +var async = require('async'); + +var AppdataJobSchema = require('../models/AppdataJobSchema'); +var AppdataJob = require('../models/index').AppdataJob; + +var status = AppdataJobSchema.statuses; + +function checkAppExists(appId, env, cb) { + var 
AppMbaasModel = models.getModels().AppMbaas; + AppMbaasModel.findOne({guid: appId, environment: env}, function(err, app) { + if (err) { + return cb(err); + } + + if (!app) { + return cb('No application with id "' + appId + '" could be found'); + } else { + return cb(); + } + }); +} + +function createImportJob(params, cb) { + var job = new AppdataJob(); + job.jobType = AppdataJobSchema.types.IMPORT; + job.domain = params.domain; + job.environment = params.environment; + job.appid = params.appid; + job.metadata = { + fileSize: params.filesize, + filePath: params.filepath, + fileId: params.fileId, + uploadFinished: false + }; + + job.save(function(err) { + return cb(err, job); + }); +} + +exports.startImportJob = function(params, callback) { + var appId = params.appid; + var env = params.environment; + + async.waterfall([ + async.apply(checkAppExists, appId, env), + async.apply(AppdataJob.findOne.bind(AppdataJob), { + appid: appId, + status: { + $in: [status.QUEUED, status.INPROGRESS] + } + }) + ], function(err, task) { + if (err) { + logger.error('[APPDATAIMPORT] Error searching existing tasks', err); + return callback(err); + } + + if (task) { + logger.warn('[%s] import is already in progress', appId); + return callback({code: 409, message: 'created'}); + } + + logger.debug('[%s] Creating import task db model', appId, params); + createImportJob(params, function(err, task) { + if (err) { + logger.error('[APPDATAIMPORT] Error creating task', err); + return callback(err); + } + + return callback(null, task); + }); + }); +}; \ No newline at end of file diff --git a/lib/jobs/appDataExportCleanerJob.js b/lib/jobs/appDataExportCleanerJob.js new file mode 100644 index 0000000..af6e7be --- /dev/null +++ b/lib/jobs/appDataExportCleanerJob.js @@ -0,0 +1,55 @@ +var log = require('../util/logger'); + +var TaggedLogger = require('./taggedLogger').TaggedLogger; + +const LOG_TAG = '[APPDATAEXPORT CLEANER]'; + +var logger = new TaggedLogger(log.getLogger(), LOG_TAG); + +var Cleaner = require('../export/cleaner/appDataExportCleanerRunner').AppDataExportCleanerRunner; + +const FINISH_EVENT = require('./progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('./progressPublisher').FAIL_EVENT; +const ProgressPublisher = require('./progressPublisher').ProgressPublisher; + +const JOB_NAME = 'appExportCleaner'; +const DEFAULT_EXPORT_CLEANER_FREQUENCY = '*/30 * * * *'; + +function cleanerJob(agenda, opts) { + opts = opts || {}; + var frequency = opts.frequency || DEFAULT_EXPORT_CLEANER_FREQUENCY; + logger.info('Defining agenda job', JOB_NAME); + + agenda.define(JOB_NAME, function(job, done) { + logger.info('Start running job', {jobName: JOB_NAME}); + + var context = { + logger: logger + }; + + var cleaner = new Cleaner(context); + cleaner.on(FINISH_EVENT, function() { + logger.info('Job execution finished'); + done(); + }); + cleaner.on(FAIL_EVENT, function(message) { + logger.info('Job execution failed', message); + done(message); + }); + + var publisherFunction = function(message) { + logger.info('EVENT', message); + }; + + // We do not want to make 'batch' update, so we persist each received message: queue size = 1 + var progressPublisher = new ProgressPublisher(1, publisherFunction); + progressPublisher.listen(cleaner); + + cleaner.run(); + }); + + agenda.every(frequency, JOB_NAME); +} + +module.exports.JOB_NAME = JOB_NAME; +module.exports.cleanerJob = cleanerJob; diff --git a/lib/jobs/appDataJob.js b/lib/jobs/appDataJob.js new file mode 100644 index 0000000..7f256df --- /dev/null +++ 
b/lib/jobs/appDataJob.js @@ -0,0 +1,93 @@ +var models = require('fh-mbaas-middleware').models; +var fhconfig = require('fh-config'); +var logger = require('../util/logger').getLogger(); + +var AppDataExportRunner = require('../export/AppDataExportRunner').AppExportRunner; +var ProgressPersistor = require('./progressPersistor').ProgressPersistor; +var AppDataImportRunner = require('../appdata/import/appDataImportRunner').AppDataImportRunner; + +const contextBuilder = require('./context').contextBuilder; + +const JOB_TYPES = require('../models/AppdataJobSchema').types; +const OUTPUT_DIR = fhconfig.value('fhmbaas.appdataexport.output_dir'); +const EXPORT_LOG_TAG = '[APPDATAEXPORT]'; +const IMPORT_LOG_TAG = '[APPDATAIMPORT]'; + +function exportContext(appInfo, jobModel, outputDir) { + var TaggedLogger = require('./taggedLogger').TaggedLogger; + + return contextBuilder() + .withApplicationInfo(appInfo) + .withJobModel(jobModel) + .withLogger(new TaggedLogger(logger.child({appInfo: {appGuid: appInfo.guid, name: appInfo.name} }), EXPORT_LOG_TAG)) + .withCustomAtt('outputDir', outputDir) + .build(); +} + +function importContext(appInfo, jobModel) { + var TaggedLogger = require('./taggedLogger').TaggedLogger; + + return contextBuilder() + .withApplicationInfo(appInfo) + .withJobModel(jobModel) + .withLogger(new TaggedLogger(logger.child({appInfo: {appGuid: appInfo.guid, name: appInfo.name} }), IMPORT_LOG_TAG)) + .withCustomAtt('output', {}) + .withCustomAtt('input', { path: jobModel.metadata.filePath}) + .build(); +} + +function handleError(job, error) { + logger.error('[APPDATAJOB] Error occured when running job', {error: error, job: job.toJSON()}); + job.fail(error.message || error, function(err) { + if (err) { + logger.error('[APPDATAJOB] Error when fail job', {err: err}); + } + }); +} + +function start(appDataJobModel, heartbeat) { + logger.info('[APPDATAJOB] Start running job', {job: appDataJobModel.toJSON()}); + var appGuid = appDataJobModel.appid; + var env = appDataJobModel.environment; + var AppMbaasModel = models.getModels().AppMbaas; + + AppMbaasModel.findOne({guid: appGuid, environment: env}, function(err, appData) { + if (err) { + return handleError(appDataJobModel, err); + } + + if (!appData) { + return handleError(appDataJobModel, new Error('No application found with guid[' + appGuid + '] and environment [' + env + ']')); + } + + var runner = null; + var context; + switch (appDataJobModel.jobType) { + case JOB_TYPES.EXPORT: + context = exportContext(appData, appDataJobModel, OUTPUT_DIR); + runner = new AppDataExportRunner(context, heartbeat); + break; + case JOB_TYPES.IMPORT: + context = importContext(appData, appDataJobModel); + runner = new AppDataImportRunner(context, heartbeat); + break; + default: + logger.error('[APPDATAJOB] Unsupported jobType: ' + appDataJobModel.jobType); + break; + } + + if (runner) { + // Persist the progress of the runner + new ProgressPersistor(context.logger).listen(runner, appDataJobModel, function(err) { + if (err) { + logger.error('[APPDATAJOB] Error persisting progress', err); + } + }); + runner.run(); + } + }); +} + +module.exports = { + start: start +}; \ No newline at end of file diff --git a/lib/jobs/appDataRunnerJob.js b/lib/jobs/appDataRunnerJob.js new file mode 100644 index 0000000..367106b --- /dev/null +++ b/lib/jobs/appDataRunnerJob.js @@ -0,0 +1,97 @@ +var logger = require('../util/logger').getLogger(); +var AppdataJobModel = require('../models').AppdataJob; +var AppDataJob = require('./appDataJob'); + +const JOB_NAME = "appDataRunner"; 
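+//Typical wiring (illustrative only; real option values come from configuration, see the options documented below):
+//  var appDataRunnerJob = require('./appDataRunnerJob');
+//  appDataRunnerJob(agenda, {lock_time: 30000, concurrency: 1, heartbeat: 30000, frequency: '30 seconds'});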
+const DEFAULT_LOCK_LIFETIME = 30*1000; +const DEFAULT_CONCURRENCY = 1; +const DEFAULT_HEARTBEAT_INTERVAL = 30*1000; +const DEFAULT_SCHEDULER_FREQUENCY = '30 seconds'; + +/** + * Find the next job needs to run and run it. + * @param {int} heartBeat the heartbeat frequency of the job (milliseconds) + * @param {int} concurrency how many jobs can run at the same time + * @param {Function} cb + */ +function runNextJob(heartBeat, concurrency, cb) { + logger.info('looking for next app data job to run', {heartbeat: heartBeat, concurrency: concurrency}); + AppdataJobModel.runningJobs(heartBeat, function(err, runningJobs) { + if (err) { + logger.error('failed to find running jobs', {err: err}); + return cb(err); + } + + logger.info('found running jobs', {runningJobs: runningJobs}); + + if (runningJobs.length >= concurrency) { + logger.info('data job concurrency limit reached', {concurrency: concurrency}); + return cb(); + } else { + logger.info('data job concurrency is not reached'); + } + + AppdataJobModel.findNextJob(function(err, nextJob) { + if (err) { + logger.error('failed to find next job to run', {err: err}); + return cb(err); + } + + if (!nextJob || nextJob.length === 0) { + logger.info('no new job to run'); + return cb(); + } + + nextJob = nextJob[0]; + + nextJob.checkCurrentState(function(err) { + if (err) { + logger.error('error occured when check job state', {err: err, job: nextJob}); + } + var jobReady = nextJob.readyToProceed(); + if (jobReady) { + process.nextTick(function() { + AppDataJob.start(nextJob, heartBeat); + }); + } else { + logger.info('job is not ready. Skip it for now', {job: nextJob}); + } + + return cb(); + }); + }); + }); +} + +/** + * Define the scheduler job that will run the app data jobs. + * @param {object} agenda the agenda module + * @param {object} opts + * @param {int} opts.lock_time lock time on the job to prevent multiple worker running the same job (milliseconds) + * @param {int} opts.concurrency how many jobs are allowed to run at the same time + * @param {int} opts.heartbeat how often each job should update its db entry to show it's alive (milliseconds) + * @param {int} opts.frequency how often the scheduler should run to look for next job + */ +module.exports = function(agenda, opts) { + opts = opts || {}; + var lockTime = opts.lock_time || DEFAULT_LOCK_LIFETIME; + var concurrency = opts.concurrency || DEFAULT_CONCURRENCY; + var heartBeat = opts.heartbeat || DEFAULT_HEARTBEAT_INTERVAL; + var frequency = opts.frequency || DEFAULT_SCHEDULER_FREQUENCY; + + logger.info('defining agenda data job', {jobName: JOB_NAME, frequency: frequency, lockTime: DEFAULT_LOCK_LIFETIME}); + + agenda.define(JOB_NAME, {lockLifetime: lockTime}, function(job, done) { + runNextJob(heartBeat, concurrency, function(err) { + if (err) { + logger.error('Error occured when run app data job', {err: err}); + } + return done(err); + }); + + }); + + agenda.every(frequency, JOB_NAME); +}; + +module.exports.JOB_NAME = JOB_NAME; \ No newline at end of file diff --git a/lib/jobs/appDataStalledJobsFinder.js b/lib/jobs/appDataStalledJobsFinder.js new file mode 100644 index 0000000..e403c21 --- /dev/null +++ b/lib/jobs/appDataStalledJobsFinder.js @@ -0,0 +1,71 @@ +var logger = require('../util/logger').getLogger(); +var AppdataJob = require('../models').AppdataJob; +var async = require('async'); + +const JOB_NAME = "appDataStalledJobsFinder"; +const DEFAULT_LOCK_LIFETIME = 1*60*1000; +const DEFAULT_HEARTBEAT_INTERVAL = 30*1000; +const DEFAULT_STALLED_JOB_FINDER_FREQUENCY = '1 minute'; + +/** + 
* Find stalled jobs and mark them as failed + * @param {int} heartBeat the heartbeat frequency of the job (milliseconds) + * @param {Function} cb + */ +function failStalledJobs(heartBeat, cb) { + logger.info('looking for stalled jobs', {heartbeat: heartBeat}); + AppdataJob.stalledJobs(heartBeat, function(err, jobs) { + if (err) { + logger.error('failed to find stalled jobs', {err:err}); + return cb(err); + } + + if (jobs.length === 0) { + logger.info('no stalled jobs found'); + return cb(); + } + + logger.info('found stalled jobs', {number_of_jobs: jobs.length}); + async.each(jobs, function(job, callback) { + job.fail('timed out', callback); + }, function(err) { + if (err) { + logger.error('failed to update stalled job', {err: err}); + } else { + logger.info('stalled jobs status updated'); + } + return cb(err); + }); + }); +} + +/** + * Define the repeatable job that will find stalled jobs and mark them failed + * @param {object} agenda the agenda module + * @param {object} opts + * @param {int} opts.lock_time lock time on the job to prevent multiple worker running the same job (milliseconds) + * @param {int} opts.heartbeat how often each job should update its db entry to show it's alive (milliseconds) + * It should be the same as the heartbeat value defined for the appDataRunnerJob + * @param {int} opts.frequency how often the job should run to find stalled jobs + */ +module.exports = function(agenda, opts) { + opts = opts || {}; + var lockTime = opts.lock_time || DEFAULT_LOCK_LIFETIME; + var heartBeat = opts.heartbeat || DEFAULT_HEARTBEAT_INTERVAL; + var frequency = opts.frequency || DEFAULT_STALLED_JOB_FINDER_FREQUENCY; + logger.info('defining agenda data job', {jobName: JOB_NAME, frequency: frequency, lockTime: DEFAULT_LOCK_LIFETIME}); + + agenda.define(JOB_NAME, {lockLifetime: lockTime}, function(job, done) { + + failStalledJobs(heartBeat, function(err) { + if (err) { + logger.error('Error occured when run app data job', {err: err}); + } + return done(err); + }); + }); + + agenda.every(frequency, JOB_NAME); +}; + +module.exports.JOB_NAME = JOB_NAME; \ No newline at end of file diff --git a/lib/jobs/context.js b/lib/jobs/context.js new file mode 100644 index 0000000..ea92bba --- /dev/null +++ b/lib/jobs/context.js @@ -0,0 +1,145 @@ +const PROGRESS_EVENT = require('../jobs/progressPublisher').PROGRESS_EVENT; +var log = require('../util/logger'); + +const STATUSES = require('../models/BaseImportExportJobSchema').statuses; + +/** + * Builder to be used to create context objects by using fluent api. + * @constructor + */ +function Builder() { + var self = this; + self.context = { + logger: log.getLogger(), + emitter: undefined, + progress: { + /** + * Wraps a callback so that a progress is automatically sent when the callback is invoked + * @param cb the callback to be wrapped + * @param customMessage an optional custom message to pass to the progress. 
+ * @param ignoreError defines if the progress must be sent even in case of errors (default to false) + * @returns {Function} + */ + wrappCallback: function(cb, customMessage, ignoreError) { + var progress = self.context.progress; + var message = customMessage || STATUSES.INPROGRESS; + if (ignoreError) { + return function() { + progress.next(message); + cb.apply(self.context.progress, arguments); + }; + } else { + return function() { + if (!arguments[0]) { + progress.next(message); + } + cb.apply(self.context.progress, arguments); + }; + } + }, + /** + * Increments the progress and send a progress event (if total and emitter are both defined) + * @param message a custom message to pass to the progress. If not specified, it will assume the following values: + * * STATUSES.INPROGRESS if current !=== total + * * STATUSES.COMPLETE if current === total + * @param steps how many steps to increment (defaults to 1) + */ + next: function(message, steps) { + var progress = this; + var emitter = self.context.emitter; + + if (progress.total) { + progress.current += steps || 1; + + if (progress.current > progress.total) { + self.context.logger.warn('Number of progress steps (%s) is greater than the configured total (%s)', progress.current, progress.total); + } + + if (emitter) { + var progressMessage = message || (progress.current === progress.total ? STATUSES.FINISHED : STATUSES.INPROGRESS); + emitter.emit(PROGRESS_EVENT, progressMessage, progress.current, progress.total); + } + } + }, + current: 0 + } + }; + + // Fluent api + + /** + * Defines a custom attribute inside the context object + * @param attributeName + * @param value + * @returns {Builder} + */ + this.withCustomAtt = function(attributeName, value) { + self.context[attributeName] = value; + return self; + }; + + /** + * Defines the logger + * @param logger + * @returns {Builder} + */ + this.withLogger = function(logger) { + self.context.logger = logger; + return self; + }; + + /** + * Attaches a job model to the context + * @param jobModel + * @returns {Builder} + */ + this.withJobModel = function(jobModel) { + self.context.jobID = jobModel._id.toString(); + self.context.jobModel = jobModel; + return self; + }; + + /** + * Attaches an application info object to the model + * @param appInfo + * @returns {Builder} + */ + this.withApplicationInfo = function(appInfo) { + self.context.appInfo = appInfo; + return self; + }; + + /** + * Attaches an event emitter to the model. Used for progress events. 
+ * @param emitter + * @returns {Builder} + */ + this.withEventEmitter = function(emitter) { + self.context.emitter = emitter; + return self; + }; + + /** + * Validates received parameters and builds the context object + * @returns {{logger: *, emitter: undefined, progress: {wrappCallback: Builder.context.progress.wrappCallback, next: Builder.context.progress.next, current: number}}|*} + */ + this.build = function() { + if (!self.context.appInfo) { + throw new Error('appInfo field is mandatory'); + } + if (!self.context.jobModel) { + throw new Error('jobModel field is mandatory'); + } + return self.context; + }; +} + +/** + * Returns a context builder instance + * @returns {Builder} + */ +function contextBuilder() { + return new Builder(); +} + +module.exports.contextBuilder = contextBuilder; \ No newline at end of file diff --git a/lib/jobs/progressPersistor.js b/lib/jobs/progressPersistor.js new file mode 100644 index 0000000..b2f8908 --- /dev/null +++ b/lib/jobs/progressPersistor.js @@ -0,0 +1,112 @@ +var _logger = require('../util/logger').getLogger(); +var ProgressPublisher = require('../jobs/progressPublisher').ProgressPublisher; + +const STATUS_EVENT = require('../jobs/progressPublisher').STATUS_EVENT; +const START_EVENT = require('../jobs/progressPublisher').START_EVENT; +const PROGRESS_EVENT = require('../jobs/progressPublisher').PROGRESS_EVENT; +const FINISH_EVENT = require('../jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('../jobs/progressPublisher').FAIL_EVENT; + +const STATUSES = require('../models/AppdataJobSchema').statuses; + +// Persist a change of status. +function persistStatusChange(progressModel, status, cb) { + var logger = this.logger; + logger.info('UPDATING STATUS TO ', status); + + progressModel.set('status', status); + + progressModel.save(function(err) { + if (err) { + logger.error('Failed to update task model due to error ' + err, progressModel); + return cb(err, status); + } + }); +} + +function persistProgressChange(progressModel, progress, current, total, cb) { + var logger = this.logger; + progressModel.set('progress', progress); + progressModel.set('step', current); + progressModel.set('totalSteps', total); + + + progressModel.save(function(err) { + if (err) { + logger.error('Failed to update task model due to error ' + err, progressModel); + return cb(err, progress); + } + }); +} + +function publish(model, message, cb) { + var logger = this.logger; + // Dispatch each message to its persistor function + switch (message.type) { + case STATUS_EVENT: + logger.debug('Persisting event', message); + persistStatusChange.call(this, model, message.data, cb); + return; + case START_EVENT: + logger.debug('Persisting event', message); + persistStatusChange.call(this, model, STATUSES.INPROGRESS, cb); + return; + case PROGRESS_EVENT: + logger.debug('Persisting event', message); + persistProgressChange.call(this, model, message.data, message.current, message.total, cb); + return; + case FINISH_EVENT: + logger.debug('Persisting event', message); + persistStatusChange.call(this, model, STATUSES.FINISHED, cb); + return; + case FAIL_EVENT: + logger.debug('Persisting event', message); + persistStatusChange.call(this, model, STATUSES.FAILED, cb); + return; + default: + return; + } +} + +/** + * Persists the received events to the database using the passed in model + * + * @param emitter the object that will emit the events. Currently, the only saved events are: + * - STATUS_EVENT. Expected params : status text + * - PROGRESS_EVENT. 
Expected params: progress message, current value, total value + * - FINISH_EVENT. No params expected + * - FAIL_EVENT. Expected params: the error message. + * All the other events are ignored. + * @param model the model to be used to persist the events + * @param cb a callback to be called ONLY if an error occurs saving the events + * @constructor + */ +function ProgressPersistor(logger) { + this.logger = logger ? logger: _logger; +} + +ProgressPersistor.prototype.listen = function(emitter, model, cb) { + var self = this; + var logger = self.logger; + + // We reuse the publisher code to 'publish' the events on the database + // Let's create a custom publisher + this.publisherFunction = function(message) { + logger.info('PUBLISHING', message); + publish.call(self, model, message[0], cb); + }; + + // We do not want to make 'batch' updates, so we persist each received message: queue size = 1 + this.progressPublisher = new ProgressPublisher(1, this.publisherFunction); + this.progressPublisher.listen(emitter); + + return this; +}; + +module.exports.ProgressPersistor = ProgressPersistor; + +module.exports.STATUS_EVENT = STATUS_EVENT; +module.exports.START_EVENT = START_EVENT; +module.exports.PROGRESS_EVENT = PROGRESS_EVENT; +module.exports.FAIL_EVENT = FAIL_EVENT; +module.exports.FINISH_EVENT = FINISH_EVENT; diff --git a/lib/jobs/progressPublisher.js b/lib/jobs/progressPublisher.js new file mode 100644 index 0000000..abb2c03 --- /dev/null +++ b/lib/jobs/progressPublisher.js @@ -0,0 +1,109 @@ + +const STATUS_EVENT = 'status'; +const START_EVENT = 'start'; +const PROGRESS_EVENT = 'progress'; +const FAIL_EVENT = 'fail'; +const FINISH_EVENT = 'finish'; + +const DEFAULT_QUEUE_SIZE = 10; +/** + * The MessageQueue object is used to save the received messages until a certain queue + * size is reached. + * + * @param queueSize the size of the queue + * @constructor + */ +function MessageQueue(queueSize) { + this.queue_size = queueSize ? queueSize : DEFAULT_QUEUE_SIZE; + this.messages = []; + + this.isFull = function() { + return this.messages.length >= this.queue_size; + }; + + this.push = function(message) { + this.messages.push(message); + return this.isFull(); + }; + + // Empties the queue and returns the removed messages + this.empty = function() { + var oldMessages = this.messages; + this.messages = []; + return oldMessages; + }; +} + +/** + * The publisher object has the role of receiving the messages from the emitter and publishing them using a + * custom publisher. + * All the received messages will get queued until a FINISH_EVENT or FAIL_EVENT is received or a specified queue size + * is reached. + * @param emitter the event emitter + * @param publisher the publisher function to be used to publish the messages. It will receive the message to be + * published as parameter in the format of: + * { type: MESSAGE_TYPE, + * data: MESSAGE_CONTENT + * } + * where MESSAGE_TYPE will be one of: + * - STATUS_EVENT + * - PROGRESS_EVENT + * - FAIL_EVENT + * - FINISH_EVENT + * If a FAIL_EVENT or a FINISH_EVENT is received, the messages in the queue get flushed automatically.
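+ *                  For example (illustrative only), a publisher that simply logs every flushed batch:
+ *                    function logPublisher(messages) {
+ *                      messages.forEach(function(m) { console.log(m.type, m.data); });
+ *                    }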
+ * @param queueSize the size of the queue to be used to 'batch' the message publishing + * @constructor + */ +function ProgressPublisher(queueSize, publisher) { + + this.publisher = publisher; + + this.publishMessages = function(messageQueue, publisher, flush) { + if (flush || messageQueue.isFull()) { + publisher(messageQueue.empty()); + } + }; + + this.messageQueue = new MessageQueue(queueSize); +} + +ProgressPublisher.prototype.listen = function(emitter) { + this.evtEmitter = emitter; + + var self = this; + + self.evtEmitter.on(STATUS_EVENT, function(status) { + self.messageQueue.push({type: STATUS_EVENT, data: status}); + self.publishMessages(self.messageQueue, self.publisher); + }); + + self.evtEmitter.on(START_EVENT, function(message) { + self.messageQueue.push({type: START_EVENT, data: message}); + self.publishMessages(self.messageQueue, self.publisher); + }); + + self.evtEmitter.on(PROGRESS_EVENT, function(progress, current, total) { + self.messageQueue.push({type: PROGRESS_EVENT, data: progress, current: current, total: total}); + self.publishMessages(self.messageQueue, self.publisher); + }); + + self.evtEmitter.on(FINISH_EVENT, function(message) { + self.messageQueue.push({type: FINISH_EVENT, data: message}); + // flush all the messages + self.publishMessages(self.messageQueue, self.publisher, true); + }); + + self.evtEmitter.on(FAIL_EVENT, function(message) { + self.messageQueue.push({type: FAIL_EVENT, data: message}); + // flush all the messages + self.publishMessages(self.messageQueue, self.publisher, true); + }); +}; + +module.exports.ProgressPublisher = ProgressPublisher; + +module.exports.STATUS_EVENT = STATUS_EVENT; +module.exports.START_EVENT = START_EVENT; +module.exports.PROGRESS_EVENT = PROGRESS_EVENT; +module.exports.FAIL_EVENT = FAIL_EVENT; +module.exports.FINISH_EVENT = FINISH_EVENT; \ No newline at end of file diff --git a/lib/jobs/submissions/submissionExportJob.js b/lib/jobs/submissions/submissionExportJob.js new file mode 100644 index 0000000..c17eaf6 --- /dev/null +++ b/lib/jobs/submissions/submissionExportJob.js @@ -0,0 +1,114 @@ +var fhconfig = require('fh-config'); +var logger = require('../../util/logger').getLogger(); +var async = require('async'); +var SubmissionExportRunner = require('../../export/submissions/SubmissionExportRunner').SubmissionExportRunner; + +var SubmissionExportJob = require('../../models').SubmissionExportJob; +var ProgressPersistor = require('../progressPersistor').ProgressPersistor; + +//in milliseconds. Lock the job for 2 minutes. If the job is running, it should renew the job every 30 seconds +const DEFAULT_LOCK_LIFETIME = fhconfig.value('fhmbaas.appdataexport.default_lock_lifetime'); +//in milliseconds. Should be bigger than the lock lifetime to make sure the job is unlocked. 
+//If the job is running, it should delay the schedule every 30 seconds +const SCHEDULE_TIME = fhconfig.value('fhmbaas.appdataexport.schedule_time'); +const JOB_NAME = 'submissionExport'; + +var CONSTANTS = require('../../export/constants'); + +const FAIL_EVENT = require('../progressPersistor').FAIL_EVENT; +const FINISH_EVENT = require('../progressPersistor').FINISH_EVENT; +const HEARTBEAT_EVENT = CONSTANTS.HEARTBEAT_EVENT; + +const OUTPUT_DIR = fhconfig.value('fhmbaas.appdataexport.output_dir'); + +const LOG_TAG = '[SUBMISSIONEXPORT]'; + +function createContext(exportJob, jobID, outputDir) { + var TaggedLogger = require('./taggedLogger').TaggedLogger; + return { + exportJob: exportJob, + jobID: jobID, + outputDir: outputDir, + logger : new TaggedLogger(logger.child({job: jobID }), LOG_TAG) + }; +} + +module.exports = function(agenda) { + + logger.info('defining agenda job', DEFAULT_LOCK_LIFETIME, SCHEDULE_TIME); + + agenda.define(JOB_NAME, {lockLifetime: DEFAULT_LOCK_LIFETIME}, function(job, done) { + var jobId = job.attrs.data.jobId; + logger.info(LOG_TAG + ' Start running job', {jobId: jobId}); + + //schedule another run in case this one fails + scheduleNextRun(job, new Date(Date.now() + SCHEDULE_TIME)); + + async.waterfall([ + function(cb) { + SubmissionExportJob.findById(jobId, function(err, exportJob) { + return cb(err, exportJob); + }); + } + ], function(err, exportJob) { + //if there is an error getting the job details, then a runner cannot be instantiated. + if (err) { + logger.warn(LOG_TAG + ' Error getting job details. Cannot run job with id ' + jobId, err); + return; + } + + var context = createContext(exportJob, jobId, OUTPUT_DIR); + var runner = new SubmissionExportRunner(context); + + // Persist the progress of the runner + new ProgressPersistor(context.logger).listen(runner, exportJob, function(err) { + if (err) { + logger.error(LOG_TAG + ' Error persisting progress', err); + } + }); + + runner.on(FAIL_EVENT, function(message) { + logger.error(LOG_TAG + ' Job failed. Disabling to prevent future executions', {message : message}); + disableJob(job, function(err) { + if (err) { + logger.error(LOG_TAG + ' Error disabling job', {err: err}); + } + return done(err); + }); + }); + + runner.on(FINISH_EVENT, function(message) { + logger.info(LOG_TAG + ' Job finished. 
Disabling to prevent future executions', {message : message}); + disableJob(job, function(err) { + if (err) { + logger.error(LOG_TAG + ' Error disabling job', {err: err}); + } + return done(); + }); + }); + runner.on(HEARTBEAT_EVENT, function() { + //renew the lock + + logger.info(LOG_TAG + ' Heartbeat received', {jobId: jobId}); + + job.touch(function() { + logger.debug('%s job touched', LOG_TAG); + }); + //since current job is still running, push the schedule further + scheduleNextRun(job, new Date(Date.now() + SCHEDULE_TIME)); + }); + + runner.run(); + }); + }); +}; + +function scheduleNextRun(job, when, cb) { + job.schedule(when); + job.save(cb); +} + +function disableJob(job, cb) { + job.disable(); + job.save(cb); +} \ No newline at end of file diff --git a/lib/jobs/taggedLogger.js b/lib/jobs/taggedLogger.js new file mode 100644 index 0000000..fb07ec4 --- /dev/null +++ b/lib/jobs/taggedLogger.js @@ -0,0 +1,39 @@ +function _log(logger, logMethod, tag, message, data) { + if (data) { + logMethod.call(logger, '%s %s ', tag, message, data); + } else { + logMethod.call(logger, '%s %s', tag, message); + } +} + +function TaggedLogger(rootLogger, tag) { + + this.logger = rootLogger; + this.tag = tag; + + this.fatal = function(message, data) { + _log(this.logger, this.logger.fatal, this.tag, message, data); + }; + + this.error = function(message, data) { + _log(this.logger, this.logger.error, this.tag, message, data); + }; + + this.warn = function(message, data) { + _log(this.logger, this.logger.warn, this.tag, message, data); + }; + + this.info = function(message, data) { + _log(this.logger, this.logger.info, this.tag, message, data); + }; + + this.trace = function(message, data) { + _log(this.logger, this.logger.trace, this.tag, message, data); + }; + + this.debug = function(message, data) { + _log(this.logger, this.logger.debug, this.tag, message, data); + }; +} + +module.exports.TaggedLogger = TaggedLogger; \ No newline at end of file diff --git a/lib/messageHandlers/deployStatusHandler.js b/lib/messageHandlers/deployStatusHandler.js new file mode 100644 index 0000000..897cad8 --- /dev/null +++ b/lib/messageHandlers/deployStatusHandler.js @@ -0,0 +1,63 @@ +var amqp = require('../util/amqp.js'); +var supercoreApi = require("../util/supercoreApiClient"); +var logger; +var models = require('fh-mbaas-middleware').models; + +function deployStatusUpdate(json) { + logger.debug("Deploy status received: ", json); + if (!json || !json.appname) { + logger.error("Failed to send deploy status to supercore. appname variable missing", json); + return; + } + var AppMbaasModel = models.getModels().AppMbaas; + AppMbaasModel.findOne({ + name: json.appname + }, function(err, appMbaas) { + if (err) { + return logger.error('Failed to lookup mbaasApp model with appName ' + json.appname); + + } + if (!appMbaas) { + return logger.error('No appMbaasModel found for app ' + json.appname); + } + json.appGuid = appMbaas.guid; + json.domain = appMbaas.domain; + json.env = appMbaas.environment; + var supercoreMessageType = "deployStatus"; + supercoreApi.sendMessageToSupercore(appMbaas.coreHost, supercoreMessageType, json, function(err) { + if (err) { + logger.error("Failed to send deploy status to supercore:", err, json); + } + }); + }); +} + +/** + * Listen to amqp deploy status updates and publish them to supercore. 
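+ * + * For orientation (an assumed example payload, inferred from deployStatusUpdate() above rather than taken + * from this patch), a message arriving on the fh.deployStatus.# topic looks roughly like: + *   { "appname": "mydomain-myapp-dev", "status": "complete" } + * deployStatusUpdate() then looks up the AppMbaas model and adds appGuid, domain and env before forwarding + * the message to supercore. + *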
+ * + * @param conf - main configuration that contains amqp url + * @param amqpConnection - connection to amqp queue - can be undefined when amqp is disabled + */ +function listenToDeployStatus(amqpConnection, conf, callback) { + logger = require('../util/logger').getLogger(); + if (amqpConnection) { + var exchangePrefix = amqp.getExchangePrefix(conf); + //we need to namespace both the exchange and the queue to make sure fh-mbaas + //will not receive messages that do not belong to the same mbaas + var exchangeName = exchangePrefix + '-fh-internal'; + var queueName = 'fh-' + exchangePrefix + '-appDeployStatus'; + logger.info("Subscribing to amqp queue", {exchangeName: exchangeName, queueName: queueName}); + amqpConnection.subscribeToTopic(exchangeName, queueName, 'fh.deployStatus.#', deployStatusUpdate, function(err) { + if (err) { + logger.error("Cannot subscribe to amqp queue", {exchangeName: exchangeName, queueName: queueName, err: err}); + } + callback(); + }); + } else { + logger.warn("Skipping amqp setup for deploy status listerner"); + callback(); + } +} + +module.exports.listenToDeployStatus = listenToDeployStatus; +module.exports.deployStatusUpdate = deployStatusUpdate; diff --git a/lib/messageHandlers/migrationStatusHandler.js b/lib/messageHandlers/migrationStatusHandler.js new file mode 100644 index 0000000..f075f6f --- /dev/null +++ b/lib/messageHandlers/migrationStatusHandler.js @@ -0,0 +1,125 @@ +var amqp = require('../util/amqp.js'); +var supercoreApi = require("../util/supercoreApiClient"); +var log = require('../util/logger'); +var async = require('async'); +var logger; + +var models = require('fh-mbaas-middleware').models; +var dfutils = require('../util/dfutils.js'); + +function completeMigrateDbMiddleware(appMbaas, appInfo, cb) { + var appName = appInfo.appName; + async.series([ + function updateModel(callback) { + if (appInfo.status === 'finished') { + appMbaas.set('migrated', true); + appMbaas.save(function(err) { + if (err) { + logger.error('Failed to save appMbaas', err); + } + return callback(); + }); + } else { + return callback(); + } + }, + function reloadEnv(callback) { + logger.debug('Refresh app envs : ' + appMbaas.name); + dfutils.reloadEnv(appMbaas.domain, appMbaas.environment, appMbaas.name, callback); + }, + function stopAppDbMigrate(callback) { + logger.debug('Set app to stopped: ' + appName); + dfutils.migrateAppDb('stop', appMbaas.domain, appMbaas.environment, appMbaas.name, callback); + } + ], function(err) { + if (err) { + logger.error('[MIGRATION] failed to change app state', {app: appName, err: err}); + return cb(err); + } + logger.info('[MIGRATION] app state changed', {appName: appName}); + return cb(); + }); +} + +function migrationUpdate(json) { + var appGuid = json.appGuid, + env = json.env, + domain = json.domain, + appName = json.appName, + status = json.status, + securityToken = json.securityToken, + messages = json.messages; + logger.debug("Migration update received: ", json); + var AppMbaasModel = models.getModels().AppMbaas; + AppMbaasModel.findOne({ + name: appName + }, function(err, appMbaas) { + if (err) { + return logger.error('Failed to lookup mbaasApp model with appName ' + appName); + + } + if (!appMbaas) { + return logger.error('No appMbaasModel found for app ' + appName); + } + async.parallel([ + function updateCore(callback) { + var data = { + securityToken: securityToken, + domain: domain, + env: env, + appName: appName, + appGuid: appGuid, + messages: messages, + status: status + }; + var supercoreMessageType = 
"migrationStatus"; + supercoreApi.sendMessageToSupercore(appMbaas.coreHost, supercoreMessageType, data, callback); + }, + function updateApp(callback) { + if (status.toLowerCase() === 'finished' || status.toLowerCase() === 'failed') { + var appInfo = { + domain: domain, + appName: appName, + appGuid: appGuid, + env: env, + status: status + }; + return completeMigrateDbMiddleware(appMbaas, appInfo, callback); + } else { + return callback(); + } + } + ], function(err) { + if (err) { + logger.error('Failed to do migrationUpdate', {error: err}); + } + }); + }); +} + +/** + * Listen to amqp migration status updates and publish it to supercore. + * + * @param conf - main configuration that contains amqp url + * @param amqpConnection - connection to amqp queue - can be undefined when amqp is disabled + */ +function listenToMigrationStatus(amqpConnection, conf) { + logger = log.getLogger(); + if (amqpConnection) { + var exchangePrefix = amqp.getExchangePrefix(conf); + //we need to namespace both the exchange and the queue to make sure fh-mbaas + //will not receive messages that do not belong to the same mbaas + var exchangeName = exchangePrefix + '-fh-internal'; + var queueName = 'fh-' + exchangePrefix + '-dbMigrationUpdate'; + logger.info("Subscribing to amqp queue", {exchangeName: exchangeName, queueName: queueName}); + amqpConnection.subscribeToTopic(exchangeName, queueName, 'fh.dbMigrate.#', migrationUpdate, function(err) { + if (err) { + logger.error("Cannot subscribe to amqp queue", {exchangeName: exchangeName, queueName: queueName, err: err}); + } + }); + } else { + logger.warn("Skipping amqp setup for migration status listerner"); + } +} + +module.exports.listenToMigrationStatus = listenToMigrationStatus; diff --git a/lib/middleware/appdata.js b/lib/middleware/appdata.js new file mode 100644 index 0000000..6c0094b --- /dev/null +++ b/lib/middleware/appdata.js @@ -0,0 +1,31 @@ +var AppdataJob = require('../models').AppdataJob; +var jobTypes = require('../models/BaseImportExportJobSchema').types; +var appExportController = require('../export/appDataExportController'); +var builder = require('./buildJobMiddleware'); + +function buildFilter(req, res, next) { + if (!req.params.appid && !req.params.environment) { + return next(builder.createErrorObject(400, "Missing required arguments: envId, appId")); + } else if (!req.params.appid) { + return next(builder.createErrorObject(400, "Missing required argument appId")); + } else if (!req.params.environment) { + return next(builder.createErrorObject(400, "Missing required argument envId")); + } + return { + jobType: jobTypes.EXPORT, + appid: req.params.appid, + environment:req.params.environment + }; +} + +var middleware = builder( + AppdataJob, + appExportController.startExport.bind(appExportController), + buildFilter); + +middleware.fillStopApp = function(req, res, next) { + req.params.stopApp = req.body.stopApp; + next(); +}; + +module.exports = middleware; \ No newline at end of file diff --git a/lib/middleware/appdata_import.js b/lib/middleware/appdata_import.js new file mode 100644 index 0000000..a8418ad --- /dev/null +++ b/lib/middleware/appdata_import.js @@ -0,0 +1,125 @@ +var AppdataJob = require('../models').AppdataJob; +var jobTypes = require('../models/BaseImportExportJobSchema').types; +var importController = require('../import/appDataImportController'); +var builder = require('./buildJobMiddleware'); +var fhconfig = require('fh-config'); +var storage = require('../storage'); +var path = require('path'); +var models = 
require('fh-mbaas-middleware').models; + +// The location for all uploaded files +const UPLOAD_PATH = fhconfig.value('fhmbaas.appdata_jobs.upload_dir'); + +/** + * This middleware injects the filter criteria into the request that is used + * to filter for appdata import jobs. + * + * @param req Http request + * @param res Http response + * @param next Invoke next middleware + */ +function buildFilter(req, res, next) { + if (!req.params.appid && !req.params.environment) { + return next(builder.createErrorObject(400, "Missing required arguments: envId, appId")); + } else if (!req.params.appid) { + return next(builder.createErrorObject(400, "Missing required argument appId")); + } else if (!req.params.environment) { + return next(builder.createErrorObject(400, "Missing required argument envId")); + } + return { + jobType: jobTypes.IMPORT, + appid: req.params.appid, + environment:req.params.environment + }; +} + +/** + * Since export and import tasks don't differ a lot between appdata and submissions we + * can use a builder to create a standard middleware. We only specify: + * + * 1) model: which job model to use (AppdataJob, SubmissionsJob) + * 2) spawnFn: which function will be invoked to create the job + * 3) filterFn: which functino will be used to filter the jobs (for + * endpoints like list, read) + * + * The created middleware object will contain all function required for list, read, create + * and generateUrl (though this one is not required for appdata import). + */ +var middleware = builder( + AppdataJob, + importController.startImportJob.bind(importController), + buildFilter); + +/** + * Append properties from body to params. Filename and filesize will be sent + * in the request body. But in later stages we don't want to distinguish between two + * different locations for parameters. So we append all of them to the params array. + * + * @param req Http request + * @param res Http response + * @param next Invoke next middleware + */ +middleware.fillBody = function(req, res, next) { + req.params.filename = req.body.filename; + req.params.filesize = req.body.filesize; + next(); +}; + +/** + * Before the upload starts we already have to register a file in the storage. This is + * required to generate the URL that will be used for the upload. The initial request also + * contains the future filename and filesize. + * + * @param req Http request + * @param res Http response + * @param next Invoke next middleware + */ +middleware.registerUpload = function(req, res, next) { + // filename and filesize are required. 0 is regarded as invalid filesize. + if (!req.params.filename || !req.params.filesize) { + return next(new Error("File name or size missing or invalid")); + } + + // Create the absolute path to the file in the storage (where it will be after upload) + var filePath = path.join(UPLOAD_PATH, req.params.filename); + storage.registerFileForUpload(filePath, req.params.filesize, function(err, fileEntry) { + if (err) { + return next(err); + } + + req.params.fileId = fileEntry._id; + next(); + }); +}; + +/** + * Make sure that the app is migrated before we accept an upload. Only migrated + * apps are supported for import. 
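+ * + * For illustration only (the route path and middleware order below are assumptions; the real wiring lives in + * the route definitions, not in this file), these helpers are typically chained along the lines of: + *   router.post('/:appid/data/import', middleware.fillBody, middleware.ensureMigrated, + *     middleware.registerUpload, middleware.create, ...); + *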
+ * @param req Http request + * @param res Http response + * @param next Invoke next middleware + */ +middleware.ensureMigrated = function(req, res, next) { + var AppMbaasModel = models.getModels().AppMbaas; + var env = req.params.environment; + var appGuid = req.params.appid; + + AppMbaasModel.findOne({guid: appGuid, environment: env}, function(err, app) { + if (err) { + return next(err); + } + + if (!app) { + return next(new Error('No app with guid "' + appGuid + '" could be found')); + } + + if (!app.dbConf || !app.migrated) { + // The app has not been upgraded yet + return next(new Error('The app has not been migrated yet. Import aborted')); + } + + next(); + }); +}; + +module.exports = middleware; \ No newline at end of file diff --git a/lib/middleware/auth.js b/lib/middleware/auth.js new file mode 100644 index 0000000..a78959a --- /dev/null +++ b/lib/middleware/auth.js @@ -0,0 +1,29 @@ +var appAuth = require('fh-mbaas-middleware').auth; + +/** + * Authentication For Administrative APIs in mbaas (/api/mbaas) + * @param fhconfig + * @returns {Function} + */ +function adminAuth(fhconfig) { + + var servicekey = fhconfig.value('fhmbaas.key'); + + return function(req, res, next) { + if (servicekey && servicekey.length > 0) { + var key = req.get('x-fh-service-key'); + if (key === servicekey) { + next(); + } else { + res.status(401).end(); + } + } else { + next(); + } + }; +} + +module.exports = { + admin: adminAuth, + app: appAuth +}; diff --git a/lib/middleware/buildJobMiddleware.js b/lib/middleware/buildJobMiddleware.js new file mode 100644 index 0000000..45ef634 --- /dev/null +++ b/lib/middleware/buildJobMiddleware.js @@ -0,0 +1,119 @@ +var storage = require('../../lib/storage'); +var logger = require('../util/logger').getLogger(); +var mongoose = require('mongoose'); +var _ = require('underscore'); +var jobStatuses = require('../models/BaseImportExportJobSchema').statuses; +function createErrorObject(code, message) { + var err = new Error(); + err.message = message; + err.code = code; + return err; +} +/** + * Creats CRUD middleware for a job export + * @param {Mongoose.model} model Job model, should inherit from BaseImportExportJobSchema + * @param {Function(Object, Function(Error, Mongoose.model))} spawnFn Function to spawn a job runner, + * will receive all request params and a node-style callback for a new model + * @param {Function(Request, Response, Next(Error))} filterFn Function to build a filter for the underlying collection + * @return {Object} An object containing the following express middleware: + * - find: parameter middleware to read single job by id, populates `req.job` + * - all: Return all jobs, populates `req.jobs` + * - crate: invokes `spawnFn` to start a new job, and populates `req.job` + */ +module.exports = function(model, spawnFn, filterFn) { + var middleware = {}; + + middleware.find = function(req, res, next, jobId) { + if (!jobId || ('ObjectID' !== jobId.constructor.name && !mongoose.Types.ObjectId.isValid(jobId))) { + return next(createErrorObject(404, "Job not found")); + } + + model.findById(jobId, function(err, job) { + if (err) { + return next(createErrorObject(500, "Error retrieving job")); + } + + if (!job) { + return next(createErrorObject(404, "Job not found")); + } + req.job = job; + return next(); + }); + }; + + middleware.filteredJobs = function(req, res, next) { + var filter = {}; + if (_.isFunction(filterFn)) { + filter = filterFn(req, res, next); + } + model.find(filter, function(err, jobs) { + if (err) { + err.code = 500; + return next(err); + 
} + req.jobs = jobs; + return next(); + }); + }; + + + middleware.create = function(req, res, next) { + spawnFn(req.params, function(err, job) { + if (err) { + logger.error("Cannot trigger export.", {err: err}); + return next(err); + } + req.job = job; + next(); + }); + }; + + middleware.ensureFinishedAndRegistered = function(req, res, next) { + var error; + if (req.job.status !== jobStatuses.FINISHED) { + error = new Error('Job still not finished. Current state: ' + req.job.status); + error.code = 400; + return next(error); + } + + if (!req.job.metadata || !req.job.metadata.fileId) { + error = new Error('Job has no registered file'); + error.code = 400; + return next(error); + } + if (!req.job.metadata || req.job.metadata.fileDeleted) { + error = new Error('File for this job has already been deleted'); + // HTTP Gone + error.code = 410; + return next(error); + } + next(); + }; + + // Generates URLs for download and upload routes + function generateStorageUrl(req, fileId, expires, jobId, next) { + storage.generateURL(fileId, expires, jobId, function(err, url) { + if (err) { + logger.error("Error generating url", err); + err.code = 500; + return next(err); + } + req.fileUrl = url; + return next(); + }); + + } + + middleware.generateURL = function(req, res, next) { + generateStorageUrl(req, req.job.metadata.fileId, null, 0, next); + }; + + // The upload URL requires the jobId in the route + middleware.generateURLForUpload = function(req, res, next) { + generateStorageUrl(req, req.job.metadata.fileId, req.job._id.toString(), 0, next); + }; + + return middleware; +}; + +module.exports.createErrorObject = createErrorObject; \ No newline at end of file diff --git a/lib/middleware/events.js b/lib/middleware/events.js new file mode 100644 index 0000000..537033c --- /dev/null +++ b/lib/middleware/events.js @@ -0,0 +1,37 @@ +var fhmiddleware = require("fh-mbaas-middleware"); +var fhamqpjs = require('fh-amqp-js'); +var assert = require('assert'); + +module.exports.createAppEvent = function(eventType,message) { + + function getEvent(eventType) { + var eventTypes = fhamqpjs.EventTypes; + var event = eventTypes.core[eventType] || eventTypes.dynoman[eventType] || eventTypes.monit[eventType] || eventTypes.supercore[eventType] || eventTypes.openshift[eventType]; + assert.ok(event, 'Unknown eventType: ' + eventType); + return event; + } + + function getEventMessage(event,guid, domain, env, email, details) { + return { + uid: guid, + eventType: event.eventType, + env: env, + details: details, + timestamp: new Date().getTime(), + domain: domain, + updatedBy: email, + eventClass: event.eventClass, + eventLevel: event.eventLevel + }; + } + return function(req,res,next) { + var guid = req.body.guid; + var email = req.body.email; + var domain = req.params.domain; + var env = req.params.environment; + var event = getEvent(eventType); + var details = getEventMessage(event, guid, domain, env, email, {"message": message}); + fhmiddleware.events.triggerEvent(details); + next(); + }; +}; diff --git a/lib/middleware/mbaasApp.js b/lib/middleware/mbaasApp.js new file mode 100644 index 0000000..ca83cc4 --- /dev/null +++ b/lib/middleware/mbaasApp.js @@ -0,0 +1,232 @@ +var mongo = require('../util/mongo.js'); +var common = require('../util/common.js'); +var logger = require('../util/logger').getLogger(); +var config = require('fh-mbaas-middleware').config(); +var ditchhelper = require('../util/ditchhelper.js'); +var models = require('fh-mbaas-middleware'); +var appEnv = require('../models/appEnv.js'); +var validation = 
require('../util/validation.js'); +var _ = require('underscore'); +var fhconfig = require('fh-config'); +var removeAppDB = require('../services/appmbaas/removeAppDb.js'); + +function getAppDbConf(appName, config) { + var db_name = appName; + var user = common.randomUser(); + var pass = common.randomPassword(); + //we do not save the db replicaSet name here just in case it could be changed + var db = { + host: config.mongo.host, + port: config.mongo.port, + name: db_name, + user: user, + pass: pass + }; + return db; +} + +function getSecurityToken(req, res) { + var key; + try { + key = validation.requireParam('securityToken', req, res); + } catch (exception) { + key = null; + } + return key; +} + +function createDatabaseMigrationMiddleware(req, res, next) { + var model = req.appMbaasModel; + var securityToken = getSecurityToken(req, res); + + if (!_.isObject(model)) { + return next(new Error("Invalid Parameters mbaas app not found")); + } + + //Checking if the app has already been migrated + if (model.migrated === true) { + res.status(423); + return next(new Error('Locked - Migration is already completed')); + } + + var name = model.name; + if (securityToken) { + createDbForAppTypes(["feedhenry"])(req, res, next); + } else { + return next(new Error('No securityToken found for app ' + name)); + } +} + +function createDbForAppTypes(types) { + return function middleware(req, res, next) { + var model = req.appMbaasModel; + if (_.has(model.dbConf, "host")) { + return next(); + } + if (types && types.indexOf(model.type) < 0) { + logger.info("not creating db for non openshift3 app ", req.appMbaasModel.type); + return next(); + } + var dbConfig = getAppDbConf(req.appMbaasModel.name, config); + try { + common.checkDbConf(dbConfig); + } catch (e) { + logger.error({ db: dbConfig }, 'db validation failed'); + return next(e); + } + model.dbConf = dbConfig; + model.markModified('dbConf'); + logger.info({ app: model.name, db: dbConfig }, 'try to create database for app'); + mongo.createDb(config, dbConfig.user, dbConfig.pass, dbConfig.name, function (err) { + if (err) { + logger.error(err, 'Failed to create db : %s', dbConfig.name); + return next(err); + } else { + logger.info(dbConfig, 'Database created'); + //Saving The Updated Db Conf + model.save(function (err) { + return next(err, dbConfig); + }); + } + }); + }; +} + +function stopAppMiddleware(req, res, next) { + var model = req.appMbaasModel; + + if (!_.isObject(model)) { + return next(new Error("Invalid Parameters mbaas app not found")); + } + var domain = model.domain; + var env = model.environment; + var name = model.name; + + logger.info({ app: name }, 'Stop app'); + + var dfutils = require('../util/dfutils.js'); + + dfutils.stopApp(domain, env, name, function (err) { + + if (err) { + return next(new Error('Failed to stop app ' + name, err)); + } + logger.info({ app: name }, 'App stopped'); + return next(); + }); +} + +function notifyAppDbMigration(action) { + return function (req, res, next) { + var model = req.appMbaasModel; + + if (!_.isObject(model)) { + return next(new Error("Invalid Parameters mbaas app not found")); + } + var domain = model.domain; + var env = model.environment; + var name = model.name; + + + logger.info({ app: name }, 'migrateAppDb app'); + + var dfutils = require('../util/dfutils.js'); + + dfutils.migrateAppDb(action, domain, env, name, function (err) { + + if (err) { + return next(new Error('Failed to set app to migrate ' + name, err)); + } + logger.info({ app: name }, 'App set to migrate'); + 
return next(); + }); + }; +} + +function migrateDbMiddleware(req, res, next) { + var model = req.appMbaasModel; + + if (!_.isObject(model)) { + return next(new Error("Invalid Parameters mbaas app not found")); + } + var coreHost = req.body.coreHost; + if (!coreHost) { + return next(new Error("Missing coreHost parameter")); + } + var securityToken = getSecurityToken(req, res); + var name = model.name; + var domain = model.domain; + var env = model.environment; + var appGuid = model.guid; + logger.info({ app: name }, 'Migrate App db'); + ditchhelper.doMigrate(domain, env, name, securityToken, appGuid, coreHost, function (err, result) { + if (err) { + return next(new Error('Error when trying to migrate db for app ' + name, err)); + } + logger.info({ app: name }, 'App migrated '); + req.createDbResult = result; + next(); + }); +} + +function removeDbMiddleware(req, res, next) { + var model = req.appMbaasModel; + if (!_.isObject(model)) { + logger.warn("Mbaas App Not Found", req.params); + return next(new Error("Invalid Parameters mbaas app not found")); + } else { + + var domain = model.domain; + var env = model.environment; + + removeAppDB(mongo, domain, model, env, function complete(err, removed) { + if (err) { + return next(err); + } else { + req.resultData = removed; + next(); + } + }); + } +} + +function modelsInfo(req, res, next) { + var domain = req.params.domain; + var env = req.params.environment; + var model = req.appMbaasModel; + var mBaas = models.mbaas(); + + if (!model) { + logger.warn("Mbaas App Not Found", req.params); + return next(new Error("Invalid Parameters mbaas app not found")); + } + + logger.debug({ name: model.name }, 'getting env vars for app', req.originalUrl); + + mBaas.findOne({ domain: domain, environment: env }, function (err, mbaas) { + if (err) { + logger.error(err, 'Failed to look up Mbaas/AppMbaas instance'); + return next(new Error('Failed to look up Mbaas/AppMbaas instance')); + } + var envs = appEnv[req.appMbaasModel.type]({ + mbaas: mbaas, + appMbaas: model, + fhconfig: fhconfig, + jsonConfig: config + }); + req.resultData = { env: envs }; + return next(); + }); +} + + + +module.exports = { + createDbMiddleware: createDatabaseMigrationMiddleware, + createDbForAppTypes: createDbForAppTypes, + stopAppMiddleware: stopAppMiddleware, + migrateDbMiddleware: migrateDbMiddleware, + removeDbMiddleware: removeDbMiddleware, + modelsInfo: modelsInfo, + notifyAppDbMigration: notifyAppDbMigration +}; diff --git a/lib/models/AppdataJobSchema.js b/lib/models/AppdataJobSchema.js new file mode 100644 index 0000000..6f8a823 --- /dev/null +++ b/lib/models/AppdataJobSchema.js @@ -0,0 +1,7 @@ +module.exports = require('./BaseImportExportJobSchema')({ + 'appid':{ + type: String, + required: true, + index: true + } +}); diff --git a/lib/models/BaseImportExportJobSchema.js b/lib/models/BaseImportExportJobSchema.js new file mode 100644 index 0000000..62f824f --- /dev/null +++ b/lib/models/BaseImportExportJobSchema.js @@ -0,0 +1,218 @@ +var timestamps = require('mongoose-timestamp'); +var mongoose = require('mongoose'); +var _ = require('underscore'); +var Schema = mongoose.Schema; +var common = require('../util/common'); + +/** + * + * Base Schema for all import / export jobs. 
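+ * + * Concrete job schemas extend this base by passing their extra fields to the exported factory, as + * AppdataJobSchema.js does above; a minimal sketch of the same pattern (the field name is illustrative): + *   module.exports = require('./BaseImportExportJobSchema')({ + *     'appid': { type: String, required: true, index: true } + *   }); + *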
+ * + * This includes: + * + * - App Data Import + * - App Data Export + * - Submission Export + * - Submission Import + * + * - environment: string - environment id + * - domain: string - the name of the domain + * - status: + * - "created": model was just created, pending runner spawn (default) + * - "running": mongodump or mongorestore underway for an export or import task + * - "complete": mongodump or mongorestore process completed successfully + * - "failed": internal process failed irrecoverably (job abandoned and files deleted) + * - step: integer - the current step inside the Status, + * i.e. a percent for downloading, # of exported collections for exporting, etc. + * - totalSteps: integer - total number of steps in the Status, see step + * - stepTimestamp: datetime - time of last step change + * - fileSize: integer - in bytes + * - fileDeleted: boolean - whether the file was deleted by the cleanup job + * - filePath: string - local path for related file (at the moment we're thinking of a gzip with a single + * big mongodump or a tarball with multiple mongodumps) + * + * - fileId: string - for storing the file provided by the download service + * - progress: Mixed - arbitrary metadata for recording internal progress on a task, + * i.e. while exporting, could be an array for tracking collections already exported + * @type {Schema} + */ +var BASE_SCHEMA_OPTIONS = { + 'jobType': { + type: String, + required: true, + enum: [ + 'export', + 'import', + 'submissionExport' + ] + }, + 'environment':{ + type: String, + required: true, + index: true + }, + 'domain':{ + type: String, + required: true, + index: true + }, + 'status':{ + type: String, + required: true, + enum: [ + 'created', + 'running', + 'complete', + 'failed' + ], + default: 'created' + }, + + // Job progress data + 'step': Number, + 'totalSteps': Number, + 'progress': Schema.Types.Mixed, + 'metadata': Schema.Types.Mixed, + 'logs': [String] +}; + +const statuses = { + "QUEUED": "created", + "INPROGRESS": "running", + "FINISHED": "complete", + "FAILED": "failed" +}; +const types = { + "EXPORT": "export", + "IMPORT": "import" +}; +module.exports = function(options) { + var BaseImportExportJobSchema = new Schema(_.extend(options, BASE_SCHEMA_OPTIONS)); + + BaseImportExportJobSchema.plugin(timestamps, { + createdAt: 'created', + updatedAt: 'modified' + }); + + BaseImportExportJobSchema.statuses = statuses; + BaseImportExportJobSchema.types = types; + + BaseImportExportJobSchema.methods.updateMetadata = function(field, value) { + this.metadata[field] = value; + this.markModified("metadata"); + }; + + /** + * Mark the job as failed + * @param {String} reason the error message, can be null + * @param {Function} cb + */ + BaseImportExportJobSchema.methods.fail = function(reason, cb) { + this.set('status', BaseImportExportJobSchema.statuses.FAILED); + if (reason) { + this.logs.push(reason); + } + this.save(cb); + }; + + /** + * Check if the job is ready to run + * @return {Boolean} + */ + BaseImportExportJobSchema.methods.readyToProceed = function() { + if (this.jobType === types.IMPORT ) { + return this.metadata && (this.metadata.uploadFinished === true); + } else { + return true; + } + }; + + /** + * Check & update the current state of the job. This is mainly for import jobs, to record the current uploaded file size. 
+ * @param {Function} cb [description] + * @return {[type]} [description] + */ + BaseImportExportJobSchema.methods.checkCurrentState = function(cb) { + var self = this; + if (this.jobType === types.IMPORT) { + if (this.metadata && this.metadata.filePath) { + common.readFileSize(this.metadata.filePath, function(err, size) { + if (err) { + return cb(err); + } + self.metadata.currentFileSize = size; + self.save(cb); + }); + } else { + return cb(); + } + } else { + return cb(); + } + }; + + /** + * Find the jobs that are appeared to be running but actually not. + * It will find the jobs that are: + * * the status is in progress + * * but the modified field is before (currentTime - timeout) + * @param {int} timeout the job heartbeat frequency (milliseconds) + * @param {Function} cb + */ + BaseImportExportJobSchema.statics.stalledJobs = function(timeout, cb) { + var lastUpdated = new Date().getTime() - timeout; + this.find({ + status: BaseImportExportJobSchema.statuses.INPROGRESS, + modified:{ + $lt: new Date(lastUpdated) + } + }, cb); + }; + + /** + * Find the real running jobs. Those jobs should be + * * status is in progress + * * the modified field is updated within (currentTime - timeout) + * @param {int} timeout the job heartbeat frequency (milliseconds) + * @param {Function} cb + */ + BaseImportExportJobSchema.statics.runningJobs = function(timeout, cb) { + var lastUpdated = new Date().getTime() - timeout; + this.find({ + status: BaseImportExportJobSchema.statuses.INPROGRESS, + modified:{ + $gte: new Date(lastUpdated) + } + }, function(err, jobs) { + return cb(err, jobs); + }); + }; + + /** + * Find the next job to run. + * @param {Function} cb + */ + BaseImportExportJobSchema.statics.findNextJob = function(cb) { + + // Find all the jobs that : + // are in 'created' state and are of type 'export' + // or are in 'created' state and have type 'import' and upladFinished == true + var qry = { + status: BaseImportExportJobSchema.statuses.QUEUED, + $or: [ + { + jobType: types.IMPORT, + 'metadata.uploadFinished': true + }, + {jobType: types.EXPORT} + ] + }; + + this.find(qry).sort('created').limit(1).exec(cb); + }; + + return BaseImportExportJobSchema; +}; + +module.exports.statuses = statuses; +module.exports.types = types; \ No newline at end of file diff --git a/lib/models/SubmissionDataJobSchema.js b/lib/models/SubmissionDataJobSchema.js new file mode 100644 index 0000000..e5ce2a0 --- /dev/null +++ b/lib/models/SubmissionDataJobSchema.js @@ -0,0 +1,5 @@ + +/** + * Mongoose Schema for Importing / Exporting Submissions + */ +module.exports = require('./BaseImportExportJobSchema')({}); diff --git a/lib/models/appEnv.js b/lib/models/appEnv.js new file mode 100644 index 0000000..e1bd814 --- /dev/null +++ b/lib/models/appEnv.js @@ -0,0 +1,118 @@ +var common = require('../util/common'); +var url = require('url'); +var appEnv = require('fh-mbaas-middleware').appEnv; + +function _parseMbaasUrl(mbaasUrl) { + mbaasUrl = url.parse(mbaasUrl); + var mbaasProtocol = mbaasUrl.protocol ? mbaasUrl.protocol : "https"; + mbaasProtocol = mbaasProtocol.replace(":", ""); + + return { + host: mbaasUrl.host, + protocol: mbaasProtocol + }; +} + +/** + * Getting Environment Variables For A Dynofarm App. 
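+ * + * The returned map is a plain object of environment variable names to values, roughly of the shape + * (the values here are illustrative only): + *   { FH_MBAAS_HOST: 'mbaas.example.com', FH_MBAAS_PROTOCOL: 'https', FH_AMQP_APP_ENABLED: true, ... } + * with FH_MONGODB_CONN_URL added only once the app has been migrated to its own database. + *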
+ * @param params + * @returns {{}} + */ +function getFeedhenryEnvVars(params) { + var appMbaas = params.appMbaas; + var fhconfig = params.fhconfig; + + var mbaasUrl = _parseMbaasUrl(appMbaas.mbaasUrl); + + var appEnvs = {}; + appEnvs.FH_AMQP_APP_ENABLED = fhconfig.bool('fhamqp.app.enabled'); + appEnvs.FH_AMQP_CONN_MAX = fhconfig.int('fhamqp.max_connection_retry'); + appEnvs.FH_AMQP_NODES = fhconfig.value('fhamqp.nodes'); + appEnvs.FH_AMQP_VHOST = fhconfig.value('fhamqp.vhosts.events.name'); + appEnvs.FH_AMQP_USER = fhconfig.value('fhamqp.vhosts.events.user'); + appEnvs.FH_AMQP_PASS = fhconfig.value('fhamqp.vhosts.events.password'); + appEnvs.FH_DITCH_HOST = fhconfig.value('fhditch.host'); + appEnvs.FH_DITCH_PORT = fhconfig.int('fhditch.port'); + appEnvs.FH_DITCH_PROTOCOL = fhconfig.value('fhditch.protocol'); + appEnvs.FH_MESSAGING_BACKUP_FILE = fhconfig.value('fhmessaging.files.backup_file'); + appEnvs.FH_MESSAGING_CLUSTER = fhconfig.value('fhmessaging.cluster'); + appEnvs.FH_MESSAGING_ENABLED = fhconfig.bool('fhmessaging.enabled'); + appEnvs.FH_MESSAGING_HOST = fhconfig.value('fhmessaging.host'); + appEnvs.FH_MESSAGING_PROTOCOL = fhconfig.value('fhmessaging.protocol'); + appEnvs.FH_MESSAGING_REALTIME_ENABLED = fhconfig.bool('fhmessaging.realtime'); + appEnvs.FH_MESSAGING_RECOVERY_FILE = fhconfig.value('fhmessaging.files.recovery_file'); + appEnvs.FH_MESSAGING_SERVER = url.format({ + protocol: fhconfig.value('fhmessaging.protocol'), + hostname: fhconfig.value('fhmessaging.host'), + port: fhconfig.int('fhmessaging.port'), + pathname: fhconfig.value('fhmessaging.path') + }); + appEnvs.FH_STATS_ENABLED = fhconfig.bool('fhstats.enabled'); + appEnvs.FH_STATS_HOST = fhconfig.value('fhstats.host'); + appEnvs.FH_STATS_PORT = fhconfig.int('fhstats.port'); + appEnvs.FH_STATS_PROTOCOL = fhconfig.value('fhstats.protocol'); + appEnvs.FH_REDIS_HOST = fhconfig.value('fhredis.host'); + appEnvs.FH_REDIS_PORT = fhconfig.int('fhredis.port'); + if (fhconfig.value('fhredis.password') && fhconfig.value('fhredis.password') !== '') { + appEnvs.FH_REDIS_PASSWORD = fhconfig.value('fhredis.password'); + } + appEnvs.LD_LIBRARY_PATH = '/opt/instantclient/'; //legacy env var + + //Only apps that have been migrated to their own database will get this environment variable + if (appMbaas.migrated) { + appEnvs.FH_MONGODB_CONN_URL = common.formatDbUri(appMbaas.dbConf, fhconfig.value('mongo.replicaset_name')); + } + + //Prototcol For Calling Mbaas From Apps + appEnvs.FH_MBAAS_PROTOCOL = mbaasUrl.protocol || "https"; + //App Mbaas Host. Used for apps calling mbaas hosts. + appEnvs.FH_MBAAS_HOST = mbaasUrl.host; + //Access key to verify apps calling Mbaas App APIs. + appEnvs.FH_MBAAS_ENV_ACCESS_KEY = appMbaas.accessKey; + appEnvs.FH_MBAAS_ID = fhconfig.value("fhmbaas.mbaasid"); + + //If the app is a service, ensure the FH_SERVICE_ACCESS_KEY env var is set. + //This will allow authorised data sources to access the service using the X-FH-SERViCE-ACCESS-KEY header. + if (appMbaas.isServiceApp) { + appEnvs.FH_SERVICE_ACCESS_KEY = appMbaas.serviceAccessKey; + } + + return appEnvs; +} + +function getOpenshift3Envars(params) { + var fhconfig = params.fhconfig; + var appMbaas = params.appMbaas; + + var mbaasUrl = _parseMbaasUrl(appMbaas.mbaasUrl); + + var appEnvs = {}; + appEnvs.FH_MBAAS_PROTOCOL = mbaasUrl.protocol; + //App Mbaas Host. Used for apps calling mbaas hosts. + appEnvs.FH_MBAAS_HOST = mbaasUrl.host; + //Access key to verify apps calling Mbaas App APIs. 
+ appEnvs.FH_MBAAS_ENV_ACCESS_KEY = appMbaas.accessKey; + appEnvs.FH_MESSAGING_REALTIME_ENABLED = fhconfig.bool('fhmessaging.realtime'); + appEnvs.FH_MESSAGING_ENABLED = fhconfig.bool('fhmessaging.enabled'); + appEnvs.FH_MESSAGING_HOST = fhconfig.value('fhmessaging.host'); + appEnvs.FH_MESSAGING_CLUSTER = fhconfig.value('fhmessaging.cluster'); + appEnvs.FH_MBAAS_ID = fhconfig.value("fhmbaas.mbaasid"); + appEnvs.FH_STATS_HOST = fhconfig.value("fhstats.host"); + appEnvs.FH_STATS_PORT = fhconfig.value("fhstats.udp.port"); + appEnvs.FH_STATS_ENABLED = fhconfig.value("fhstats.enabled"); + appEnvs.FH_STATS_PROTOCOL = fhconfig.value("fhstats.udp.protocol"); + + //If the app is a service, ensure the FH_SERVICE_ACCESS_KEY env var is set. + //This will allow authorised data sources to access the service using the X-FH-SERVICE-ACCESS-KEY header. + if (appMbaas.isServiceApp) { + appEnvs.FH_SERVICE_ACCESS_KEY = appMbaas.serviceAccessKey; + } + + return appEnvs; +} + +module.exports = { + feedhenry: getFeedhenryEnvVars, + openshift: appEnv.openshift, + openshift3: getOpenshift3Envars +}; diff --git a/lib/models/index.js b/lib/models/index.js new file mode 100644 index 0000000..49de456 --- /dev/null +++ b/lib/models/index.js @@ -0,0 +1,16 @@ +/** + * Initialize mongodb models on top of a connection + * Connection start and cleanup handled by fh-mbaas-middleware + * @param {mongoose.Connection} conn mongodb connection + * @param {Function} cb node-style callback + */ +exports.init = function init(conn, cb) { + try { + exports.AppdataJob = conn.model('AppdataJob', require('./AppdataJobSchema')); + exports.SubmissionExportJob = conn.model('SubmissionsJob', require('./SubmissionDataJobSchema')); + exports.File = require('../storage/models/FileSchema').createModel(conn); + } catch (err) { + return cb(err); + } + cb(); +}; \ No newline at end of file diff --git a/lib/routes/forms/dataSources/handlers.js b/lib/routes/forms/dataSources/handlers.js new file mode 100644 index 0000000..5432c83 --- /dev/null +++ b/lib/routes/forms/dataSources/handlers.js @@ -0,0 +1,446 @@ +var fhForms = require('fh-forms'); +var async = require('async'); +var _ = require('underscore'); +var serviceServices = require('../../../services/services'); +var dataSourceUpdaterModule = require('../../../dataSourceUpdater'); +var getDeployedService = require('../../../services/appmbaas/getDeployedService'); +var url = require('url'); +var logger = require('../../../util/logger').getLogger(); +var dataSourceUpdater; + +/** + * Handler For Getting A Single Data Source + * @param req + * @param res + * @param next + */ +function get(req, res, next) { + async.waterfall([ + function(cb) { + fhForms.core.dataSources.get({ + uri: req.mongoUrl + }, { + _id: req.params.id + }, cb); + } + ], function(err, result) { + if (err) { + return next(err); + } + + res.json(result); + }); +} + +/** + * Handler For Listing Data Sources + * @param req + * @param res + * @param next + */ +function list(req, res, next) { + async.waterfall([ + function(cb) { + fhForms.core.dataSources.list({ + uri: req.mongoUrl + }, {}, cb); + } + ], function(err, result) { + if (err) { + return next(err); + } + + res.json(result); + }); +} + +/** + * Sending A Request To The Service Backing A Data Source + * @param dataSource + * @param deployedService + * @param cb + */ +function getServiceData(dataSource, deployedService, cb) { + + logger.debug("deployedService", { + deployedService:deployedService + }); + + //Try and get data source data set. 
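+ // For orientation: the data source endpoint below is resolved against the deployed service host, e.g. + // (illustrative values) url.resolve('https://service.example.com', '/weather') === 'https://service.example.com/weather'.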
+ + //Only want a single instance + if (!dataSourceUpdater) { + dataSourceUpdater = dataSourceUpdaterModule(logger); + } + + var serviceHost = deployedService.url || ""; + var path = dataSource.endpoint || ""; + var fullUrl = url.resolve(serviceHost, path); + + dataSourceUpdater.handlers.requestEndpointData({ + fullUrl: fullUrl, + accessKey: deployedService.serviceAccessKey + }, function(err, serviceData) { + if (err) { + logger.error("Error Getting Service Data ", {error: err}); + dataSource.validationResult = { + valid: false, + message: err.userDetail + }; + return cb(dataSource); + } + + if (!serviceData) { + logger.error("No Service Data Passed "); + dataSource.validationResult = { + valid: false, + message: "No data returned from the service endpoint" + }; + return cb(dataSource); + } + + //Assigning The Data. Useful for viewing the validation result. + dataSource.data = serviceData; + + logger.debug("serviceData", { + serviceData:serviceData + }); + + cb(undefined, serviceData); + }); +} + +function updateSingleDataSource(req, dataSource, deployedService, cb) { + var fullUrl; + var error; + var currentTime = new Date(); + + logger.debug("updateSingleDataSource ", { + deployedService: deployedService, + dataSource: dataSource + }); + //If there is no deployed service, update the data source to note that the service has not been deployed. + if (!deployedService) { + deployedService = {}; + error = { + userDetail: "Service is not deployed.", + systemDetail: "The Service associated with this Data Source has not been deployed to this environment.", + code: "DS_SERVICE_NOT_DEPLOYED" + }; + } else { + var serviceHost = deployedService.url || ""; + var path = dataSource.endpoint; + fullUrl = url.resolve(serviceHost, path); + } + + //Only want a single instance + if (!dataSourceUpdater) { + dataSourceUpdater = dataSourceUpdaterModule(logger); + } + + dataSourceUpdater.handlers.updateSingleDataSource({ + currentTime: currentTime, + mongoUrl: req.mongoUrl, + error: error, + fullUrl: fullUrl, + accessKey: deployedService.serviceAccessKey, + dataSourceId: dataSource._id + }, cb); +} + +/** + * Handler For Deploying A Data Source + * @param req + * @param res + * @param next + */ +function deploy(req, res, next) { + logger.debug("Deploy Data Source ", req.body); + + var dataSource = req.body || {}; + + async.waterfall([ + function(cb) { + fhForms.core.dataSources.deploy({ + uri: req.mongoUrl + }, dataSource, cb); + }, + function(dataSource, cb) { + serviceServices.addDataSource({ + domain: req.params.domain, + guid: dataSource.serviceGuid, + dataSourceId: dataSource._id, + mongoUrl: req.mongoUrl + }, function(err) { + cb(err, dataSource); + }); + } + ], function(err, deployedDataSource) { + if (err) { + logger.error("Error Deploying Data Source ", {error: err}); + return next(err); + } + + logger.debug("Finished Deploying Data Source", deployedDataSource); + + res.json(deployedDataSource); + + //When finished with the deploy, trigger an update of the Data Source + async.waterfall([ + function getDeployedServ(cb) { + //Get A Deployed Service Related To The Data Source + getDSDeployedService(req, deployedDataSource, function(err, deployedService) { + deployedDataSource = checkDataSourceService(deployedDataSource, deployedService); + cb(undefined, { + deployedDataSource: deployedDataSource, + deployedService: deployedService + }); + }); + }, + function updateDataSourceDataSet(params, cb) { + updateSingleDataSource(req, params.deployedDataSource, params.deployedService, cb); + } + ], _.noop); + }); +} + +/** + * Getting A 
Deployed Service Related To A Data Source + * @param req + * @param dataSource + * @param cb + */ +function getDSDeployedService(req, dataSource, cb) { + //Get A Deployed Service Related To The Data Source + + logger.debug("getDSDeployedService ", dataSource); + + getDeployedService({ + mongoUrl: req.mongoUrl, + domain: req.params.domain, + guid: dataSource.serviceGuid, + environment: req.params.environment + }, function(err, deployedService) { + logger.debug("getDSDeployedService ", deployedService); + cb(undefined, deployedService); + }); +} + + +function checkDataSourceService(dataSource, deployedService) { + if (!deployedService) { + dataSource.validationResult = { + valid: false, + message: "The Service associated with this Data Source has not been deployed to this environment." + }; + } + + return dataSource; +} + +/** + * Handler For Validating A Data Source + * @param req + * @param res + * @param next + */ +function validate(req, res, next) { + + //Check That The Service Is Deployed, + //Try To Request Service Data + //Validate Service Data + + logger.debug("Validate Data Source", { + params: req.params, + dataSource: req.body + }); + + var dataSource = req.body || {}; + + async.waterfall([ + function getDSDeployedServ(cb) { + getDSDeployedService(req, dataSource, function(err, deployedService) { + //No Deployed service, mark the Data Source as error for saving to the database. + dataSource = checkDataSourceService(dataSource, deployedService); + + //If the data source is invalid, pass it as the error param to skip the remaining steps + if (dataSource.validationResult && !dataSource.validationResult.valid) { + return cb(dataSource); + } else { + return cb(undefined, deployedService); + } + }); + }, + function getDSServiceData(deployedService, cb) { + getServiceData(dataSource, deployedService, cb); + }, + function validateDataSource(serviceData, cb) { + logger.debug("Calling Validate ", { + uri: req.mongoUrl + }, dataSource); + + fhForms.core.dataSources.validate({ + uri: req.mongoUrl + }, dataSource, cb); + } + ], function(err, validationResult) { + if (err && err !== dataSource) { + logger.error("Error Validating Data Source", err); + return next(err); + } + + logger.debug("Validation Result ", validationResult || dataSource); + + //The Response Is Either The Full Validation Result Or It Never Got That Far. 
+ res.json(validationResult || dataSource); + }); +} + +/** + * Handler For Removing A Data Source + * @param req + * @param res + * @param next + */ +function remove(req, res, next) { + async.waterfall([ + function(cb) { + fhForms.core.dataSources.get({ + uri: req.mongoUrl + }, {_id: req.params.id}, cb); + }, + function(dataSource, cb) { + fhForms.core.dataSources.remove({ + uri: req.mongoUrl + }, {_id: req.params.id}, function(err) { + cb(err, dataSource); + }); + }, + function(dataSource, cb) { + if (!dataSource) { + return cb(); + } + + serviceServices.removeDataSource({ + domain: req.params.domain, + guid: dataSource.serviceGuid, + dataSourceId: dataSource._id, + mongoUrl: req.mongoUrl + }, cb); + } + ], function(err) { + if (err && err.code !== "FH_FORMS_NOT_FOUND") { + return next(err); + } + + res.status(204).end(); + }); +} + +/** + * Forcing A Refresh Of A Data Source + * @param req + * @param res + * @param next + */ +function refresh(req, res, next) { + async.waterfall([ + function getDataSource(cb) { + fhForms.core.dataSources.get({ + uri: req.mongoUrl + }, {_id: req.params.id}, cb); + }, + function getDeployedServ(dataSource, cb) { + //Get A Deployed Service Related To The Data Source + getDSDeployedService(req, dataSource, function(err, deployedService) { + //No Deployed service, mark the Data Source as error for saving to the database. + dataSource = checkDataSourceService(dataSource, deployedService); + cb(err, { + dataSource: dataSource, + deployedService: deployedService + }); + }); + }, + function updateDataSourceDataSet(params, cb) { + updateSingleDataSource(req, params.dataSource, params.deployedService, cb); + }, + function getDataSourceAgain(cb) { + //Get the latest State Of The Data Source After The Forced Refresh - Nice API friendly response + fhForms.core.dataSources.get({ + uri: req.mongoUrl + }, {_id: req.params.id}, cb); + } + ], function(err, result) { + if (err) { + return next(err); + } + + res.status(200).json(result); + }); +} + +/** + * Getting An Audit Log Related To A Data Source + * @param req + * @param res + * @param next + */ +function getAuditLogs(req, res, next) { + async.waterfall([ + function(cb) { + fhForms.core.dataSources.get({ + uri: req.mongoUrl + }, { + _id: req.params.id, + includeAuditLog: true + }, cb); + } + ], function(err, dataSourceWithAuditLog) { + if (err) { + return next(err); + } + + logger.debug("Responding With Audit Logs"); + + res.json(dataSourceWithAuditLog); + }); +} + + +/** + * getAuditLogEntry - Getting A Full Audit Log Entry Including Data + * + * @param {type} req description + * @param {type} res description + * @param {type} next description + * @return {type} description + */ +function getAuditLogEntry(req, res, next) { + async.waterfall([ + function(cb) { + fhForms.core.dataSources.getAuditLogEntry({ + uri: req.mongoUrl + }, { + _id: req.params.logid + }, cb); + } + ], function(err, auditLog) { + if (err) { + return next(err); + } + + logger.debug("Responding With Audit Logs"); + + res.json(auditLog); + }); +} + +module.exports = { + get: get, + list: list, + deploy: deploy, + validate: validate, + remove: remove, + refresh: refresh, + getAuditLog: getAuditLogs, + getAuditLogEntry: getAuditLogEntry +}; diff --git a/lib/routes/forms/dataSources/router.js b/lib/routes/forms/dataSources/router.js new file mode 100644 index 0000000..b3b1eac --- /dev/null +++ b/lib/routes/forms/dataSources/router.js @@ -0,0 +1,27 @@ +var express = require('express'); +var handlers = require('./handlers'); + +var router = 
express.Router({ + mergeParams: true +}); + +router.get('/', handlers.list); + +//Deploying A Data Source To An Environment +router.post('/:id/deploy', handlers.deploy); +//Getting a single audit log entry. +router.get('/audit_logs/:logid', handlers.getAuditLogEntry); + +//Listing Audit Logs +router.get('/:id/audit_logs', handlers.getAuditLog); + +router.get('/:id', handlers.get); + +router.post('/validate', handlers.validate); + +//Forcing A Refresh Of A Specific Data Source +router.post('/:id/refresh', handlers.refresh); + +router.delete('/:id', handlers.remove); + +module.exports = router; diff --git a/lib/routes/forms/submissions/handlers/export.js b/lib/routes/forms/submissions/handlers/export.js new file mode 100644 index 0000000..3c2227d --- /dev/null +++ b/lib/routes/forms/submissions/handlers/export.js @@ -0,0 +1,55 @@ +const SubmissionExportJob = require('../../../../models').SubmissionExportJob; +var jobTypes = require('../../../../models/BaseImportExportJobSchema').types; +const util = require('util'); +const express = require('express'); +var logger = require('../../../../util/logger').getLogger(); + +// TODO: hook up with submission export job running +var spawnJob = function(params, cb) { + var job = new SubmissionExportJob({ + domain: params.domain, + environment: params.environment, + jobType: jobTypes.EXPORT + }); + job.save(cb); +}; +const buildJobMiddleware = require('../../../../middleware/buildJobMiddleware'); +const middleware = buildJobMiddleware(SubmissionExportJob, spawnJob, function() { + return {jobType: jobTypes.EXPORT}; +}); + +var router = express.Router({ + mergeParams: true +}); + +router.param('job_id', middleware.find); + +router.get('/export', middleware.filteredJobs, function(req, res) { + return res.send(req.jobs); +}); + +router.post('/export', middleware.create, function(req, res) { + res.send(req.job); +}); + + +router.get('/export/:job_id', function(req, res) { + return res.send(req.job); +}); + +router.post('/export/:job_id', + middleware.ensureFinishedAndRegistered, + middleware.generateURL, + function(req, res) { + res.send(req.fileUrl); + }); + + +// eslint-disable-next-line no-unused-vars +router.use(function SubmissionsDataExportErrorHandler(err, req, res, next) { + res.statusCode = err.code || 500; + logger.error(util.inspect(err)); + res.json(err); +}); + +module.exports = router; \ No newline at end of file diff --git a/lib/routes/forms/submissions/handlers/exportCSVAsync.js b/lib/routes/forms/submissions/handlers/exportCSVAsync.js new file mode 100644 index 0000000..5953c47 --- /dev/null +++ b/lib/routes/forms/submissions/handlers/exportCSVAsync.js @@ -0,0 +1,202 @@ +var fhForms = require('fh-forms'); +var async = require('async'); +var path = require('path'); +var fhConfig = require('fh-config'); +var fs = require('fs'); +var _ = require('underscore'); +var logger = require('../../../../util/logger').getLogger(); +var archiver = require('archiver'); +var storage = require('../../../../storage'); + +/** + * + * Registering an exported zip file for export. 
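+ * + * Flow, for orientation (purely descriptive of the code below, nothing new is implied): the zip is registered + * with the storage service, a download URL is generated for the registered file, and the CSV export status is + * then marked as complete. + *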
+ * + * @param connectionOptions + * @param submissionZipFilePath + */ +function registerExportedFileForDownload(connectionOptions, submissionZipFilePath) { + + storage.registerFile(submissionZipFilePath, function(err, registeredFile) { + if (err) { + logger.error({error: err}, "Error registering submission file"); + } + + //Want to generate a url to download the zip file + storage.generateURL(registeredFile._id, null, null, function(err, submissionZipUrl) { + if (err) { + logger.error({error: err, registeredFile: registeredFile}, "Error generating submission file url"); + return; + } + + //Finish, set the export status to complete and keep url. + fhForms.core.updateCSVExportStatus(connectionOptions, { + status: fhForms.CONSTANTS.SUBMISSION_CSV_EXPORT.STATUS_COMPLETE, + message: "Exported submissions available to download here" + }); + }); + }); +} + + +/** + * Cleaning up an exported submissions file + * @param path + * @param cb + */ +function cleanUpSubmissionFile(path, cb) { + fs.stat(path, function(err, fileStats) { + if (err) { + logger.warn({error: err}, "Error reading file " + path); + return cb(err); + } + + //No file + if (!fileStats) { + return cb(); + } + + //file exists, remove it. + fs.unlink(path, function(err) { + if (err) { + logger.warn({error: err}, "Error unlinking file " + path); + } + + return cb(err); + }); + }); +} + + +/** + * Function for processing submissions into a zip file containing csv files. + * @param params + * @param params.domain The domain exported for + * @param params.environment The environment ID exported for + * @param params.connectionOptions Mongo Connection Options + * @param params.connectionOptions.uri Mongo connection string + * @param exportedSubmissionCSVs The export CSV strings for each form + * @private + */ +function processExportResponse(params, exportedSubmissionCSVs) { + exportedSubmissionCSVs = exportedSubmissionCSVs || {}; + + fhForms.core.updateCSVExportStatus(params.connectionOptions, { + status: fhForms.CONSTANTS.SUBMISSION_CSV_EXPORT.STATUS_INPROGRESS, + message: "Compressing Exported Submissions" + }); + + var fileName = params.domain + "_" + params.environment + "_" + "submissioncsvexport.zip"; + + var exportedZipFilePath = path.join(fhConfig.value("fhmbaas.temp_forms_files_dest"), fileName); + + cleanUpSubmissionFile(exportedZipFilePath, function(err) { + if (err && err.code !== "ENOENT") { + fhForms.core.updateCSVExportStatus(params.connectionOptions, { + status: fhForms.CONSTANTS.SUBMISSION_CSV_EXPORT.STATUS_ERROR, + message: "Error cleaning up submission file", + error: err + }); + + return; + } + + var zipFileStream = fs.createWriteStream(exportedZipFilePath); + var zip = archiver('zip'); + + zipFileStream.on('error', function(err) { + logger.error({error: err}, "Error zipping exported submissions"); + + fhForms.core.updateCSVExportStatus(params.connectionOptions, { + status: fhForms.CONSTANTS.SUBMISSION_CSV_EXPORT.STATUS_ERROR, + message: "Error compressing exported submissions", + error: err + }); + }); + + zipFileStream.on('close', function() { + logger.debug("Finished Export CSV Zip"); + + fhForms.core.updateCSVExportStatus(params.connectionOptions, { + status: fhForms.CONSTANTS.SUBMISSION_CSV_EXPORT.STATUS_INPROGRESS, + message: "Finished compressing exported submissions. Registering the ZIP file for download." 
+ }); + + registerExportedFileForDownload(params.connectionOptions, exportedZipFilePath); + }); + + zip.pipe(zipFileStream); + + _.each(exportedSubmissionCSVs, function(csv, form) { + zip.append(csv, {name: form + '.csv'}); + }); + + logger.debug("Finalising Export CSV Zip"); + + zip.finalize(); + }); +} + + +/** + * + * Handler to start the export process for CSVs. This will not wait for the CSV export to start + * + * @param req + * @param res + * @param next + */ +module.exports = function exportCSVAsync(req, res, next) { + req.body = req.body || {}; + + var exportParams = { + "appId" : req.body.projectId, + "subid": req.body.subid, + "formId": req.body.formId, + "fieldHeader": req.body.fieldHeader, + "downloadUrl": req.body.fileUrl, + "filter": req.body.filter, + "query": req.body.query, + "wantRestrictions": false + }; + + async.waterfall([ + function startExportAsync(cb) { + fhForms.core.startCSVExport(req.connectionOptions, function(err, updatedCSVExportStatus) { + if (err) { + logger.error({error: err}, "Error starting CSV Export"); + } + + return cb(err, updatedCSVExportStatus); + }); + } + ], function(err, updatedCSVExportStatus) { + if (err) { + logger.error({error: err}, "Error starting CSV Export"); + return next(err); + } + + //Responding to the http request and starting the submission CSV export. + res.json(updatedCSVExportStatus || {}); + + fhForms.core.exportSubmissions(_.extend({asyncCSVExport: true}, req.connectionOptions), exportParams, function(err, submissionCsvValues) { + if (err) { + fhForms.core.updateCSVExportStatus(req.connectionOptions, { + status: fhForms.CONSTANTS.SUBMISSION_CSV_EXPORT.STATUS_ERROR, + message: "Error exporting submissions", + error: err + }); + logger.warn({error: err}, "Error exporting submissions "); + return; + } + + logger.info("Submission CSV Values Exported"); + + processExportResponse({ + domain: req.params.domain, + environment: req.params.environment, + connectionOptions: req.connectionOptions + }, submissionCsvValues); + }); + }); +}; diff --git a/lib/routes/forms/submissions/handlers/exportPdf.js b/lib/routes/forms/submissions/handlers/exportPdf.js new file mode 100644 index 0000000..455fa15 --- /dev/null +++ b/lib/routes/forms/submissions/handlers/exportPdf.js @@ -0,0 +1,58 @@ +var fhForms = require('fh-forms'); +var fs = require('fs'); +var async = require('async'); +var fhConfig = require('fh-config'); +var logger = require('../../../../util/logger').getLogger(); + +/** + * exportPDF - Exporting A Single submission as a PDF file. + * + * @param {object} req Request Object + * @param {object} res Response Object + * @param {function} next Middleware next function. 
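
The processExportResponse step above compresses the per-form CSV strings with archiver before registering the archive for download. A self-contained sketch of that compression step, with made-up form names and an arbitrary output path:

var archiver = require('archiver');
var fs = require('fs');

// Hypothetical export output: one CSV string per form, keyed by form name.
var exportedSubmissionCSVs = {
  contactForm: 'submissionId,name\n1,Alice\n',
  surveyForm: 'submissionId,answer\n1,Yes\n'
};

var zipFileStream = fs.createWriteStream('/tmp/submissioncsvexport.zip');
var zip = archiver('zip');

zipFileStream.on('close', function() {
  console.log('archive written,', zip.pointer(), 'bytes');
});
zip.on('error', function(err) {
  console.error('error compressing CSVs', err);
});

zip.pipe(zipFileStream);

Object.keys(exportedSubmissionCSVs).forEach(function(form) {
  // Each form becomes a <form>.csv entry inside the zip, as in the handler above.
  zip.append(exportedSubmissionCSVs[form], {name: form + '.csv'});
});

zip.finalize();
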
+ */ +module.exports = function exportPDF(req, res, next) { + async.waterfall([ + function generateSubmissionPdf(cb) { + fhForms.core.generateSubmissionPdf({ + uri: req.mongoUrl, + _id: req.params.id, + pdfExportDir: fhConfig.value('fhmbaas.pdfExportDir'), + filesAreRemote: false, + location: req.body.coreLocation, + maxConcurrentPhantomPerWorker: fhConfig.value('fhmbaas.maxConcurrentPhantomPerWorker') + }, cb); + }], function(err, pdfFileLocation) { + if (err) { + return next(err); + } + var fileReadStream = fs.createReadStream(pdfFileLocation); + + fileReadStream.on('end', function() { + fs.unlink(pdfFileLocation, function() { + logger.debug("Submission File " + pdfFileLocation + " removed"); + }); + }); + + fileReadStream.on('error', function(error) { + logger.error("Error sending PDF file ", { + error: error, + pdfFileLocation: pdfFileLocation + }); + }); + + //If + if (!res.headersSent) { + //Want to pipe the response to the result + //Setting content-disposition as an attachement for nice browser compatibility. + res.writeHead(200, { + "Content-Disposition": 'attachment; filename="' + req.params.id + '.pdf"', + "Content-Type": 'application/pdf' + }); + fileReadStream.pipe(res); + } else { + fileReadStream.close(); + } + + }); +}; \ No newline at end of file diff --git a/lib/routes/forms/submissions/handlers/filter.js b/lib/routes/forms/submissions/handlers/filter.js new file mode 100644 index 0000000..c9a24e0 --- /dev/null +++ b/lib/routes/forms/submissions/handlers/filter.js @@ -0,0 +1,33 @@ +var forms = require('fh-forms'); +var logger = require('../../../../util/logger').getLogger(); + +/** + * Filtering Submissions + * @param req + * @param res + * @param next + */ +module.exports = function filterSubmissions(req, res, next) { + var filterParams = { + formId: req.body.formId, + appId: req.body.appId, + paginate: { + //Populated by express-paginate middleware + page: req.query.page, + limit: req.query.limit, + filter: req.query.filter + } + }; + + logger.debug("Middleware filterSubmissions ", {params: filterParams}); + + forms.core.getSubmissions({ + uri: req.mongoUrl + }, filterParams, function(err, submissions) { + if (err) { + return next(err); + } + + res.json(submissions || {}); + }); +}; diff --git a/lib/routes/forms/submissions/handlers/getExportCSVStatus.js b/lib/routes/forms/submissions/handlers/getExportCSVStatus.js new file mode 100644 index 0000000..c27b19a --- /dev/null +++ b/lib/routes/forms/submissions/handlers/getExportCSVStatus.js @@ -0,0 +1,14 @@ +var fhForms = require('fh-forms'); +var logger = require('../../../../util/logger').getLogger(); + +module.exports = function getExportCSVStatus(req, res, next) { + + fhForms.core.getCSVExportStatus(req.connectionOptions, function(err, csvExportStatus) { + if (err) { + logger.error({error: err}, "Error getting CSV Export Status"); + return next(err); + } + + res.json(csvExportStatus || {}); + }); +}; \ No newline at end of file diff --git a/lib/routes/forms/submissions/handlers/index.js b/lib/routes/forms/submissions/handlers/index.js new file mode 100644 index 0000000..b1f3191 --- /dev/null +++ b/lib/routes/forms/submissions/handlers/index.js @@ -0,0 +1,11 @@ + + +module.exports = { + list: require('./list'), + search: require('./search'), + filter: require('./filter'), + exportPDF: require('./exportPdf'), + exportCSVAsync: require('./exportCSVAsync'), + getExportCSVStatus: require('./getExportCSVStatus'), + resetExportCSV: require('./resetExportCSV') +}; diff --git a/lib/routes/forms/submissions/handlers/list.js 
b/lib/routes/forms/submissions/handlers/list.js new file mode 100644 index 0000000..6317cec --- /dev/null +++ b/lib/routes/forms/submissions/handlers/list.js @@ -0,0 +1,30 @@ +var forms = require('fh-forms'); +var logger = require('../../../../util/logger').getLogger(); +/** + * Handler for listing All Submissions + * @param req + * @param res + * @param next + */ +module.exports = function list(req, res, next) { + var listParams = { + paginate: { + //Populated by express-paginate middleware + page: req.query.page, + limit: req.query.limit, + filter: req.query.filter + } + }; + + logger.debug("Handler Submissions List ", {connectionOptions: req.connectionOptions, listParams: listParams}); + forms.core.getSubmissions({ + uri: req.mongoUrl + }, listParams, function(err, submissionsResult) { + if (err) { + logger.error("Error listing submissions", err); + return next(err); + } + + res.json(submissionsResult || {}); + }); +}; diff --git a/lib/routes/forms/submissions/handlers/resetExportCSV.js b/lib/routes/forms/submissions/handlers/resetExportCSV.js new file mode 100644 index 0000000..adc46f3 --- /dev/null +++ b/lib/routes/forms/submissions/handlers/resetExportCSV.js @@ -0,0 +1,14 @@ +var fhForms = require('fh-forms'); +var logger = require('../../../../util/logger').getLogger(); + +module.exports = function getExportCSVStatus(req, res, next) { + + fhForms.core.resetExportCSV(req.connectionOptions, function(err, csvExportStatus) { + if (err) { + logger.error({error: err}, "Error resetting CSV Export Status"); + return next(err); + } + + res.json(csvExportStatus || {}); + }); +}; \ No newline at end of file diff --git a/lib/routes/forms/submissions/handlers/search.js b/lib/routes/forms/submissions/handlers/search.js new file mode 100644 index 0000000..511f659 --- /dev/null +++ b/lib/routes/forms/submissions/handlers/search.js @@ -0,0 +1,31 @@ +var forms = require('fh-forms'); +var _ = require('underscore'); +var logger = require('../../../../util/logger').getLogger(); + +/** + * Search For Submissions. 
Used For Advanced Search + * @param req + * @param res + * @param next + */ +module.exports = function search(req, res, next) { + var queryParams = req.body; + + logger.debug("Middleware Submission Search ", {params: queryParams}); + + forms.core.submissionSearch({ + uri: req.mongoUrl + }, _.extend(queryParams, { + paginate: { + //Populated by express-paginate middleware + page: req.query.page, + limit: req.query.limit + } + }), function(err, submissions) { + if (err) { + return next(err); + } + + res.json(submissions || {}); + }); +}; diff --git a/lib/routes/forms/submissions/router.js b/lib/routes/forms/submissions/router.js new file mode 100644 index 0000000..8368db1 --- /dev/null +++ b/lib/routes/forms/submissions/router.js @@ -0,0 +1,76 @@ +var express = require('express'); +var fhForms = require('fh-forms'); +var paginate = require('express-paginate'); +var submissionsMiddleware = fhForms.middleware.submissions; +var formsMiddleware = fhForms.middleware.forms; +var fhConfig = require('fh-config'); +var handlers = require('./handlers'); +var CONSTANTS = require('../../../constants'); +var dataExport = require('./handlers/export'); +var multer = require('multer'); + +module.exports = function() { + var router = express.Router({ + mergeParams: true + }); + + //Multipart Form Request Parser + router.use(multer({ + dest: fhConfig.value("fhmbaas.temp_forms_files_dest") + })); + + //List Form Submissions (Paginated) + router.get('/', paginate.middleware(fhConfig.value(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_DEFAULT_LIMIT_KEY), fhConfig.value(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_MAX_LIMIT_KEY)), handlers.list); + + //Creating A New Submission + router.post('/', function(req, res, next) { + //Setting some studio defaults to mark the submission as created by studio + req.params.projectid = req.body.appId || "FHC"; + + next(); + }, formsMiddleware.submitFormData); + + //Marking A Pending Submission As Complete + router.post('/:id/complete', submissionsMiddleware.completeSubmission); + + //Search For A Submission Using Custom Parameters (Paginated) + router.post('/search', paginate.middleware(fhConfig.value(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_DEFAULT_LIMIT_KEY), fhConfig.value(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_MAX_LIMIT_KEY)), handlers.search); + + router.post('/export/async', handlers.exportCSVAsync); + + router.get('/export/async/status', handlers.getExportCSVStatus); + + router.post('/export/async/reset', handlers.resetExportCSV); + + //Export Submissions Using Custom Parameters + router.post('/export', submissionsMiddleware.exportCSV); + + //Filtering Submissions (Paginated) + router.post('/filter', paginate.middleware(fhConfig.value(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_DEFAULT_LIMIT_KEY), fhConfig.value(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_MAX_LIMIT_KEY)), handlers.filter); + + //Export A Submission As A PDF Document + router.post('/:id/exportpdf', handlers.exportPDF); + + //Get A Single Submission + router.get('/:id', submissionsMiddleware.get); + + //Update A File For A Submission + router.post('/:id/fields/:fieldId/files/:fileId', submissionsMiddleware.getRequestFileParameters, submissionsMiddleware.addSubmissionFile); + + //Update A File For A Submission + router.put('/:id/fields/:fieldId/files/:fileId', submissionsMiddleware.getRequestFileParameters, submissionsMiddleware.updateSubmissionFile); + + //Update A Single Submission + router.put('/:id', submissionsMiddleware.update); + + //Delete A Single Submission + router['delete']('/:id', submissionsMiddleware.remove); + + 
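
The paginated routes above depend on express-paginate to normalise page and limit before the handlers read them from req.query. A standalone sketch of that behaviour; the default and maximum limits here are illustrative, whereas the router above takes them from fh-config:

var express = require('express');
var paginate = require('express-paginate');

var app = express();

app.get('/submissions', paginate.middleware(20, 100), function(req, res) {
  // express-paginate coerces page/limit to numbers and caps limit at the maximum.
  res.json({page: req.query.page, limit: req.query.limit, skip: req.skip});
});

app.listen(3000);
// e.g. GET /submissions?page=3&limit=500 -> {"page":3,"limit":100,"skip":200}
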
//Get A File Related To A Submission + router.get('/:id/files/:fileId', submissionsMiddleware.getSubmissionFile, submissionsMiddleware.processFileResponse); + + // Submission export handlers + router.use('/data', dataExport); + + return router; +}; diff --git a/lib/routes/services/handlers.js b/lib/routes/services/handlers.js new file mode 100644 index 0000000..35d8212 --- /dev/null +++ b/lib/routes/services/handlers.js @@ -0,0 +1,104 @@ +var async = require('async'); +var serviceServices = require('../../services/services'); + +/** + * Handler For Listing Services + * @param req + * @param res + * @param next + */ +function get(req, res, next) { + async.waterfall([ + function(cb) { + serviceServices.get({ + domain: req.params.domain, + guid: req.params.guid, + mongoUrl: req.mongoUrl + }, cb); + } + ], function(err, result) { + if (err) { + return next(err); + } + + res.json(result); + }); +} + +/** + * Handler For Listing Services + * @param req + * @param res + * @param next + */ +function list(req, res, next) { + async.waterfall([ + function(cb) { + serviceServices.findServicesForDomain({ + domain: req.params.domain, + mongoUrl: req.mongoUrl + }, cb); + } + ], function(err, result) { + if (err) { + return next(err); + } + + res.json(result); + }); +} + +/** + * Handler For Deploying A Service To The Mbaas + * @param req + * @param res + * @param next + */ +function deploy(req, res, next) { + async.waterfall([ + function(cb) { + serviceServices.deploy({ + mongoUrl: req.mongoUrl, + domain: req.params.domain, + service: req.body + }, cb); + } + ], function(err, result) { + if (err) { + return next(err); + } + + res.json(result); + }); +} + +/** + * Handler For Removing A Service + * @param req + * @param res + * @param next + */ +function remove(req, res, next) { + async.waterfall([ + function(cb) { + serviceServices.remove({ + mongoUrl: req.mongoUrl, + guid: req.params.guid, + domain: req.params.domain + }, cb); + } + ], function(err) { + if (err) { + return next(err); + } + + res.status(204).end(); + }); +} + +module.exports = { + get: get, + list: list, + deploy: deploy, + remove: remove +}; \ No newline at end of file diff --git a/lib/routes/services/router.js b/lib/routes/services/router.js new file mode 100644 index 0000000..3fc0dfd --- /dev/null +++ b/lib/routes/services/router.js @@ -0,0 +1,30 @@ +var express = require('express'); +var common = require('../../util/common.js'); +var logger = require('../../util/logger').getLogger(); + +var fhmbaasMiddleware = require('fh-mbaas-middleware'); + +var handlers = require('./handlers'); + +var router = express.Router({ + mergeParams: true +}); + +//Getting The Relevant Environment Database. 
+router.use(fhmbaasMiddleware.envMongoDb.getOrCreateEnvironmentDatabase); + +router.get('/', handlers.list); + +router.get('/:guid', handlers.get); + +router.delete('/:guid', handlers.remove); + +router.post('/:guid/deploy', handlers.deploy); + +//jshint unused:false +router.use(function(err, req, res, next) { + logger.error("Error In Service Request", err ); + return common.handleError(err, "Service Error", 500, req, res); +}); + +module.exports = router; diff --git a/lib/services/appForms/dataSources/index.js b/lib/services/appForms/dataSources/index.js new file mode 100644 index 0000000..be0f2ba --- /dev/null +++ b/lib/services/appForms/dataSources/index.js @@ -0,0 +1,5 @@ + + +module.exports = { + listForUpdate: require('./listForUpdate') +}; \ No newline at end of file diff --git a/lib/services/appForms/dataSources/listForUpdate.js b/lib/services/appForms/dataSources/listForUpdate.js new file mode 100644 index 0000000..72f533c --- /dev/null +++ b/lib/services/appForms/dataSources/listForUpdate.js @@ -0,0 +1,13 @@ +var fhForms = require('fh-forms'); + +module.exports = function listDataSourcesForUpdate(params, cb) { + //Only Want Data Sources That Need To Be Updated. I.e. lastUpdated + interval < currentTime + fhForms.core.dataSources.list({ + uri: params.mongoUrl + }, { + currentTime: params.currentTime, + listDataSourcesNeedingUpdate: true + }, function(err, dataSourcesNeedingUpdate) { + cb(err, dataSourcesNeedingUpdate || []); + }); +}; \ No newline at end of file diff --git a/lib/services/appmbaas/getDeployedService.js b/lib/services/appmbaas/getDeployedService.js new file mode 100644 index 0000000..360b384 --- /dev/null +++ b/lib/services/appmbaas/getDeployedService.js @@ -0,0 +1,29 @@ +var fhMbaasMiddleware = require('fh-mbaas-middleware'); +var logger = require('../../util/logger').getLogger(); + + +/** + * Listing Deployed Services + * @param params + * - domain: Domain + * - environment: Environment + * - guid: Service Guid + * @param cb + */ +module.exports = function getDeployedService(params, cb) { + + logger.debug("getDeployedService ", params); + + if (!params.domain || !params.environment || !params.guid) { + logger.error("getDeployedService: Invalid Parameters, ", {domain: !params.domain, environment: !params.environment}); + return cb(new Error("Missing Parameter: " + (params.domain ? "environment" : "domain"))); + } + + fhMbaasMiddleware.appmbaas().findOne({domain: params.domain, environment: params.environment, isServiceApp: true, guid: params.guid}, function(err, serviceApps) { + if (err) { + logger.error("Error Getting Deployed Service ", {error: err, params: params}); + } + + cb(err, serviceApps); + }); +}; \ No newline at end of file diff --git a/lib/services/appmbaas/listDeployedApps.js b/lib/services/appmbaas/listDeployedApps.js new file mode 100644 index 0000000..cee28d8 --- /dev/null +++ b/lib/services/appmbaas/listDeployedApps.js @@ -0,0 +1,15 @@ +'use strict'; +let fhMbaasMiddleware = require('fh-mbaas-middleware'); +let logger = require('../../util/logger').getLogger(); + +//listDeployedAppsForEnvironment takes a domain and environment and returns the apps that are deployed to that environment. +module.exports = function listDeployedAppsForEnvironment(domain, environment,cb) { + logger.debug("listDeployedAppsForEnvironment ", domain, environment); + + if (!domain || !environment) { + logger.error("listDeployedAppsForEnvironment: Invalid Parameters, ", { domain: domain, environment: environment }); + return cb(new Error("Missing Parameter: " + (domain ? 
"environment" : "domain"))); + } + + fhMbaasMiddleware.appmbaas().find({ domain: domain, environment: environment }, cb); +}; diff --git a/lib/services/appmbaas/listDeployedServices.js b/lib/services/appmbaas/listDeployedServices.js new file mode 100644 index 0000000..32001d6 --- /dev/null +++ b/lib/services/appmbaas/listDeployedServices.js @@ -0,0 +1,28 @@ +var fhMbaasMiddleware = require('fh-mbaas-middleware'); +var logger = require('../../util/logger').getLogger(); + + +/** + * Listing Deployed Services + * @param params + * - domain: + * - environment: + * @param cb + */ +module.exports = function listDeployedServices(params, cb) { + + logger.debug("listDeployedServices ", params); + + if (!params.domain || !params.environment) { + logger.error("listDeployedServices: Invalid Parameters, ", {domain: !params.domain, environment: !params.environment}); + return cb(new Error("Missing Parameter: " + (params.domain ? "environment" : "domain"))); + } + + fhMbaasMiddleware.appmbaas().find({domain: params.domain, environment: params.environment, isServiceApp: true}, function(err, serviceApps) { + if (err) { + logger.error("Error Getting Deployed Services ", {error: err, params: params}); + } + + cb(err, serviceApps || []); + }); +}; \ No newline at end of file diff --git a/lib/services/appmbaas/removeAppDb.js b/lib/services/appmbaas/removeAppDb.js new file mode 100644 index 0000000..0058b56 --- /dev/null +++ b/lib/services/appmbaas/removeAppDb.js @@ -0,0 +1,45 @@ +'use strict'; +const ditchhelper = require('../../util/ditchhelper.js'); +const config = require('fh-mbaas-middleware').config(); +const logger = require('../../util/logger').getLogger(); + +/** + * removeAppDb is responsible for removing the data and db associated with an app in a given environment. 
+ * @param mongo {Object} + * @param domain {string} + * @param appModel {Object} + * @param environment {string} + * @param next {function} + */ +module.exports = function removeAppDb(mongo, domain, appModel, environment, next) { + if (!appModel.migrated && appModel.type !== 'openshift3') { + // Call ditch to remove collection + ditchhelper.removeAppCollection(appModel.name, function complete(err, result) { + if (err) { + return next(new Error('Error when try to remove db for app ' + appModel.name, err)); + } + logger.debug({ app: appModel.name, result: result }, 'App collections removed'); + removeAppModel(appModel, next); + }); + } else { + // App has a per-app db (either migrated or created by default) + // Remove from mongo directly + mongo.dropDb(config, appModel.dbConf.user, appModel.dbConf.name, function complete(err) { + if (err) { + return next(new Error('Request to remove db for app ' + appModel.name, err)); + } + logger.debug({ app: appModel.name }, 'App database is removed'); + removeAppModel(appModel, next); + }); + } +}; + +function removeAppModel(appModel, next) { + appModel.remove(function complete(err,removed) { + if (err) { + return next(new Error('Removing app mbaas instance ' + err.message)); + } else { + next(undefined, removed); + } + }); +} diff --git a/lib/services/environment/deleteEnvironmentData.js b/lib/services/environment/deleteEnvironmentData.js new file mode 100644 index 0000000..663478d --- /dev/null +++ b/lib/services/environment/deleteEnvironmentData.js @@ -0,0 +1,51 @@ +'use strict'; + +const async = require('async'); +const mongo = require('../../util/mongo.js'); +const fhmbaasMiddleware = require('fh-mbaas-middleware'); +const getDeployedApps = require('../../services/appmbaas/listDeployedApps'); +const removeAppDb = require('../../services/appmbaas/removeAppDb.js'); +const logger = require('../../util/logger').getLogger(); +/** + * deleteEnvironmentData will remove the app databases and the environment database associated with an environment + * @param domain {string} + * @param environment {string} + * @param callback {function} + */ +module.exports = function deleteEnvironmentData(domain, environment, callback) { + async.waterfall([ + function getApps(callback) { + getDeployedApps(domain, environment, callback); + }, + function removeAppDbs(apps, callback) { + if (!apps || apps.length === 0) { + return callback(); + } + var errors = []; + function removeCallback(app, callback) { + return function(err) { + if (err) { + logger.error("error removing appdb ", app); + //collect our error and continue removing dbs + errors.push(err); + } + return callback(); + }; + } + async.each(apps, function removeDb(app, cb) { + removeAppDb(mongo, domain, app, environment, removeCallback(app, cb)); + }, function done(err) { + if (err) { + return callback(err); + } + if (errors.length !== 0) { + return callback(errors); + } + return callback(); + }); + }, + function removeEnvironmentDb(callback) { + fhmbaasMiddleware.envMongoDb.dropEnvironmentDatabase(domain, environment, callback); + } + ], callback); +}; diff --git a/lib/services/services/index.js b/lib/services/services/index.js new file mode 100644 index 0000000..93458ef --- /dev/null +++ b/lib/services/services/index.js @@ -0,0 +1,120 @@ +var fhServiceAuth = require('fh-service-auth'); +var _ = require('underscore'); + +function get(params, cb) { + + var serviceModel = fhServiceAuth.model.get(params.mongoUrl); + serviceModel.findOne({ + guid: params.guid, + domain: params.domain + }, {lean: true}, cb); +} + 
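
For reference, a caller-side sketch of this services module: deploy() (defined further down) upserts a service record, and findServicesForDomain() backs the domain listing route in lib/routes/services. The connection string, domain and service fields are illustrative only:

var serviceServices = require('../../services/services'); // as required by the route handlers

// Illustrative parameters; in the handlers these come from the request and config.
var mongoUrl = 'mongodb://user:pass@localhost:27017/fh-mbaas-somedomain-dev';

serviceServices.deploy({
  mongoUrl: mongoUrl,
  domain: 'somedomain',
  service: {guid: 'abc123', name: 'Auth Service'}
}, function(err) {
  if (err) { return console.error('deploy failed', err); }

  serviceServices.findServicesForDomain({
    mongoUrl: mongoUrl,
    domain: 'somedomain'
  }, function(err, found) {
    if (err) { return console.error(err); }
    console.log('services in domain:', found.length);
  });
});
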
+function list(params, cb) { + var serviceModel = fhServiceAuth.model.get(params.mongoUrl); + serviceModel.find({ + domain: params.domain + }, {lean: true}, cb); +} + +function addDataSource(params, cb) { + var serviceModel = fhServiceAuth.model.get(params.mongoUrl); + + if (!serviceModel) { + return cb(new Error("No Service Model Defined For The Mongo Connection")); + } + + serviceModel.updateDataSources({ + guid: params.guid, + domain: params.domain, + dataSourceIds: [params.dataSourceId], + addDataSource: true + }, function(err) { + return cb(err); + }); +} + +function removeDataSource(params, cb) { + var serviceModel = fhServiceAuth.model.get(params.mongoUrl); + + if (!serviceModel) { + return cb(new Error("No Service Model Defined For The Mongo Connection")); + } + + serviceModel.removeDataSource({ + guid: params.guid, + domain: params.domain, + dataSourceIds: [params.dataSourceId] + }, function(err) { + return cb(err); + }); +} + +function findServicesForDomain(params, cb) { + var serviceModel = fhServiceAuth.model.get(params.mongoUrl); + + if (!serviceModel) { + return cb(new Error("No Service Model Defined For The Mongo Connection")); + } + + serviceModel.find({ + domain: params.domain + }, cb); +} + +//Updating The Service Entry In The Mbaas +function deploy(params, cb) { + var serviceModel = fhServiceAuth.model.get(params.mongoUrl); + + if (!serviceModel) { + return cb(new Error("No Service Model Defined For The Mongo Connection")); + } + + serviceModel.findOneOrCreate({ + domain: params.domain, + guid: params.service.guid + }, params.service, function(err, serviceModel) { + if (err) { + return cb(err); + } + + _.extendOwn(serviceModel, params.service); + serviceModel.save(cb); + }); +} + +function remove(params, cb) { + var serviceModel = fhServiceAuth.model.get(params.mongoUrl); + + if (!serviceModel) { + return cb(new Error("No Service Model Defined For The Mongo Connection")); + } + + serviceModel.findOne({ + domain: params.domain, + guid: params.guid + }, function(err, service) { + if (err) { + return cb(err); + } + + //Doesn't exist anyway. Not An Error + if (!service) { + return cb(); + } + + service.remove(cb); + }); +} + + + +module.exports = { + get: get, + list: list, + addDataSource: addDataSource, + removeDataSource: removeDataSource, + findServicesForDomain: findServicesForDomain, + deploy: deploy, + remove: remove +}; \ No newline at end of file diff --git a/lib/storage/functional_test.js b/lib/storage/functional_test.js new file mode 100644 index 0000000..69bb387 --- /dev/null +++ b/lib/storage/functional_test.js @@ -0,0 +1,20 @@ +// TEST file that registers sample file to platform and exposes url. 
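
The deploy() function above refreshes an existing service record with _.extendOwn before saving it. The merge semantics in isolation, using plain objects in place of the mongoose document:

var _ = require('underscore');

// Stand-ins for an existing service document and the incoming deploy payload.
var existing = {domain: 'somedomain', guid: 'abc123', url: 'https://old.example.com'};
var incoming = {guid: 'abc123', url: 'https://new.example.com', label: 'Auth Service'};

// _.extendOwn copies the payload's own enumerable properties onto the record,
// overwriting url and adding label while leaving domain untouched.
_.extendOwn(existing, incoming);

console.log(existing);
// { domain: 'somedomain', guid: 'abc123', url: 'https://new.example.com', label: 'Auth Service' }
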
+// TO be removed after some client would be put in place + +var storage = require("./storage"); +var logger = require('../util/logger').getLogger(); +var path = require('path'); + +// Use project readme as test file +var filePath = path.resolve(__dirname, "../../README.md"); +module.exports = function() { + logger.trace("STARTING DOWNLOAD TESTS"); + storage.registerFile(filePath, function(err, fileId) { + logger.trace("REGISTERED FILE", {err: err, fileId: fileId}); + if (fileId) { + storage.generateURL(fileId, null, function(err, data) { + logger.trace("GENERATED URL FOR FILE", {err: err, data: data}); + }); + } + }); +}; diff --git a/lib/storage/impl/router.js b/lib/storage/impl/router.js new file mode 100644 index 0000000..15a3a2d --- /dev/null +++ b/lib/storage/impl/router.js @@ -0,0 +1,230 @@ +var express = require('express'); +var config = require('fh-config'); +var logger = require('../../util/logger').getLogger(); +var storage = require(".."); +var multer = require('multer'); +var path = require('path'); +var async = require('async'); +var mkdirp = require('mkdirp'); +var os = require('os'); + +var AppdataJob = require('../../models').AppdataJob; + +const UPLOAD_PATH = config.value("fhmbaas.appdata_jobs.upload_dir"); + +var upload = multer({ + dest: UPLOAD_PATH, + changeDest: function(dest, req) { + var domain = req.params.domain + , jobid = req.params.jobId + , appid = req.params.appid + , env = req.params.env + , newDest = path.join(dest, domain, env, appid, jobid); + + // Need to use sync method here because the `changeDest` function + // of multer is not async. Also `mkdirp.sync` does not return + // any useful information in case of error. So we have to trust + // the underlying node fh functions to throw in case of error. + try { + mkdirp.sync(newDest); + } catch (e) { + logger.error("Error creating upload dir", e); + // Don't continue + throw e; + } + + // Success. Save the full upload path in the request for + // later usage + logger.info("Uploading file to " + newDest); + req.params.fullUploadPath = newDest; + return newDest; + } +}); + +var router = express.Router({ + mergeParams: true +}); + +/** + * Download route. + * + * Provides binary content for specified parameters. + * + * @param resourceId - id of resource to be dowloaded + * @queryParam token - token that will be used to download file + * + * @return binaryFile (application/octet-stream) + */ +router.get('/:resourceId', function(req, res) { + var fileReference = req.params.resourceId; + var tokenId = req.query.token; + storage.getFileIfValid(fileReference, tokenId, function(err, found) { + if (err) { + return res.status(err.code || 500) + .send("Cannot download file. Reason:" + err); + } + if (!found) { + return res.status(404) + .send("Invalid or outdated resource URL. Please generate new URL."); + } + var options = { + root: found.directory, + dotfiles: 'deny', + headers: { + 'x-timestamp': Date.now(), + + // Never open content in browser, force download + "Content-Type": "application/octet-stream", + + // hint for browser. + "Content-Disposition": "attachment; filename=" + found.fileName + } + }; + res.sendFile(found.fileName, options, function(err) { + if (err) { + logger.error("Problem when sending file to client", {err: err}); + return res.status(err.status).end(); + } + logger.info('Sent:', found.fileName); + }); + }); +}); + +/** + * Upload route. This route will be invoked for appdata tar archive uploads. + * It must interact with a job entity and set it's state according to the + * upload status. 
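
The download route above only serves a file when the token in the query string is still valid, and it forces an attachment download. A client-side sketch of consuming such a URL; the host, file id and token are made-up values in the shape produced by storage.generateURL later in this patch:

var request = require('request');
var fs = require('fs');

// Hypothetical URL of the form <base>/api/storage/<fileId>?token=<tokenId>.
var downloadUrl = 'https://mbaas.example.com/api/storage/57f3a9c1e4b0aa0011223344?token=57f3a9c1e4b0aa0011223355';

request
  .get(downloadUrl)
  .on('response', function(res) {
    // The route sets application/octet-stream and a Content-Disposition attachment
    // header, so the body is always treated as a file download.
    console.log('status:', res.statusCode);
  })
  .on('error', function(err) {
    console.error('download failed', err);
  })
  .pipe(fs.createWriteStream('/tmp/appdata-export.tar'));
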
+ * + * @param resourceId pre-registered file reference to upload to + * @queryParam token the token that will be used to upload file + */ +router.post('/:jobId/:resourceId/', function(req, res) { + var fileReference = req.params.resourceId; + var tokenId = req.query.token; + var jobId = req.params.jobId; + + /** + * Inject appid, domain and environment from the job into the request + * because that information is needed to create the correct upload + * path + */ + function injectDetails(done) { + async.waterfall([ + async.apply(AppdataJob.findById.bind(AppdataJob), jobId), + function(job, cb) { + if (!job) { + return cb(new Error("Job with id " + jobId + " could not be found")); + } + + req.params.appid = job.appid; + req.params.domain = job.domain; + req.params.env = job.environment; + cb(); + } + ], done); + } + + /** + * If the upload fails for whatever reason (connection lost, + * out-of disk space) we have to mark the job as failed. + * @param reason Fail reason. String or exception object. + */ + function failJob(reason, done) { + async.waterfall([ + async.apply(AppdataJob.findById.bind(AppdataJob), jobId), + function(job, cb) { + if (!job) { + return cb(); + } + job.fail(reason && reason.message || reason, cb); + } + ], done); + } + + /** + * If the upload finishes successfully and the file could be + * stored on disk we have to update it's metadata in the job + * model. + * + * We do not use the original file name to save the uploaded file + * to avoid clashes. So after the upload has finished we have to + * set the actual filename so that the scheduler can pick it up. + * + * @param fullPath The full path of the uploaded file + */ + function commitJob(done) { + if (!req.params.fullUploadPath) { + return done(new Error("Upload path could not be created")); + } + + async.waterfall([ + async.apply(AppdataJob.findById.bind(AppdataJob), jobId), + function(job, cb) { + var fileNamePath = path.join(req.params.fullUploadPath, req.files.file.name); + // Set the actual file name after the file is stored + // on disk (different to the original file name) + job.updateMetadata("filePath", fileNamePath); + + // To tell the scheduler that the job can be started + job.updateMetadata("uploadFinished", true); + + // Store the hostnme where this was uploaded so that the correct + // scheduler can pick this one up. This is to make sure that import + // also works in setups without a shared location + job.updateMetadata("shipHostname", os.hostname()); + + job.save(cb); + } + ], done); + } + + // Upload + async.waterfall([ + // Inject appid, env and domain + injectDetails, + + // Check if the transmitted file id and token are valid + async.apply(storage.checkToken, fileReference, tokenId), + + // Process the actual upload + function(file, callback) { + upload(req, res, callback); + }, + + commitJob + ], function(err, result) { + if (err) { + return failJob(err, function() { + return res.status(500).send({ + message: "Upload failed with " + err + }); + }); + } + + res.status(200).send(result); + }); +}); + +/** + * Host route. 
+ * + * Fetch mbaas host (mbaas url) to determine where resource was stored + * Internal endpoint used by proxy to determine which mbaas should be called to get file + */ +router.get('/host/:resourceId', function(req, res) { + var fileReference = req.params.resourceId; + storage.getFileDetails(fileReference, function(err, found) { + var response = {}; + if (err) { + logger.error("Cannot get data", {err: err}); + res.status(404); + response.message = err; + } else { + response.host = found.host; + } + res.json(response); + }); +}); + +module.exports = router; + diff --git a/lib/storage/index.js b/lib/storage/index.js new file mode 100644 index 0000000..e7ebafc --- /dev/null +++ b/lib/storage/index.js @@ -0,0 +1,243 @@ +var fhconfig = require('fh-config'); +var logger = require('../util/logger').getLogger(); +var os = require("os"); +var FileModel = require("./models/FileSchema").Model; +var URL = require('url'); +var common = require('../util/common'); +var async = require('async'); + +var path = require('path'); +// path.isAbsolute polyfill +if (!path.isAbsolute) { + path.isAbsolute = function(path) { + return path.charAt(0) === '/'; + }; +} + +/** + * fh-storage module + * + * Expose files internal files to be downloaded by clients. + */ +var storage = module.exports; + +/** + * Register new file to be exposed externally with download url. + * + * @param fileLocation - file location in local filesystem + * @param callback + * @callbackreturns fileReference - reference to registered file that should be stored and passed to generateURL method + * + * @exception file does not exist or it's not accessible + * @exception location provided is a folder. Folders aren't supported by current implementation + */ +storage.registerFile = function(fileLocation, callback) { + common.readFileSize(fileLocation, function(err, fileSize) { + if (err) { + return callback(err); + } + var data = { + directory: path.dirname(fileLocation), + fileName: path.basename(fileLocation), + host: os.hostname(), + size: fileSize + }; + var model = new FileModel(data); + model.save(function(err, newModel) { + if (err) { + logger.error("File model save failed", {err: err, data: data}); + return callback(err); + } + return callback(null, newModel); + }); + }); +}; + +/** + * Register a new file for upload. In that case the file is not physically present in the location + * yet but we already create a database entry for it. We do that so we can create an upload URL and + * provide token authentication. + * + * @param futureFileLocation The location the file will be stored + * @param callback + */ +storage.registerFileForUpload = function(futureFileLocation, futureFileSize, callback) { + var data = { + directory: path.dirname(futureFileLocation), + fileName: path.basename(futureFileLocation), + host: os.hostname(), + size: futureFileSize + }; + var model = new FileModel(data); + model.save(function(err, newModel) { + if (err) { + logger.error("File model save failed", {err: err, data: data}); + return callback(err); + } + return callback(null, newModel); + }); +}; + +storage.deleteFile = function(fileId, callback) { + FileModel.findById(fileId, function(err, found) { + if (err || !found) { + logger.warn("Cannot obtain file model " + fileId); + if (err) { + logger.error(err); + } + return callback(new Error("Invalid file id")); + } + found.remove(callback); + }); +}; + + +/** + * Generate URL that will be used to upload or download the resource. For downloads use this URL with + * `GET`. For uploads use `POST`. 
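
Putting registerFile and generateURL together, the typical flow is: register a file that already exists on disk, then mint a time-limited URL for it. A minimal sketch, assuming the storage File model has already been initialised against a mongoose connection; the require path, file path and 600 second expiry are illustrative:

var storage = require('./lib/storage'); // path depends on where the caller lives

var filePath = '/var/tmp/exports/somedomain_dev_submissioncsvexport.zip';

storage.registerFile(filePath, function(err, fileModel) {
  if (err) { return console.error('could not register file', err); }

  // jobId is only needed for upload URLs, so pass null for a plain download link.
  storage.generateURL(fileModel._id, null, 600, function(err, result) {
    if (err) { return console.error('could not generate URL', err); }
    console.log('file can be downloaded from', result.url, 'for the next 10 minutes');
  });
});
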
+ * + * Link will be valid for specified amount of time (expiresIn) + * + * @param fileReference - String reference returned for registration method + * @param expiresIn - number of seconds for URL to be valid. After that time new URL will be required. Can be null + * @param callback + * @callbackreturns url string - full URI to resource that would be + * + * @exception fileReference is invalid. File should be registered first to obtain reference id. + * @exception file no longer exist or file contents changed + * + */ +storage.generateURL = function(fileId, jobId, expiresIn, callback) { + if (expiresIn <= 0) { + expiresIn = fhconfig.value('storage.token_exp_time'); + } + storage.getFileDetails(fileId, function(err, found) { + if (err) { + return callback(err); + } + found.generateToken(expiresIn, function(err, token) { + if (err) { + return callback(err); + } + callback(null, buildURLObj(fileId, token._id.toString(), jobId)); + }); + }); +}; + +/** + * Get file details + * + * @param fileReference - String reference returned for registration method + * @callbackreturns file details including location and size + * + * @exception fileReference is invalid. File should be registered first to obtain reference id. + */ +function getFileDetails(fileId, callback) { + FileModel.findById(fileId, function(err, found) { + if (err || !found) { + logger.warn("Cannot obtain file model " + fileId); + if (err) { + logger.error(err); + } + return callback(new Error("Invalid file id")); + } + callback(null, found); + }); +} +storage.getFileDetails = getFileDetails; + +/** + * Update the name of a file once it's upload has finished. The reason is that + * the file upload middleware will use a random name to avoid clashes. + * + * @param fileId The file ID + * @param newSize The total size of the file + * @param callback + */ +storage.updateFileName = function(fileId, newName, callback) { + FileModel.findById(fileId, function(err, found) { + if (err || !found) { + logger.warn("Cannot obtain file model " + fileId); + if (err) { + logger.error(err); + } + return callback(new Error("Invalid file id")); + } + + found.fileName = newName; + found.save(callback); + }); +}; + +storage.getFileIfValid = function(fileId, token, cb) { + async.waterfall([ + async.apply(storage.checkToken, fileId, token), + function(found, callback) { + var fullPath = path.resolve(found.directory, found.fileName); + common.readFileSize(fullPath, function(err, size) { + if (err) { + err.code = 500; + return callback(err); + } + + if (!size) { + var error = new Error("File no longer exists"); + error.code = 404; + return callback(error); + } + return callback(null, found); + }); + + } + ], function(err, result) { + if (err) { + return cb(err); + } + + cb(null, result); + }); +}; + +storage.checkToken = function(fileId, token, callback) { + getFileDetails(fileId, function(err, found) { + var error; + if (err) { + err.code = 404; + return callback(err); + } + + if (!found.hasValidToken(token)) { + error = new Error("Invalid token"); + error.code = 401; + return callback(error); + } + + return callback(null, found); + }); +}; + +/** + * Build URL object for dowloading file. + */ +function buildURLObj(fileId, token, jobId) { + var urlObj = { + protocol: fhconfig.value('storage.base_url_protocol'), + host: fhconfig.value('storage.base_url_host'), + + // Download route does not require a jobId, but upload route does + // Support both cases + pathname: "/api/storage/" + (jobId ? 
(jobId + '/') : '') + fileId, + query:{token: token} + }; + + var url = URL.format(urlObj); + return { + url: url + }; +} + +/** + * Router export from internal implementation + * @type {express.Router} + */ +storage.router = require('./impl/router.js'); \ No newline at end of file diff --git a/lib/storage/models/FileSchema.js b/lib/storage/models/FileSchema.js new file mode 100644 index 0000000..9e8e8cc --- /dev/null +++ b/lib/storage/models/FileSchema.js @@ -0,0 +1,74 @@ +var Schema = require('mongoose').Schema; +var timestamps = require('mongoose-timestamp'); +var _ = require('underscore'); +var TokenSchema = require('./TokenSchema'); + +var FileSchema = new Schema({ + // File directory + directory: { + type: String, + required: true + }, + // File name + fileName: { + type: String, + required: true + }, + // Mbaas hostname used to determine where data is stored + host: { + type: String, + required: true + }, + tokens: [TokenSchema], + // File size + size: { + type: Number, + required: false + } +}); + +FileSchema.index({fileName: 1}, {unique: false}); + +FileSchema.plugin(timestamps, { + createdAt: 'created', + updatedAt: 'modified' +}); + +/** + * Generates a new token + * @param {Function(TokenSchema)} cb node-style callback + */ +FileSchema.methods.generateToken = function(expiresIn, cb) { + if (!expiresIn || expiresIn < 0) { + expiresIn = -1; + } + var currentTime = new Date().getTime(); + var timeToLive = currentTime + (expiresIn * 1000); + this.tokens.push({ + timeToLive: timeToLive + }); + var self = this; + + this.save(function(err) { + if (err) { + return cb(err); + } + cb(null, self.tokens[self.tokens.length - 1]); + }); +}; + +FileSchema.methods.hasValidToken = function(id) { + var token = _.find(this.tokens, function(t) { + return t._id.toString() === id; + }); + // token found and not expired + return !!token && !token.isExpired(); +}; + +exports.FileSchema = FileSchema; +exports.TokenSchema = TokenSchema; + +exports.createModel = function(connection) { + exports.Model = connection.model('File', FileSchema); + return exports.Model; +}; diff --git a/lib/storage/models/TokenSchema.js b/lib/storage/models/TokenSchema.js new file mode 100644 index 0000000..c84a108 --- /dev/null +++ b/lib/storage/models/TokenSchema.js @@ -0,0 +1,20 @@ +var Schema = require('mongoose').Schema; +var timestamps = require('mongoose-timestamp'); + +var TokenSchema = new Schema({ + timeToLive: Date +}); + +TokenSchema.plugin(timestamps, { + createdAt: 'created' +}); + +TokenSchema.methods.equals = function(otherToken) { + return this._id === otherToken._id; +}; + +TokenSchema.methods.isExpired = function() { + return this.timeToLive.getTime() < new Date().getTime(); +}; + +module.exports = TokenSchema; \ No newline at end of file diff --git a/lib/util/amqp.js b/lib/util/amqp.js new file mode 100644 index 0000000..5ca2fc7 --- /dev/null +++ b/lib/util/amqp.js @@ -0,0 +1,57 @@ +'use strict'; +var amqp = require('fh-amqp-js'); +var fs = require('fs'); +var _ = require('underscore'); + +/** + * Connect to an AMQP message bus based on + * the configuration provided + * @param configuration + * @param onReady a callback function will be invoked when the connection is ready. + * Note: this function will be invoked *EVERYTIME* the connection is established + */ +exports.connect = function(config, onReady) { + var internalConnection; + var logger = require('./logger').getLogger(); + if (!config || !config.fhamqp || !config.fhamqp.enabled) { + logger.error("AMQP not enabled. 
Please check conf.json file."); + return; + } + internalConnection = new amqp.AMQPManager(config.fhamqp.vhosts.internal); + function error(err) { + logger.error("error when connecting to amqp:" + err); + throw new Error("amqp failed not continuing " + err); + } + function ready() { + logger.info("connected to amqp "); + if (onReady && _.isFunction(onReady)) { + onReady(); + } + } + internalConnection.on('error', error); + internalConnection.on('ready', ready); + internalConnection.connectToCluster(); + return internalConnection; +}; + +exports.getExchangePrefix = function() { + var fhconfig = require('fh-config'); + var logger = require('./logger').getLogger(); + + var prefix = ''; + var ops_info_file_path = fhconfig.value('ops_env_file'); + if (fs.existsSync(ops_info_file_path)) { + var ops_info = require(ops_info_file_path); + if (ops_info && ops_info.env && ops_info.env.id) { + prefix = ops_info.env.id; + } + } else { + logger.error("Cannot find ops info file. ", {location: ops_info_file_path}); + } + logger.warn('Connecting to amqp with prefix ' + prefix + '. ops file path ' + ops_info_file_path); + return prefix; +}; + +exports.VHOSTS = { + "INTERNAL":"internal" +}; diff --git a/lib/util/common.js b/lib/util/common.js new file mode 100755 index 0000000..a6399ee --- /dev/null +++ b/lib/util/common.js @@ -0,0 +1,201 @@ +var fs = require('fs'); +var path = require('path'); +var util = require('util'); +var _ = require('underscore'); +var assert = require('assert'); +var logger = require('../util/logger').getLogger(); + +/** + * First step to creating common error codes. + * + * TODO: Make A Module.. + * @param err + * @param msg + * @param code + * @returns {{error: string}} + */ +function buildErrorObject(params) { + params = params || {}; + + var httpCode = params.httpCode || 500; + //If the userDetail is already set, not building the error object again. 
+ if (params.err && params.err.userDetail) { + return { + errorFields: params.err, + httpCode: httpCode + }; + } + + var err = params.err || {message: "Unexpected Error"}; + var msg = params.msg || params.err.message || "Unexpected Error"; + //Custom Error Code + var code = params.code || "FH-MBAAS-ERROR"; + + var response = { + errorFields: { + userDetail: msg, + systemDetail: msg + ' - ' + util.inspect(err), + code: code + }, + httpCode: httpCode + }; + if (params.explain) { + response.errorFields.explain = params.explain; + } + + return response; +} + +// set default response headers +function setResponseHeaders(res) { + if (res.setHeader) { + var contentType = res.getHeader('content-type'); + if (!contentType) { + res.setHeader('Content-Type', 'application/json'); + } + } +} + +// get version from package.json (note this is softly cached) +var pkg; +function getVersion(cb) { + if (pkg) { + return cb(null, pkg.version); + } + + fs.readFile(path.join(__dirname, '../../package.json'), function(err, data) { + if (err) { + return cb(err, null); + } + pkg = JSON.parse(data); + return cb(null, pkg.version); + }); +} + +function logError(err, msg, code, req) { + var e = err; + if (typeof err === 'object') { + e = util.inspect(err); + } + logger.error({err: e, code: code, req: req}, msg); +} + +// generic error handler +function handleError(err, msg, code, req, res) { + logError(err, msg, code, req); + + var response = buildErrorObject({ + err: err, + msg: msg, + httpCode: code + }); + + logger.debug("Handling Error", {error: response}); + + res.statusCode = response.httpCode; + res.end(JSON.stringify(response.errorFields)); +} + +function getIPAddress(req) { + var ipAddress = "nonset"; // default value + + if (req.headers && req.headers['x-forwarded-for']) { + ipAddress = req.headers['x-forwarded-for']; // this may be a comma seperated list of addresses added by proxies and load balancers + } else if (req.connection && req.connection.remoteAddress) { + ipAddress = req.connection.remoteAddress; + } + + return ipAddress; +} + +// converts an object with arbitrary keys into an array sorted by keys +function sortObject(obj) { + assert.ok(_.isObject(obj), 'Parameter should be an object! - ' + util.inspect(obj)); + assert.ok(!_.isArray(obj), 'Parameter should be an object, got array: ' + util.inspect(obj)); + + var sortedKeys = _.keys(obj).sort(); + var sortedObjs = []; + + _.each(sortedKeys, function(key) { + var val = {}; + val[key] = obj[key]; + sortedObjs.push(val); + }); + + return sortedObjs; +} + +function randomPassword() { + var n = 13; + var a = 'qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890'; + return make_passwd(n,a); +} + +function randomUser() { + var n = 12; + var a = 'qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890'; + var randomStr = make_passwd(n, a); + return 'u' + randomStr; +} + +function make_passwd(n,a) { + var index = (Math.random() * (a.length - 1)).toFixed(0); + return n > 0 ? 
a[index] + make_passwd(n - 1, a) : ''; +} + +function checkDbConf(db) { + /*eslint-disable */ + assert.ok(null != db.host, 'db host is null'); //jshint ignore:line + assert.ok(null != db.port, 'db port is null'); //jshint ignore:line + assert.ok(null != db.name, 'db name is null'); //jshint ignore:line + assert.ok(null != db.user, 'db user is null'); //jshint ignore:line + assert.ok(null != db.pass, 'db pass is null'); //jshint ignore:line + /*eslint-enable */ +} + + +function formatDbUri(dbConf, replicaSetName) { + var dbUrl = util.format('mongodb://%s:%s@%s:%s/%s', dbConf.user, dbConf.pass, dbConf.host, dbConf.port, dbConf.name); + if (replicaSetName && replicaSetName.length > 0) { + dbUrl = dbUrl + "?replicaSet=" + replicaSetName; + } + return dbUrl; +} + +/** + * Read file and return it size + * If provided location is invalid first argument would contain error. + * + * @param fileLocation + * @param callback + */ +function readFileSize(fileLocation, callback) { + // Avoid relative paths also for security issues + if (!path.isAbsolute(fileLocation)) { + return callback(new Error('Path must be an absolute path!')); + } + + fs.stat(fileLocation, function(err, stats) { + if (err) { + return callback(err); + } + if (!stats.isFile()) { + return callback(new Error(fileLocation + " is not a file")); + } + return callback(null, stats.size); + }); +} + +exports.setResponseHeaders = setResponseHeaders; +exports.getVersion = getVersion; +exports.handleError = handleError; +exports.buildErrorObject = buildErrorObject; +exports.logError = logError; +exports.getIPAddress = getIPAddress; +exports.sortObject = sortObject; +exports.randomPassword = randomPassword; +exports.randomUser = randomUser; +exports.checkDbConf = checkDbConf; +exports.formatDbUri = formatDbUri; +exports.make_passwd = make_passwd; +exports.readFileSize = readFileSize; diff --git a/lib/util/configvalidation.js b/lib/util/configvalidation.js new file mode 100755 index 0000000..365d2a7 --- /dev/null +++ b/lib/util/configvalidation.js @@ -0,0 +1,78 @@ +var assert = require('assert'); +var util = require('util'); +var exceptionMessages = {}; + +exceptionMessages.MISSING_CONFIG = "The Config file %s or Object is missing!"; +exceptionMessages.MISSING_CONFIG_SECTION = "Config section %s missing!"; +exceptionMessages.UNPARSABLE_CONFIG = "The config file %s was unparsable %s!"; +exceptionMessages.CONFIG_REMOVED = "The Config setting: %s should be removed!"; + +// Validate our expected config +exports.configvalidation = function(config) { + var cfg = config; + assert('object' === typeof cfg, exceptionMessages.MISSING_CONFIG); + + assert.equal(true, cfg.fhmbaas.hasOwnProperty("port"), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'port')); + + assert.equal(true, cfg.hasOwnProperty("mongo"), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'mongo')); + assert.equal(true, cfg.mongo.hasOwnProperty("enabled"), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'mongo.enabled')); + assert.equal(true, cfg.mongo.hasOwnProperty("host"), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'mongo.host')); + assert.equal(true, cfg.mongo.hasOwnProperty("port"), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'mongo.port')); + assert.equal(true, cfg.mongo.hasOwnProperty("name"), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'mongo.name')); + + assert.equal(true, cfg.hasOwnProperty('fhditch'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhditch')); + assert.equal(true, cfg.fhditch.hasOwnProperty('host'), 
util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhditch.host')); + assert.equal(true, cfg.fhditch.hasOwnProperty('port'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhditch.port')); + assert.equal(true, cfg.fhditch.hasOwnProperty('protocol'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhditch.protocol')); + + assert.equal(true, cfg.hasOwnProperty('fhdfc'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhdfc')); + assert.equal(true, cfg.fhdfc.hasOwnProperty('dynofarm'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhdfc.dynofarm')); + assert.equal(true, cfg.fhdfc.hasOwnProperty('username'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhdfc.username')); + assert.equal(true, cfg.fhdfc.hasOwnProperty('_password'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhdfc._password')); + assert.equal(true, cfg.fhdfc.hasOwnProperty('loglevel'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhdfc.loglevel')); + assert.equal(true, cfg.fhdfc.hasOwnProperty('cache_timeout'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhdfc.cache_timeout')); + + assert.equal(true, cfg.hasOwnProperty('fhamqp'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp')); + assert.equal(true, cfg.fhamqp.hasOwnProperty('enabled'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.enabled')); + assert.equal(true, cfg.fhamqp.hasOwnProperty('max_connection_retry'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.max_connection_retry')); + assert.equal(true, cfg.fhamqp.hasOwnProperty('ssl'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.ssl')); + assert.equal(true, cfg.fhamqp.hasOwnProperty('vhosts'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.vhosts')); + assert.equal(true, cfg.fhamqp.vhosts.hasOwnProperty('events'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.vhosts.events')); + assert.equal(true, cfg.fhamqp.vhosts.events.hasOwnProperty('clusterNodes'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.vhosts.events.name')); + + assert.equal(true, cfg.fhamqp.vhosts.hasOwnProperty('internal'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.vhosts.events')); + assert.equal(true, cfg.fhamqp.vhosts.internal.hasOwnProperty('clusterNodes'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.vhosts.internal.name')); + assert.equal(true, cfg.fhamqp.hasOwnProperty('app'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.app')); + assert.equal(true, cfg.fhamqp.app.hasOwnProperty('enabled'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhamqp.app.enabled')); + + assert.equal(true, cfg.hasOwnProperty('fhmessaging'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging')); + assert.equal(true, cfg.fhmessaging.hasOwnProperty('enabled'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.enabled')); + assert.equal(true, cfg.fhmessaging.hasOwnProperty('host'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.host')); + assert.equal(true, cfg.fhmessaging.hasOwnProperty('protocol'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.protocol')); + assert.equal(true, cfg.fhmessaging.hasOwnProperty('port'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.port')); + assert.equal(true, cfg.fhmessaging.hasOwnProperty('path'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.path')); + assert.equal(true, 
+  assert.equal(true, cfg.fhmessaging.hasOwnProperty('realtime'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.realtime'));
+  assert.equal(true, cfg.fhmessaging.hasOwnProperty('files'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.files'));
+  assert.equal(true, cfg.fhmessaging.files.hasOwnProperty('recovery_file'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.files.recovery_file'));
+  assert.equal(true, cfg.fhmessaging.files.hasOwnProperty('backup_file'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhmessaging.files.backup_file'));
+
+  assert.equal(true, cfg.hasOwnProperty('fhstats'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhstats'));
+  assert.equal(true, cfg.fhstats.hasOwnProperty('enabled'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhstats.enabled'));
+  assert.equal(true, cfg.fhstats.hasOwnProperty('host'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhstats.host'));
+  assert.equal(true, cfg.fhstats.hasOwnProperty('port'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhstats.port'));
+  assert.equal(true, cfg.fhstats.hasOwnProperty('protocol'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhstats.protocol'));
+
+
+  assert.equal(true, cfg.hasOwnProperty('fhredis'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhredis'));
+  assert.equal(true, cfg.fhredis.hasOwnProperty('host'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhredis.host'));
+  assert.equal(true, cfg.fhredis.hasOwnProperty('port'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhredis.port'));
+  assert.equal(true, cfg.fhredis.hasOwnProperty('password'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'fhredis.password'));
+
+
+  assert.equal(true, cfg.hasOwnProperty('crash_monitor'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'crash_monitor'));
+  assert.equal(true, cfg.crash_monitor.hasOwnProperty('min_num_crashes'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'crash_monitor.min_num_crashes'));
+  assert.equal(true, cfg.crash_monitor.hasOwnProperty('tolerance'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'crash_monitor.tolerance'));
+  assert.equal(true, cfg.crash_monitor.hasOwnProperty('base_time_seconds'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'crash_monitor.base_time_seconds'));
+  assert.equal(true, cfg.crash_monitor.hasOwnProperty('sample_time_hrs'), util.format(exceptionMessages.MISSING_CONFIG_SECTION, 'crash_monitor.sample_time_hrs'));
+};
diff --git a/lib/util/dfutils.js b/lib/util/dfutils.js
new file mode 100644
index 0000000..a98c6c3
--- /dev/null
+++ b/lib/util/dfutils.js
@@ -0,0 +1,122 @@
+// In most clusters, the dyno name will be <domain>-<env>; in older ones it will be just <domain>
+var util = require('util');
+var config = require('fh-config');
+var _ = require('underscore');
+var dfc;
+
+if (!config.value('openshift3')) {
+  var fhdfc = require('fh-dfc');
+  dfc = fhdfc(config.value('fhdfc'));
+}
+
+var cache = {}; //TODO - move this to redis.
+
+var cacheTimeoutValue = config.value("fhdfc.cache_timeout");
+
+//If the cache timeout value is not set, then the process should throw an error.
+//This is because the interval will consume CPU if invalid.
+if (!_.isNumber(cacheTimeoutValue) || cacheTimeoutValue <= 200) {
+  throw "Invalid Config Value For cache_timeout. Must be a Number greater than 200 (ms)";
+}
+
+var cacheInterval = setInterval(function() {
+  cache = {};
+}, cacheTimeoutValue);
+
+function checkDynoNameForApp(dynoname, appname, cb) {
+  dfc.dynos([dynoname], function(err) {
+    if (err) {
+      return cb('Problems reaching Dyno ' + dynoname + ' app:' + appname + ' err: ' + util.inspect(err));
+    } else {
+      var cmd = 'read-app';
+      dfc[cmd]([dynoname, appname], function(err) {
+        if (err) {
+          return cb('Failed to find app ' + appname + ' in dyno : ' + dynoname + ' err: ' + util.inspect(err));
+        } else {
+          return cb(null, dynoname);
+        }
+      });
+    }
+  });
+}
+
+function getDynoNameFromDynoFarm(domain, env, appname, cb) {
+  var dynoName = domain + '-' + env;
+  var cacheKey = domain + '-' + env;
+  if (cache[cacheKey]) {
+    return cb(null, cache[cacheKey]);
+  }
+  checkDynoNameForApp(dynoName, appname, function(err) {
+    if (err) {
+      dynoName = domain;
+      checkDynoNameForApp(dynoName, appname, function(err) {
+        if (err) {
+          return cb('Can not find dyno for app :' + appname + ' in domain : ' + domain + ' env: ' + env + ' err: ' + util.inspect(err));
+        } else {
+          cache[cacheKey] = dynoName;
+          return cb(null, dynoName);
+        }
+      });
+    } else {
+      cache[cacheKey] = dynoName;
+      return cb(null, dynoName);
+    }
+  });
+}
+
+function stopApp(domain, env, app, cb) {
+  getDynoNameFromDynoFarm(domain, env, app, function(err, dynoName) {
+    if (err) {
+      return cb(err);
+    }
+    var cmd = 'stop-app';
+    dfc[cmd]([dynoName, app], function(err) {
+      if (err) {
+        return cb(err);
+      }
+      return cb();
+    });
+  });
+}
+
+function migrateAppDb(action, domain, env, app, cb) {
+  getDynoNameFromDynoFarm(domain, env, app, function(err, dynoName) {
+    if (err) {
+      return cb(err);
+    }
+    var cmd = 'appmigrate';
+    dfc[cmd]([action, dynoName, app], function(err) {
+      if (err) {
+        return cb(err);
+      }
+      return cb();
+    });
+  });
+}
+
+function reloadEnv(domain, env, app, cb) {
+  getDynoNameFromDynoFarm(domain, env, app, function(err, dynoName) {
+    if (err) {
+      return cb(err);
+    }
+    var cmd = 'env';
+    dfc[cmd](['get', dynoName, app], function(err, envs) {
+      if (err) {
+        return cb(err);
+      }
+      dfc[cmd](['set', dynoName, app, envs, domain, env], function(err) {
+        if (err) {
+          return cb(err);
+        }
+        return cb();
+      });
+    });
+  });
+}
+
+exports.stopApp = stopApp;
+exports.migrateAppDb = migrateAppDb;
+exports.reloadEnv = reloadEnv;
+exports.clearInterval = function() {
+  clearInterval(cacheInterval);
+};
diff --git a/lib/util/ditchhelper.js b/lib/util/ditchhelper.js
new file mode 100644
index 0000000..02e2212
--- /dev/null
+++ b/lib/util/ditchhelper.js
@@ -0,0 +1,104 @@
+var request = require('request');
+var fhconfig = require('fh-config');
+var logger = require('../util/logger').getLogger();
+var url = require('url');
+
+function getDitchUrl(path) {
+  return url.format({
+    protocol: fhconfig.value('fhditch.protocol'),
+    hostname: fhconfig.value('fhditch.host'),
+    port: fhconfig.value('fhditch.port'),
+    pathname: path
+  });
+}
+
+function doMigrate(domain, env, appName, securityToken, appGuid, coreHost, cb) {
+  var url = getDitchUrl('/admin/migratedb');
+  request.post({
+    url: url,
+    headers: {
+      'x-fh-service-key': fhconfig.value('fhditch.service_key')
+    },
+    json: {
+      securityToken: securityToken,
+      domain: domain,
+      env: env,
+      appName: appName,
+      appGuid: appGuid,
+      coreHost: coreHost
+    }
+  }, function(err, response, body) {
+    if (err) {
+      logger.error({error: err, body: body}, 'Got error when calling ditch migratedb endpoint');
+      return cb(err, response);
+    } else {
+      return cb(null, body);
+    }
+  });
+}
+
+function checkStatus(cb) {
+  var url = getDitchUrl('/sys/info/status');
+  request.get({
+    url: url,
+    json: true,
+    strictSSL: false
+  }, function(err, response, body) {
+    if (err) {
+      return cb(err);
+    }
+    return cb(null, {statusCode: response.statusCode, status: body.status, message: body.message});
+  });
+}
+
+function removeAppCollection(appName, cb) {
+  var url = getDitchUrl('/admin/dropCollection');
+  request.del({
+    url: url,
+    headers: {
+      'x-fh-service-key': fhconfig.value('fhditch.service_key')
+    },
+    json: {
+      appName: appName
+    },
+    strictSSL: false
+  }, function(err, response, body) {
+    if (err) {
+      return cb(err);
+    }
+    return cb(null, {response: response, body: body});
+  });
+}
+
+function getAppInfo(appName, cb) {
+  var url = getDitchUrl('/conn/shared');
+
+  logger.info('Invoking ditch:', {url: url, appName: appName});
+
+  request(
+    {
+      url: url,
+      headers: {
+        'x-fh-service-key': fhconfig.value('fhditch.service_key')
+      },
+      qs: {
+        app: appName
+      }
+    }, function(err, res, body) {
+      if (err) {
+        return cb(err);
+      }
+      if (res.statusCode !== 200) {
+        return cb('Http Error: ' + res.statusCode, body);
+      }
+
+      return cb(null, JSON.parse(body));
+    }
+  );
+}
+
+
+exports.doMigrate = doMigrate;
+exports.checkStatus = checkStatus;
+exports.removeAppCollection = removeAppCollection;
+exports.getAppInfo = getAppInfo;
\ No newline at end of file
diff --git a/lib/util/logger.js b/lib/util/logger.js
new file mode 100644
index 0000000..6f69e71
--- /dev/null
+++ b/lib/util/logger.js
@@ -0,0 +1,33 @@
+var _ = require('underscore');
+var fh_logger = require('fh-logger');
+var logger;
+
+function setLogger(logr) {
+  logger = logr;
+}
+
+// If logger hasn't been previously set (which can happen in the tests, for example),
+// default to a very basic bunyan logger.
+// If the tests need a better logger they can create one in setUp as required.
+function getLogger() {
+  if (logger) {
+    return logger;
+  }
+
+  logger = fh_logger.createLogger({
+    name: 'test-fh-mbaas',
+    streams: [
+      {
+        "type": "stream",
+        "level": "error",
+        "stream": "process.stdout"
+      }
+    ]
+  });
+  return logger;
+}
+
+module.exports = {
+  getLogger: getLogger,
+  setLogger: setLogger
+};
diff --git a/lib/util/mongo.js b/lib/util/mongo.js
new file mode 100644
index 0000000..40f5a2b
--- /dev/null
+++ b/lib/util/mongo.js
@@ -0,0 +1,74 @@
+var MongoClient = require('mongodb').MongoClient;
+var logger = require('../util/logger').getLogger();
+
+function handleError(db, err, message, cb) {
+  if (db && db.close) {
+    db.close();
+  }
+  logger.error(err, message);
+  if (cb) {
+    return cb(err);
+  }
+}
+
+// create a database, including user name and pwd
+function createDb(config, dbUser, dbUserPass, dbName, cb) {
+  logger.trace({user: dbUser, pwd: dbUserPass, name: dbName}, 'creating new database');
+
+  MongoClient.connect(config.mongoUrl, function(err, db) {
+    if (err) {
+      return handleError(null, err, 'cannot open mongodb connection', cb);
+    }
+
+    var targetDb = db.db(dbName);
+    targetDb.authenticate(config.mongo.admin_auth.user, config.mongo.admin_auth.pass, {'authSource': 'admin'}, function(err) {
+      if (err) {
+        return handleError(db, err, 'can not authenticate admin user', cb);
+      }
+
+      // add user to database
+      targetDb.addUser(dbUser, dbUserPass, function(err, user) {
+        if (err) {
+          return handleError(db, err, 'can not add user', cb);
+        }
+        logger.trace({user: user, database: dbName}, 'mongo added new user');
+
+        db.close();
+        return cb();
+      });
+    });
+  });
+}
+
+//drop a database
+function dropDb(config, dbUser, dbName, cb) {
+  logger.trace({user: dbUser, name: dbName}, 'drop database');
+  MongoClient.connect(config.mongoUrl, function(err, dbObj) {
+    if (err) {
+      return handleError(null, err, 'cannot open mongodb connection', cb);
+    }
+
+    var dbToDrop = dbObj.db(dbName);
+    dbToDrop.authenticate(config.mongo.admin_auth.user, config.mongo.admin_auth.pass, {'authSource': 'admin'}, function(err) {
+      if (err) {
+        return handleError(dbObj, err, 'can not authenticate admin user', cb);
+      }
+
+      dbToDrop.removeUser(dbUser, function(err) {
+        if (err) {
+          logger.error(err, 'failed to remove user');
+        }
+        dbToDrop.dropDatabase(function(err) {
+          if (err) {
+            return handleError(dbObj, err, 'failed to drop database', cb);
+          }
+          dbObj.close();
+          return cb();
+        });
+      });
+    });
+  });
+}
+
+exports.createDb = createDb;
+exports.dropDb = dropDb;
diff --git a/lib/util/requiredvalidation.js b/lib/util/requiredvalidation.js
new file mode 100755
index 0000000..3e60c4d
--- /dev/null
+++ b/lib/util/requiredvalidation.js
@@ -0,0 +1,58 @@
+
+var validation = [
+  "crash_monitor",
+  "crash_monitor.enabled",
+  "crash_monitor.min_num_crashes",
+  "crash_monitor.max_num_crashes",
+  "crash_monitor.tolerance",
+  "crash_monitor.base_time_seconds",
+  "crash_monitor.sample_time_hrs",
+  "email",
+  "email.transport",
+  "email.alert_email_from",
+  "email.alert_email_bcc",
+  "fhmbaas.port",
+  "mongo.enabled",
+  "mongo.host",
+  "mongo.port",
+  "mongo.name",
+  "fhditch.host",
+  "fhditch.port",
+  "fhditch.protocol",
+  "fhdfc.dynofarm",
+  "fhdfc.username",
+  "fhdfc._password",
+  "fhdfc.loglevel",
+  "fhdfc.cache_timeout",
+  "fhamqp.enabled",
+  "fhamqp.max_connection_retry",
+  "fhamqp.ssl",
+  "fhamqp.vhosts",
+  "fhamqp.vhosts.events",
+  "fhamqp.vhosts.internal",
+  "fhamqp.vhosts.events.clusterNodes",
+  "fhamqp.vhosts.internal.clusterNodes",
+  "fhamqp.app",
+  "fhamqp.app.enabled",
+  "fhamqp.nodes",
"fhmessaging.enabled", + "fhmessaging.host", + "fhmessaging.protocol", + "fhmessaging.port", + "fhmessaging.path", + "fhmessaging.cluster", + "fhmessaging.realtime", + "fhmessaging.files", + "fhmessaging.files.recovery_file", + "fhmessaging.files.backup_file", + "fhmetrics.host", + "fhmetrics.port", + "fhmetrics.protocol", + "fhmetrics.apikey", + "fhstats.enabled", + "fhstats.host", + "fhstats.port", + "fhstats.protocol" +]; + +module.exports = validation; diff --git a/lib/util/supercoreApiClient.js b/lib/util/supercoreApiClient.js new file mode 100644 index 0000000..1605958 --- /dev/null +++ b/lib/util/supercoreApiClient.js @@ -0,0 +1,57 @@ +var request = require('request'); +var _ = require('underscore'); + +function buildUrl(base,domain, env, appGuid) { + if (base.indexOf("http")!==0) { + base = "https://" + base; + } + return base + "/api/v2/mbaas/" + domain + "/" + env + "/" + "apps" + "/" + appGuid + "/mbaasMessage"; +} + +/** + * Endpoint used to notify supercore with speicified payload. + * + * messageType - type of message to sent - requires specified handler to be defined in supercore. + * data - data to send + * coreHost - base url to supercore + */ +function sendMessageToSupercore(coreHost, requestType, data, cb) { + var logger = require('../util/logger').getLogger(); + // Setting request type to payload + data.requestType = requestType; + var url = buildUrl(coreHost, data.domain, data.env, data.appGuid); + + var reqParams = { + url: url, + json: data, + headers: { + "User-Agent": "FHMBAAS" + } + }; + + //Setting the Reqest ID header if required. + if (_.isFunction(logger.getRequestId)) { + var reqId = logger.getRequestId(); + if (reqId) { + reqParams.headers[logger.requestIdHeader] = reqId; + } + } + + logger.debug('send status update back to core', {url: url, data: data}); + request.post(reqParams, function(err, response, body) { + logger.debug('got response back from core', {status: response && response.statusCode, body: body}); + if (err) { + logger.error({ + error: err, + status: response && response.statusCode, + body: body + }, 'Got error when calling supercore endpoint'); + return cb(err, response); + } else { + return cb(null, body); + } + }); +} + +// Sends message back to supercore +exports.sendMessageToSupercore = sendMessageToSupercore; \ No newline at end of file diff --git a/lib/util/validation.js b/lib/util/validation.js new file mode 100755 index 0000000..b9cfdee --- /dev/null +++ b/lib/util/validation.js @@ -0,0 +1,22 @@ +var common = require('./common.js'); + +function requireParam(name, req, res) { + + var param = req.body[name]; + if (!param || param.length <=0) { + common.handleError(new Error('Missing param ' + name), 'Missing param ' + name, 400, req, res); + return null; + } + return param; +} + +function validateParamPresent(name, req) { + var param = req.body[name]; + + return !(param && param.length > 0 ); +} + +module.exports = { + requireParam: requireParam, + validateParamPresent: validateParamPresent +}; diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json new file mode 100644 index 0000000..6aa48ac --- /dev/null +++ b/npm-shrinkwrap.json @@ -0,0 +1,6791 @@ +{ + "name": "fh-mbaas", + "version": "5.3.12-BUILD-NUMBER", + "dependencies": { + "archiver": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/archiver/-/archiver-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-1.2.0.tgz", + "dependencies": { + "archiver-utils": { + "version": "1.3.0", + "from": 
"https://registry.npmjs.org/archiver-utils/-/archiver-utils-1.3.0.tgz", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-1.3.0.tgz", + "dependencies": { + "graceful-fs": { + "version": "4.1.10", + "from": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.10.tgz", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.10.tgz" + }, + "lazystream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.0.tgz" + }, + "normalize-path": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz" + } + } + }, + "async": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/async/-/async-2.1.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-2.1.2.tgz" + }, + "buffer-crc32": { + "version": "0.2.6", + "from": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.6.tgz", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.6.tgz" + }, + "glob": { + "version": "7.1.1", + "from": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "dependencies": { + "fs.realpath": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + }, + "inflight": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "minimatch": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.6", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.2", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.4.0", + "from": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + } + }, + "lodash": { + "version": "4.17.2", + "from": "https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz" + }, + "readable-stream": { + "version": "2.2.2", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.2.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.2.tgz", + "dependencies": { + "buffer-shims": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz" + }, + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + }, + "tar-stream": { + "version": "1.5.2", + "from": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.2.tgz", + "dependencies": { + "bl": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + 
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + } + } + }, + "end-of-stream": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.1.0.tgz", + "dependencies": { + "once": { + "version": "1.3.3", + "from": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "resolved": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + } + } + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + }, + "zip-stream": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/zip-stream/-/zip-stream-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-1.1.0.tgz", + "dependencies": { + "compress-commons": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/compress-commons/-/compress-commons-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-1.1.0.tgz", + "dependencies": { + "crc32-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-1.0.0.tgz" + }, + "normalize-path": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz" + } + } + } + } + } + } + }, + "async": { + "version": "1.5.2", + "from": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz" + }, + "body-parser": { + "version": "1.14.1", + "from": "https://registry.npmjs.org/body-parser/-/body-parser-1.14.1.tgz", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.14.1.tgz", + "dependencies": { + "bytes": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/bytes/-/bytes-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-2.1.0.tgz" + }, + "content-type": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/content-type/-/content-type-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.2.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "dependencies": { + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + } + } + }, + "depd": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz" + }, + "http-errors": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/http-errors/-/http-errors-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.3.1.tgz", + "dependencies": { + "inherits": { + "version": "2.0.3", + "from": 
"https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "statuses": { + "version": "1.3.0", + "from": "https://registry.npmjs.org/statuses/-/statuses-1.3.0.tgz", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.0.tgz" + } + } + }, + "iconv-lite": { + "version": "0.4.12", + "from": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.12.tgz", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.12.tgz" + }, + "on-finished": { + "version": "2.3.0", + "from": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "dependencies": { + "ee-first": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + } + } + }, + "qs": { + "version": "5.1.0", + "from": "https://registry.npmjs.org/qs/-/qs-5.1.0.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-5.1.0.tgz" + }, + "raw-body": { + "version": "2.1.7", + "from": "https://registry.npmjs.org/raw-body/-/raw-body-2.1.7.tgz", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.1.7.tgz", + "dependencies": { + "bytes": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/bytes/-/bytes-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-2.4.0.tgz" + }, + "iconv-lite": { + "version": "0.4.13", + "from": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.13.tgz", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.13.tgz" + }, + "unpipe": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" + } + } + }, + "type-is": { + "version": "1.6.13", + "from": "https://registry.npmjs.org/type-is/-/type-is-1.6.13.tgz", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.13.tgz", + "dependencies": { + "media-typer": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + } + } + } + } + }, + "bunyan": { + "version": "1.5.1", + "from": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz", + "dependencies": { + "dtrace-provider": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "dependencies": { + "nan": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz" + } + } + }, + "mv": { + "version": "2.1.1", + "from": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "dependencies": { + "ncp": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "resolved": 
"https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz" + }, + "rimraf": { + "version": "2.4.5", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "dependencies": { + "glob": { + "version": "6.0.4", + "from": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "dependencies": { + "inflight": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "minimatch": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.6", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.2", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.4.0", + "from": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + } + } + } + } + } + }, + "safe-json-stringify": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz" + } + } + }, + "cors": { + "version": "2.7.1", + "from": "https://registry.npmjs.org/cors/-/cors-2.7.1.tgz", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.7.1.tgz", + "dependencies": { + "vary": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/vary/-/vary-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.0.tgz" + } + } + }, + "cuid": { + "version": "1.3.8", + "from": "https://registry.npmjs.org/cuid/-/cuid-1.3.8.tgz", + "resolved": "https://registry.npmjs.org/cuid/-/cuid-1.3.8.tgz", + "dependencies": { + "browser-fingerprint": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/browser-fingerprint/-/browser-fingerprint-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/browser-fingerprint/-/browser-fingerprint-0.0.1.tgz" + }, + "core-js": { + "version": "1.2.7", + 
"from": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz" + }, + "node-fingerprint": { + "version": "0.0.2", + "from": "https://registry.npmjs.org/node-fingerprint/-/node-fingerprint-0.0.2.tgz", + "resolved": "https://registry.npmjs.org/node-fingerprint/-/node-fingerprint-0.0.2.tgz" + } + } + }, + "diskspace": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/diskspace/-/diskspace-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/diskspace/-/diskspace-1.0.2.tgz" + }, + "express": { + "version": "4.14.0", + "from": "https://registry.npmjs.org/express/-/express-4.14.0.tgz", + "resolved": "https://registry.npmjs.org/express/-/express-4.14.0.tgz", + "dependencies": { + "accepts": { + "version": "1.3.3", + "from": "https://registry.npmjs.org/accepts/-/accepts-1.3.3.tgz", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.3.tgz", + "dependencies": { + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + }, + "negotiator": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" + } + } + }, + "array-flatten": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" + }, + "content-disposition": { + "version": "0.5.1", + "from": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.1.tgz" + }, + "content-type": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/content-type/-/content-type-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.2.tgz" + }, + "cookie": { + "version": "0.3.1", + "from": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz" + }, + "cookie-signature": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "dependencies": { + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + } + } + }, + "depd": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz" + }, + "encodeurl": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.1.tgz" + }, + "escape-html": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" + }, + "etag": 
{ + "version": "1.7.0", + "from": "https://registry.npmjs.org/etag/-/etag-1.7.0.tgz", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.7.0.tgz" + }, + "finalhandler": { + "version": "0.5.0", + "from": "https://registry.npmjs.org/finalhandler/-/finalhandler-0.5.0.tgz", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-0.5.0.tgz", + "dependencies": { + "statuses": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz" + }, + "unpipe": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" + } + } + }, + "fresh": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/fresh/-/fresh-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.3.0.tgz" + }, + "merge-descriptors": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" + }, + "methods": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + }, + "on-finished": { + "version": "2.3.0", + "from": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "dependencies": { + "ee-first": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + } + } + }, + "parseurl": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.1.tgz" + }, + "path-to-regexp": { + "version": "0.1.7", + "from": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" + }, + "proxy-addr": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.1.2.tgz", + "dependencies": { + "forwarded": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.0.tgz" + }, + "ipaddr.js": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.1.1.tgz" + } + } + }, + "qs": { + "version": "6.2.0", + "from": "https://registry.npmjs.org/qs/-/qs-6.2.0.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.2.0.tgz" + }, + "range-parser": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + }, + "send": { + "version": "0.14.1", + "from": "https://registry.npmjs.org/send/-/send-0.14.1.tgz", + "resolved": "https://registry.npmjs.org/send/-/send-0.14.1.tgz", + "dependencies": { + "destroy": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz" + }, + "http-errors": { + "version": "1.5.0", + "from": 
"https://registry.npmjs.org/http-errors/-/http-errors-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.5.0.tgz", + "dependencies": { + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "setprototypeof": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.1.tgz" + } + } + }, + "mime": { + "version": "1.3.4", + "from": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" + }, + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + }, + "statuses": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz" + } + } + }, + "serve-static": { + "version": "1.11.1", + "from": "https://registry.npmjs.org/serve-static/-/serve-static-1.11.1.tgz", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.11.1.tgz" + }, + "type-is": { + "version": "1.6.13", + "from": "https://registry.npmjs.org/type-is/-/type-is-1.6.13.tgz", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.13.tgz", + "dependencies": { + "media-typer": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + } + } + }, + "utils-merge": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.0.tgz" + }, + "vary": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/vary/-/vary-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.0.tgz" + } + } + }, + "express-bunyan-logger": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/express-bunyan-logger/-/express-bunyan-logger-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/express-bunyan-logger/-/express-bunyan-logger-1.2.0.tgz", + "dependencies": { + "node-uuid": { + "version": "1.4.7", + "from": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "useragent": { + "version": "2.1.9", + "from": "https://registry.npmjs.org/useragent/-/useragent-2.1.9.tgz", + "resolved": "https://registry.npmjs.org/useragent/-/useragent-2.1.9.tgz", + "dependencies": { + "lru-cache": { + "version": "2.2.4", + "from": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.2.4.tgz", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.2.4.tgz" + } + } + } + } + }, + "express-paginate": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/express-paginate/-/express-paginate-0.2.0.tgz", + "resolved": 
"https://registry.npmjs.org/express-paginate/-/express-paginate-0.2.0.tgz", + "dependencies": { + "lodash.assign": { + "version": "3.2.0", + "from": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-3.2.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-3.2.0.tgz", + "dependencies": { + "lodash._baseassign": { + "version": "3.2.0", + "from": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "resolved": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "dependencies": { + "lodash._basecopy": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz" + } + } + }, + "lodash._createassigner": { + "version": "3.1.1", + "from": "https://registry.npmjs.org/lodash._createassigner/-/lodash._createassigner-3.1.1.tgz", + "resolved": "https://registry.npmjs.org/lodash._createassigner/-/lodash._createassigner-3.1.1.tgz", + "dependencies": { + "lodash._bindcallback": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/lodash._bindcallback/-/lodash._bindcallback-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/lodash._bindcallback/-/lodash._bindcallback-3.0.1.tgz" + }, + "lodash._isiterateecall": { + "version": "3.0.9", + "from": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz", + "resolved": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" + }, + "lodash.restparam": { + "version": "3.6.1", + "from": "https://registry.npmjs.org/lodash.restparam/-/lodash.restparam-3.6.1.tgz", + "resolved": "https://registry.npmjs.org/lodash.restparam/-/lodash.restparam-3.6.1.tgz" + } + } + }, + "lodash.keys": { + "version": "3.1.2", + "from": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", + "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", + "dependencies": { + "lodash._getnative": { + "version": "3.9.1", + "from": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", + "resolved": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz" + }, + "lodash.isarguments": { + "version": "3.1.0", + "from": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz" + }, + "lodash.isarray": { + "version": "3.0.4", + "from": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz", + "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" + } + } + } + } + }, + "lodash.clone": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/lodash.clone/-/lodash.clone-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/lodash.clone/-/lodash.clone-3.0.3.tgz", + "dependencies": { + "lodash._baseclone": { + "version": "3.3.0", + "from": "https://registry.npmjs.org/lodash._baseclone/-/lodash._baseclone-3.3.0.tgz", + "resolved": "https://registry.npmjs.org/lodash._baseclone/-/lodash._baseclone-3.3.0.tgz", + "dependencies": { + "lodash._arraycopy": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/lodash._arraycopy/-/lodash._arraycopy-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/lodash._arraycopy/-/lodash._arraycopy-3.0.0.tgz" + }, + "lodash._arrayeach": { + "version": "3.0.0", + "from": 
"https://registry.npmjs.org/lodash._arrayeach/-/lodash._arrayeach-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/lodash._arrayeach/-/lodash._arrayeach-3.0.0.tgz" + }, + "lodash._baseassign": { + "version": "3.2.0", + "from": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "resolved": "https://registry.npmjs.org/lodash._baseassign/-/lodash._baseassign-3.2.0.tgz", + "dependencies": { + "lodash._basecopy": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz" + } + } + }, + "lodash._basefor": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/lodash._basefor/-/lodash._basefor-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/lodash._basefor/-/lodash._basefor-3.0.3.tgz" + }, + "lodash.isarray": { + "version": "3.0.4", + "from": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz", + "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-3.0.4.tgz" + }, + "lodash.keys": { + "version": "3.1.2", + "from": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", + "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", + "dependencies": { + "lodash._getnative": { + "version": "3.9.1", + "from": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz", + "resolved": "https://registry.npmjs.org/lodash._getnative/-/lodash._getnative-3.9.1.tgz" + }, + "lodash.isarguments": { + "version": "3.1.0", + "from": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz" + } + } + } + } + }, + "lodash._bindcallback": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/lodash._bindcallback/-/lodash._bindcallback-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/lodash._bindcallback/-/lodash._bindcallback-3.0.1.tgz" + }, + "lodash._isiterateecall": { + "version": "3.0.9", + "from": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz", + "resolved": "https://registry.npmjs.org/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz" + } + } + }, + "lodash.isobject": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/lodash.isobject/-/lodash.isobject-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/lodash.isobject/-/lodash.isobject-3.0.2.tgz" + }, + "querystring": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + } + } + }, + "fh-agenda": { + "version": "0.9.0", + "from": "https://registry.npmjs.org/fh-agenda/-/fh-agenda-0.9.0.tgz", + "resolved": "https://registry.npmjs.org/fh-agenda/-/fh-agenda-0.9.0.tgz", + "dependencies": { + "cron": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/cron/-/cron-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/cron/-/cron-1.1.1.tgz" + }, + "date.js": { + "version": "0.3.1", + "from": "https://registry.npmjs.org/date.js/-/date.js-0.3.1.tgz", + "resolved": "https://registry.npmjs.org/date.js/-/date.js-0.3.1.tgz", + "dependencies": { + "debug": { + "version": "0.7.4", + "from": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz" + }, + "lodash.filter": { + "version": "4.6.0", + "from": 
"https://registry.npmjs.org/lodash.filter/-/lodash.filter-4.6.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.filter/-/lodash.filter-4.6.0.tgz" + }, + "lodash.findkey": { + "version": "4.6.0", + "from": "https://registry.npmjs.org/lodash.findkey/-/lodash.findkey-4.6.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.findkey/-/lodash.findkey-4.6.0.tgz" + }, + "lodash.foreach": { + "version": "4.5.0", + "from": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-4.5.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-4.5.0.tgz" + }, + "lodash.includes": { + "version": "4.3.0", + "from": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz" + }, + "lodash.isempty": { + "version": "4.4.0", + "from": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz" + }, + "lodash.partition": { + "version": "4.6.0", + "from": "https://registry.npmjs.org/lodash.partition/-/lodash.partition-4.6.0.tgz", + "resolved": "https://registry.npmjs.org/lodash.partition/-/lodash.partition-4.6.0.tgz" + }, + "lodash.trim": { + "version": "4.5.1", + "from": "https://registry.npmjs.org/lodash.trim/-/lodash.trim-4.5.1.tgz", + "resolved": "https://registry.npmjs.org/lodash.trim/-/lodash.trim-4.5.1.tgz" + } + } + }, + "human-interval": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/human-interval/-/human-interval-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/human-interval/-/human-interval-0.1.6.tgz" + }, + "moment-timezone": { + "version": "0.5.6", + "from": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.6.tgz", + "resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.6.tgz", + "dependencies": { + "moment": { + "version": "2.15.1", + "from": "https://registry.npmjs.org/moment/-/moment-2.15.1.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.15.1.tgz" + } + } + }, + "mongodb": { + "version": "2.1.11", + "from": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.11.tgz", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.11.tgz", + "dependencies": { + "es6-promise": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz" + }, + "mongodb-core": { + "version": "1.3.10", + "from": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.10.tgz", + "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.10.tgz", + "dependencies": { + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "require_optional": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "dependencies": { + "semver": { + "version": "5.3.0", + "from": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz" + }, + "resolve-from": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz" + } + } + } + } + }, + 
"readable-stream": { + "version": "1.0.31", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + } + } + } + } + }, + "fh-amqp-js": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/fh-amqp-js/-/fh-amqp-js-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/fh-amqp-js/-/fh-amqp-js-0.7.1.tgz", + "dependencies": { + "amqp": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/amqp/-/amqp-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/amqp/-/amqp-0.2.0.tgz", + "dependencies": { + "lodash": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.3.1.tgz" + } + } + }, + "async": { + "version": "0.2.7", + "from": "https://registry.npmjs.org/async/-/async-0.2.7.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.7.tgz" + }, + "lodash": { + "version": "2.4.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz" + }, + "node-uuid": { + "version": "1.4.7", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "rc": { + "version": "0.1.1", + "from": "https://registry.npmjs.org/rc/-/rc-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/rc/-/rc-0.1.1.tgz", + "dependencies": { + "optimist": { + "version": "0.3.7", + "from": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz", + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + } + } + }, + "deep-extend": { + "version": "0.2.11", + "from": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz" + }, + "ini": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz" + } + } + } + } + }, + "fh-cls-mongoose": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/fh-cls-mongoose/-/fh-cls-mongoose-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/fh-cls-mongoose/-/fh-cls-mongoose-2.1.0.tgz", + "dependencies": { + "shimmer": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.1.0.tgz" + } + } + }, + 
"fh-cluster": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/fh-cluster/-/fh-cluster-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/fh-cluster/-/fh-cluster-0.3.0.tgz", + "dependencies": { + "backoff": { + "version": "2.4.1", + "from": "https://registry.npmjs.org/backoff/-/backoff-2.4.1.tgz", + "resolved": "https://registry.npmjs.org/backoff/-/backoff-2.4.1.tgz", + "dependencies": { + "precond": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/precond/-/precond-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/precond/-/precond-0.2.3.tgz" + } + } + }, + "lodash": { + "version": "3.10.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz" + } + } + }, + "fh-component-metrics": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/fh-component-metrics/-/fh-component-metrics-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/fh-component-metrics/-/fh-component-metrics-2.2.1.tgz", + "dependencies": { + "async": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/async/-/async-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-2.0.1.tgz", + "dependencies": { + "lodash": { + "version": "4.15.0", + "from": "https://registry.npmjs.org/lodash/-/lodash-4.15.0.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.15.0.tgz" + } + } + }, + "lodash": { + "version": "4.3.0", + "from": "https://registry.npmjs.org/lodash/-/lodash-4.3.0.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.3.0.tgz" + } + } + }, + "fh-config": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/fh-config/-/fh-config-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/fh-config/-/fh-config-1.0.3.tgz", + "dependencies": { + "async": { + "version": "0.9.2", + "from": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" + }, + "bunyan": { + "version": "1.8.4", + "from": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.4.tgz", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.4.tgz", + "dependencies": { + "dtrace-provider": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.7.1.tgz", + "dependencies": { + "nan": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz" + } + } + }, + "mv": { + "version": "2.1.1", + "from": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "dependencies": { + "ncp": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz" + }, + "rimraf": { + "version": "2.4.5", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "dependencies": { + "glob": { + "version": "6.0.4", + "from": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "dependencies": { + "inflight": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "minimatch": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.6", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.2", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.4.0", + "from": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + } + } + } + } + } + }, + "safe-json-stringify": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz" + }, + "moment": { + "version": "2.15.2", + "from": "https://registry.npmjs.org/moment/-/moment-2.15.2.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.15.2.tgz" + } + } + }, + "dateformat": { + "version": "1.0.12", + "from": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.12.tgz", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.12.tgz", + "dependencies": { + "get-stdin": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz" + }, + "meow": { + "version": "3.7.0", + "from": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", + "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", + "dependencies": { + "camelcase-keys": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", + "dependencies": { + "camelcase": { + "version": "2.1.1", + "from": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz" + } + } + }, + "decamelize": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" + }, + "loud-rejection": { + "version": "1.6.0", + "from": 
"https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", + "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", + "dependencies": { + "currently-unhandled": { + "version": "0.4.1", + "from": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", + "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", + "dependencies": { + "array-find-index": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz" + } + } + }, + "signal-exit": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz" + } + } + }, + "map-obj": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz" + }, + "minimist": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz" + }, + "normalize-package-data": { + "version": "2.3.5", + "from": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.3.5.tgz", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.3.5.tgz", + "dependencies": { + "hosted-git-info": { + "version": "2.1.5", + "from": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.1.5.tgz", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.1.5.tgz" + }, + "is-builtin-module": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", + "dependencies": { + "builtin-modules": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz" + } + } + }, + "semver": { + "version": "5.3.0", + "from": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz" + }, + "validate-npm-package-license": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz", + "dependencies": { + "spdx-correct": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.2.tgz", + "dependencies": { + "spdx-license-ids": { + "version": "1.2.2", + "from": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz" + } + } + }, + "spdx-expression-parse": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz" + } + } + } + } + }, + "object-assign": { + "version": "4.1.0", + "from": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.0.tgz", + "resolved": 
"https://registry.npmjs.org/object-assign/-/object-assign-4.1.0.tgz" + }, + "read-pkg-up": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "dependencies": { + "find-up": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "dependencies": { + "path-exists": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz" + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + } + } + }, + "read-pkg": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "dependencies": { + "load-json-file": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "dependencies": { + "graceful-fs": { + "version": "4.1.9", + "from": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.9.tgz", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.9.tgz" + }, + "parse-json": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "dependencies": { + "error-ex": { + "version": "1.3.0", + "from": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.0.tgz", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.0.tgz", + "dependencies": { + "is-arrayish": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" + } + } + } + } + }, + "pify": { + "version": "2.3.0", + "from": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + }, + "strip-bom": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "dependencies": { + "is-utf8": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz" + } + } + } + } + }, + "path-type": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + 
"dependencies": { + "graceful-fs": { + "version": "4.1.9", + "from": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.9.tgz", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.9.tgz" + }, + "pify": { + "version": "2.3.0", + "from": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + } + } + } + } + } + } + }, + "redent": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", + "dependencies": { + "indent-string": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", + "dependencies": { + "repeating": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", + "dependencies": { + "is-finite": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz", + "dependencies": { + "number-is-nan": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz" + } + } + } + } + } + } + }, + "strip-indent": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz" + } + } + }, + "trim-newlines": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz" + } + } + } + } + }, + "winston": { + "version": "0.8.3", + "from": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz", + "resolved": "https://registry.npmjs.org/winston/-/winston-0.8.3.tgz", + "dependencies": { + "async": { + "version": "0.2.10", + "from": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" + }, + "colors": { + "version": "0.6.2", + "from": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz" + }, + "cycle": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz" + }, + "eyes": { + "version": "0.1.8", + "from": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "pkginfo": { + "version": "0.3.1", + "from": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "resolved": 
"https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz" + }, + "stack-trace": { + "version": "0.0.9", + "from": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz" + } + } + } + } + }, + "fh-forms": { + "version": "1.10.8", + "from": "fh-forms@1.10.8", + "resolved": "https://registry.npmjs.org/fh-forms/-/fh-forms-1.10.8.tgz", + "dependencies": { + "archiver": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/archiver/-/archiver-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-1.2.0.tgz", + "dependencies": { + "archiver-utils": { + "version": "1.3.0", + "from": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-1.3.0.tgz", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-1.3.0.tgz", + "dependencies": { + "graceful-fs": { + "version": "4.1.10", + "from": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.10.tgz", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.10.tgz" + }, + "lazystream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.0.tgz" + }, + "normalize-path": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz" + } + } + }, + "async": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/async/-/async-2.1.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-2.1.2.tgz" + }, + "buffer-crc32": { + "version": "0.2.6", + "from": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.6.tgz", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.6.tgz" + }, + "glob": { + "version": "7.1.1", + "from": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "dependencies": { + "fs.realpath": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + }, + "inflight": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "minimatch": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.6", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.2", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.4.0", + "from": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + } + }, + "lodash": { + "version": "4.17.2", + "from": "https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz" + }, + "readable-stream": { + "version": "2.2.2", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.2.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.2.tgz", + "dependencies": { + "buffer-shims": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/buffer-shims/-/buffer-shims-1.0.0.tgz" + }, + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + }, + "tar-stream": { + "version": "1.5.2", + "from": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.2.tgz", + "dependencies": { + "bl": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": 
"https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + } + } + }, + "end-of-stream": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.1.0.tgz", + "dependencies": { + "once": { + "version": "1.3.3", + "from": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "resolved": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + } + } + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + }, + "zip-stream": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/zip-stream/-/zip-stream-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-1.1.0.tgz", + "dependencies": { + "compress-commons": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/compress-commons/-/compress-commons-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-1.1.0.tgz", + "dependencies": { + "crc32-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-1.0.0.tgz" + }, + "normalize-path": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.0.1.tgz" + } + } + } + } + } + } + }, + "async": { + "version": "0.2.9", + "from": "https://registry.npmjs.org/async/-/async-0.2.9.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.9.tgz" + }, + "fh-cls-mongoose": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/fh-cls-mongoose/-/fh-cls-mongoose-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/fh-cls-mongoose/-/fh-cls-mongoose-2.1.0.tgz", + "dependencies": { + "shimmer": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.1.0.tgz" + } + } + }, + "fh-gridfs": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/fh-gridfs/-/fh-gridfs-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/fh-gridfs/-/fh-gridfs-1.0.4.tgz", + "dependencies": { + "archiver": { + "version": "0.4.9", + "from": 
"https://registry.npmjs.org/archiver/-/archiver-0.4.9.tgz", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-0.4.9.tgz", + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + }, + "iconv-lite": { + "version": "0.2.11", + "from": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz" + } + } + }, + "base64-stream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/base64-stream/-/base64-stream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/base64-stream/-/base64-stream-0.1.2.tgz", + "dependencies": { + "readable-stream": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.2.tgz" + } + } + }, + "gridfs-stream": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/gridfs-stream/-/gridfs-stream-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/gridfs-stream/-/gridfs-stream-0.4.0.tgz" + }, + "lodash": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/lodash/-/lodash-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.1.0.tgz" + }, + "rimraf": { + "version": "2.2.2", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.2.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.2.2.tgz", + "dependencies": { + "graceful-fs": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-2.0.3.tgz" + } + } + }, + "usage": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/usage/-/usage-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/usage/-/usage-0.7.1.tgz", + "dependencies": { + "bindings": { + "version": "1.2.1", + "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" + }, + "nan": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz" + } + } + } + } + }, + "fh-logger": { + "version": "0.5.0", + "from": "https://registry.npmjs.org/fh-logger/-/fh-logger-0.5.0.tgz", + "resolved": "https://registry.npmjs.org/fh-logger/-/fh-logger-0.5.0.tgz", + "dependencies": { + "bunyan": { + "version": "1.8.1", + "from": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.1.tgz", + "resolved": 
"https://registry.npmjs.org/bunyan/-/bunyan-1.8.1.tgz", + "dependencies": { + "dtrace-provider": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "dependencies": { + "nan": { + "version": "2.3.5", + "from": "https://registry.npmjs.org/nan/-/nan-2.3.5.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.3.5.tgz" + } + } + }, + "mv": { + "version": "2.1.1", + "from": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "dependencies": { + "ncp": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz" + }, + "rimraf": { + "version": "2.4.5", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "dependencies": { + "glob": { + "version": "6.0.4", + "from": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "dependencies": { + "inflight": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.5.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "minimatch": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.5", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.5.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.5.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.1", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.1.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.1.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.3.3", + "from": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "resolved": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + } + } + } + } + } + } + }, + "safe-json-stringify": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz" + }, + "moment": { + "version": "2.13.0", + "from": 
"https://registry.npmjs.org/moment/-/moment-2.13.0.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.13.0.tgz" + } + } + }, + "continuation-local-storage": { + "version": "3.1.7", + "from": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.1.7.tgz", + "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.1.7.tgz", + "dependencies": { + "async-listener": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.0.tgz", + "dependencies": { + "shimmer": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz" + } + } + }, + "emitter-listener": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.0.1.tgz", + "dependencies": { + "shimmer": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz" + } + } + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + } + } + }, + "fh-mbaas-client": { + "version": "0.15.0", + "from": "https://registry.npmjs.org/fh-mbaas-client/-/fh-mbaas-client-0.15.0.tgz", + "resolved": "https://registry.npmjs.org/fh-mbaas-client/-/fh-mbaas-client-0.15.0.tgz", + "dependencies": { + "underscore": { + "version": "1.8.3", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz" + } + } + }, + "handlebars": { + "version": "4.0.6", + "from": "https://registry.npmjs.org/handlebars/-/handlebars-4.0.6.tgz", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.0.6.tgz", + "dependencies": { + "async": { + "version": "1.5.2", + "from": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz" + }, + "optimist": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + }, + "minimist": { + "version": "0.0.10", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" + } + } + }, + "source-map": { + "version": "0.4.4", + "from": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.4.4.tgz", + "dependencies": { + "amdefine": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz" + } + } + }, + "uglify-js": { + "version": "2.7.4", + "from": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.7.4.tgz", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.7.4.tgz", + "dependencies": { + "async": { + "version": "0.2.10", + 
"from": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz" + }, + "source-map": { + "version": "0.5.6", + "from": "https://registry.npmjs.org/source-map/-/source-map-0.5.6.tgz", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.6.tgz" + }, + "uglify-to-browserify": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz" + }, + "yargs": { + "version": "3.10.0", + "from": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", + "dependencies": { + "camelcase": { + "version": "1.2.1", + "from": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz" + }, + "cliui": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", + "dependencies": { + "center-align": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", + "dependencies": { + "align-text": { + "version": "0.1.4", + "from": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "dependencies": { + "kind-of": { + "version": "3.0.4", + "from": "https://registry.npmjs.org/kind-of/-/kind-of-3.0.4.tgz", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.0.4.tgz", + "dependencies": { + "is-buffer": { + "version": "1.1.4", + "from": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.4.tgz", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.4.tgz" + } + } + }, + "longest": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz" + }, + "repeat-string": { + "version": "1.6.1", + "from": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz" + } + } + }, + "lazy-cache": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz" + } + } + }, + "right-align": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", + "dependencies": { + "align-text": { + "version": "0.1.4", + "from": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "dependencies": { + "kind-of": { + "version": "3.0.4", + "from": "https://registry.npmjs.org/kind-of/-/kind-of-3.0.4.tgz", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.0.4.tgz", + "dependencies": { + "is-buffer": { + "version": "1.1.4", + "from": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.4.tgz", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.4.tgz" + } + } + }, + "longest": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "resolved": 
"https://registry.npmjs.org/longest/-/longest-1.0.1.tgz" + }, + "repeat-string": { + "version": "1.6.1", + "from": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz" + } + } + } + } + }, + "wordwrap": { + "version": "0.0.2", + "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz" + } + } + }, + "decamelize": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" + }, + "window-size": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz" + } + } + } + } + } + } + }, + "lodash": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz" + }, + "mime": { + "version": "1.2.11", + "from": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz" + }, + "mkdirp": { + "version": "0.5.1", + "from": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "dependencies": { + "minimist": { + "version": "0.0.8", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + } + }, + "mmmagic": { + "version": "0.4.5", + "from": "https://registry.npmjs.org/mmmagic/-/mmmagic-0.4.5.tgz", + "resolved": "https://registry.npmjs.org/mmmagic/-/mmmagic-0.4.5.tgz", + "dependencies": { + "nan": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz" + } + } + }, + "moment": { + "version": "2.14.1", + "from": "https://registry.npmjs.org/moment/-/moment-2.14.1.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.14.1.tgz" + }, + "mongodb": { + "version": "2.1.18", + "from": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.18.tgz", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.18.tgz", + "dependencies": { + "es6-promise": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz" + }, + "mongodb-core": { + "version": "1.3.18", + "from": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.18.tgz", + "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.18.tgz", + "dependencies": { + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "require_optional": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "dependencies": { + "semver": { + "version": "5.3.0", + "from": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz" + }, + "resolve-from": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", + "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz" + } + } + } + } + }, + "readable-stream": { + "version": "1.0.31", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + } + } + }, + "mongoose": { + "version": "4.5.0", + "from": "https://registry.npmjs.org/mongoose/-/mongoose-4.5.0.tgz", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-4.5.0.tgz", + "dependencies": { + "async": { + "version": "1.5.2", + "from": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz" + }, + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "hooks-fixed": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz" + }, + "kareem": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/kareem/-/kareem-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/kareem/-/kareem-1.1.0.tgz" + }, + "mpath": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz" + }, + "mpromise": { + "version": "0.5.5", + "from": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz", + "resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz" + }, + "mquery": { + "version": "1.11.0", + "from": "https://registry.npmjs.org/mquery/-/mquery-1.11.0.tgz", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-1.11.0.tgz", + "dependencies": { + "bluebird": { + "version": "2.10.2", + "from": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz" + }, + "sliced": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz" + } + } + }, + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + }, + "muri": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz" + }, + "regexp-clone": { + "version": "0.0.1", + "from": 
"https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz" + }, + "sliced": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz" + } + } + }, + "mongoose-paginate": { + "version": "5.0.3", + "from": "https://registry.npmjs.org/mongoose-paginate/-/mongoose-paginate-5.0.3.tgz", + "resolved": "https://registry.npmjs.org/mongoose-paginate/-/mongoose-paginate-5.0.3.tgz", + "dependencies": { + "bluebird": { + "version": "3.0.5", + "from": "https://registry.npmjs.org/bluebird/-/bluebird-3.0.5.tgz", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.0.5.tgz" + } + } + }, + "phantom": { + "version": "0.8.0", + "from": "git://github.com/fheng/phantomjs-node.git#514c855834056d08d9d60fddcd2d4243254c1955", + "resolved": "git://github.com/fheng/phantomjs-node.git#514c855834056d08d9d60fddcd2d4243254c1955", + "dependencies": { + "dnode": { + "version": "1.2.2", + "from": "https://registry.npmjs.org/dnode/-/dnode-1.2.2.tgz", + "resolved": "https://registry.npmjs.org/dnode/-/dnode-1.2.2.tgz", + "dependencies": { + "dnode-protocol": { + "version": "0.2.2", + "from": "https://registry.npmjs.org/dnode-protocol/-/dnode-protocol-0.2.2.tgz", + "resolved": "https://registry.npmjs.org/dnode-protocol/-/dnode-protocol-0.2.2.tgz" + }, + "jsonify": { + "version": "0.0.0", + "from": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", + "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz" + }, + "weak": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/weak/-/weak-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/weak/-/weak-1.0.1.tgz", + "dependencies": { + "bindings": { + "version": "1.2.1", + "from": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.2.1.tgz" + }, + "nan": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz" + } + } + } + } + }, + "shoe": { + "version": "0.0.15", + "from": "git://github.com/fheng/shoe.git#45429420fece517b6df9ffc856aa5ba661a10741", + "resolved": "git://github.com/fheng/shoe.git#45429420fece517b6df9ffc856aa5ba661a10741", + "dependencies": { + "sockjs": { + "version": "0.3.7", + "from": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.7.tgz", + "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.7.tgz", + "dependencies": { + "node-uuid": { + "version": "1.3.3", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.3.3.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.3.3.tgz" + }, + "faye-websocket": { + "version": "0.4.4", + "from": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.4.4.tgz" + } + } + }, + "sockjs-client": { + "version": "0.0.0-unreleasable" + } + } + }, + "win-spawn": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/win-spawn/-/win-spawn-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/win-spawn/-/win-spawn-2.0.0.tgz" + }, + "traverse": { + "version": "0.6.6", + "from": "https://registry.npmjs.org/traverse/-/traverse-0.6.6.tgz", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.6.tgz" + } + } + }, + "request": { + "version": "2.74.0", + "from": 
"https://registry.npmjs.org/request/-/request-2.74.0.tgz", + "resolved": "https://registry.npmjs.org/request/-/request-2.74.0.tgz", + "dependencies": { + "aws-sign2": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz" + }, + "aws4": { + "version": "1.5.0", + "from": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz" + }, + "bl": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + } + } + }, + "caseless": { + "version": "0.11.0", + "from": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "dependencies": { + "delayed-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + } + } + }, + "extend": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/form-data/-/form-data-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-1.0.1.tgz", + "dependencies": { + "async": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/async/-/async-2.1.2.tgz", + "resolved": 
"https://registry.npmjs.org/async/-/async-2.1.2.tgz", + "dependencies": { + "lodash": { + "version": "4.17.2", + "from": "https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.2.tgz" + } + } + } + } + }, + "har-validator": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "dependencies": { + "chalk": { + "version": "1.1.3", + "from": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "dependencies": { + "ansi-styles": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" + }, + "escape-string-regexp": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "supports-color": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + } + } + }, + "commander": { + "version": "2.9.0", + "from": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "dependencies": { + "graceful-readlink": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + }, + "is-my-json-valid": { + "version": "2.15.0", + "from": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "dependencies": { + "generate-function": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "dependencies": { + "is-property": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", + "resolved": 
"https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + } + } + }, + "jsonpointer": { + "version": "4.0.0", + "from": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + } + } + }, + "hawk": { + "version": "3.1.3", + "from": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "dependencies": { + "hoek": { + "version": "2.16.3", + "from": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "boom": { + "version": "2.10.1", + "from": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + } + } + }, + "http-signature": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "dependencies": { + "assert-plus": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz" + }, + "jsprim": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "dependencies": { + "extsprintf": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz" + }, + "json-schema": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + }, + "verror": { + "version": "1.3.6", + "from": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz" + } + } + }, + "sshpk": { + "version": "1.10.1", + "from": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "dependencies": { + "asn1": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz" + }, + "assert-plus": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + }, + "dashdash": { + 
"version": "1.14.0", + "from": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz" + }, + "getpass": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz" + }, + "jsbn": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz" + }, + "tweetnacl": { + "version": "0.14.3", + "from": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz" + }, + "jodid25519": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz" + }, + "ecc-jsbn": { + "version": "0.1.1", + "from": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz" + }, + "bcrypt-pbkdf": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz" + } + } + } + } + }, + "is-typedarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "oauth-sign": { + "version": "0.8.2", + "from": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz" + }, + "qs": { + "version": "6.2.1", + "from": "https://registry.npmjs.org/qs/-/qs-6.2.1.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.2.1.tgz" + }, + "stringstream": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "tough-cookie": { + "version": "2.3.2", + "from": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "dependencies": { + "punycode": { + "version": "1.4.1", + "from": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + } + } + }, + "tunnel-agent": { + 
"version": "0.4.3", + "from": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz" + } + } + }, + "underscore": { + "version": "1.8.0", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.8.0.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.0.tgz" + }, + "yauzl": { + "version": "2.4.1", + "from": "https://registry.npmjs.org/yauzl/-/yauzl-2.4.1.tgz", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.4.1.tgz", + "dependencies": { + "fd-slicer": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.0.1.tgz", + "dependencies": { + "pend": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz" + } + } + } + } + } + } + }, + "fh-health": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/fh-health/-/fh-health-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/fh-health/-/fh-health-0.2.0.tgz", + "dependencies": { + "async": { + "version": "0.2.9", + "from": "https://registry.npmjs.org/async/-/async-0.2.9.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.9.tgz" + }, + "fhlog": { + "version": "0.12.1", + "from": "https://registry.npmjs.org/fhlog/-/fhlog-0.12.1.tgz", + "resolved": "https://registry.npmjs.org/fhlog/-/fhlog-0.12.1.tgz", + "dependencies": { + "safejson": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/safejson/-/safejson-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/safejson/-/safejson-1.0.1.tgz" + }, + "colors": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz" + }, + "lodash": { + "version": "2.4.2", + "from": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.2.tgz" + }, + "moment": { + "version": "2.8.4", + "from": "https://registry.npmjs.org/moment/-/moment-2.8.4.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.8.4.tgz" + }, + "async": { + "version": "0.9.2", + "from": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" + }, + "html5-fs": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/html5-fs/-/html5-fs-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/html5-fs/-/html5-fs-0.0.1.tgz" + }, + "brfs": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/brfs/-/brfs-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/brfs/-/brfs-1.2.0.tgz", + "dependencies": { + "quote-stream": { + "version": "0.0.0", + "from": "https://registry.npmjs.org/quote-stream/-/quote-stream-0.0.0.tgz", + "resolved": "https://registry.npmjs.org/quote-stream/-/quote-stream-0.0.0.tgz", + "dependencies": { + "minimist": { + "version": "0.0.8", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + } + }, + "static-module": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/static-module/-/static-module-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/static-module/-/static-module-1.3.1.tgz", + "dependencies": { + "concat-stream": { + "version": "1.4.10", + "from": 
"https://registry.npmjs.org/concat-stream/-/concat-stream-1.4.10.tgz", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.4.10.tgz", + "dependencies": { + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "typedarray": { + "version": "0.0.6", + "from": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz" + }, + "readable-stream": { + "version": "1.1.14", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + } + } + } + } + }, + "duplexer2": { + "version": "0.0.2", + "from": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.0.2.tgz", + "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.0.2.tgz", + "dependencies": { + "readable-stream": { + "version": "1.1.14", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + } + } + }, + "escodegen": { + "version": "1.3.3", + "from": "https://registry.npmjs.org/escodegen/-/escodegen-1.3.3.tgz", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.3.3.tgz", + "dependencies": { + "esutils": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/esutils/-/esutils-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-1.0.0.tgz" + }, + "estraverse": { + "version": "1.5.1", + "from": "https://registry.npmjs.org/estraverse/-/estraverse-1.5.1.tgz", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.5.1.tgz" + }, + "esprima": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/esprima/-/esprima-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.1.1.tgz" + }, + "source-map": { + "version": "0.1.43", + "from": 
"https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz", + "dependencies": { + "amdefine": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz" + } + } + } + } + }, + "falafel": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/falafel/-/falafel-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/falafel/-/falafel-1.2.0.tgz", + "dependencies": { + "acorn": { + "version": "1.2.2", + "from": "https://registry.npmjs.org/acorn/-/acorn-1.2.2.tgz", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-1.2.2.tgz" + }, + "foreach": { + "version": "2.0.5", + "from": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "object-keys": { + "version": "1.0.11", + "from": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.11.tgz", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.11.tgz" + } + } + }, + "has": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/has/-/has-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.1.tgz", + "dependencies": { + "function-bind": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.0.tgz" + } + } + }, + "object-inspect": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/object-inspect/-/object-inspect-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-0.4.0.tgz" + }, + "readable-stream": { + "version": "1.0.34", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + }, + "shallow-copy": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/shallow-copy/-/shallow-copy-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/shallow-copy/-/shallow-copy-0.0.1.tgz" + }, + "static-eval": { + "version": "0.2.4", + "from": "https://registry.npmjs.org/static-eval/-/static-eval-0.2.4.tgz", + "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-0.2.4.tgz", + "dependencies": { + "escodegen": { + "version": "0.0.28", + "from": "https://registry.npmjs.org/escodegen/-/escodegen-0.0.28.tgz", + "resolved": 
"https://registry.npmjs.org/escodegen/-/escodegen-0.0.28.tgz", + "dependencies": { + "esprima": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz" + }, + "estraverse": { + "version": "1.3.2", + "from": "https://registry.npmjs.org/estraverse/-/estraverse-1.3.2.tgz", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-1.3.2.tgz" + }, + "source-map": { + "version": "0.5.6", + "from": "https://registry.npmjs.org/source-map/-/source-map-0.5.6.tgz", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.6.tgz" + } + } + } + } + } + } + }, + "through2": { + "version": "0.4.2", + "from": "https://registry.npmjs.org/through2/-/through2-0.4.2.tgz", + "resolved": "https://registry.npmjs.org/through2/-/through2-0.4.2.tgz", + "dependencies": { + "readable-stream": { + "version": "1.0.34", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + }, + "xtend": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/xtend/-/xtend-2.1.2.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-2.1.2.tgz", + "dependencies": { + "object-keys": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz" + } + } + } + } + } + } + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + } + } + }, + "fh-logger": { + "version": "0.5.1", + "from": "https://registry.npmjs.org/fh-logger/-/fh-logger-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/fh-logger/-/fh-logger-0.5.1.tgz", + "dependencies": { + "bunyan": { + "version": "1.8.1", + "from": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.1.tgz", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.1.tgz", + "dependencies": { + "dtrace-provider": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "dependencies": { + "nan": { + "version": "2.3.5", + "from": "https://registry.npmjs.org/nan/-/nan-2.3.5.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.3.5.tgz" + } + } + }, + "mv": { + "version": "2.1.1", + "from": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "dependencies": { + "ncp": { + "version": "2.0.0", + "from": 
"https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz" + }, + "rimraf": { + "version": "2.4.5", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "dependencies": { + "glob": { + "version": "6.0.4", + "from": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "dependencies": { + "inflight": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.5.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "minimatch": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.5", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.5.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.5.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.1", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.1.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.1.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.3.3", + "from": "https://registry.npmjs.org/once/-/once-1.3.3.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.3.3.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + } + } + } + } + } + } + }, + "safe-json-stringify": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz" + }, + "moment": { + "version": "2.13.0", + "from": "https://registry.npmjs.org/moment/-/moment-2.13.0.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.13.0.tgz" + } + } + }, + "continuation-local-storage": { + "version": "3.1.7", + "from": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.1.7.tgz", + "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.1.7.tgz", + "dependencies": { + "async-listener": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.0.tgz", + "dependencies": { + "shimmer": { + "version": "1.0.0", + "from": 
"https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz" + } + } + }, + "emitter-listener": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.0.1.tgz", + "dependencies": { + "shimmer": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz" + } + } + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + } + } + }, + "fh-mbaas-middleware": { + "version": "2.2.7", + "from": "https://registry.npmjs.org/fh-mbaas-middleware/-/fh-mbaas-middleware-2.2.7.tgz", + "resolved": "https://registry.npmjs.org/fh-mbaas-middleware/-/fh-mbaas-middleware-2.2.7.tgz", + "dependencies": { + "async": { + "version": "0.9.2", + "from": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz" + }, + "cuid": { + "version": "1.3.8", + "from": "https://registry.npmjs.org/cuid/-/cuid-1.3.8.tgz", + "resolved": "https://registry.npmjs.org/cuid/-/cuid-1.3.8.tgz", + "dependencies": { + "browser-fingerprint": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/browser-fingerprint/-/browser-fingerprint-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/browser-fingerprint/-/browser-fingerprint-0.0.1.tgz" + }, + "core-js": { + "version": "1.2.7", + "from": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz" + }, + "node-fingerprint": { + "version": "0.0.2", + "from": "https://registry.npmjs.org/node-fingerprint/-/node-fingerprint-0.0.2.tgz", + "resolved": "https://registry.npmjs.org/node-fingerprint/-/node-fingerprint-0.0.2.tgz" + } + } + }, + "express": { + "version": "4.14.0", + "from": "https://registry.npmjs.org/express/-/express-4.14.0.tgz", + "resolved": "https://registry.npmjs.org/express/-/express-4.14.0.tgz", + "dependencies": { + "accepts": { + "version": "1.3.3", + "from": "https://registry.npmjs.org/accepts/-/accepts-1.3.3.tgz", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.3.tgz", + "dependencies": { + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + }, + "negotiator": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz" + } + } + }, + "array-flatten": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" + }, + "content-disposition": { + "version": "0.5.1", + "from": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.1.tgz" + }, + "content-type": { + "version": "1.0.2", + 
"from": "https://registry.npmjs.org/content-type/-/content-type-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.2.tgz" + }, + "cookie": { + "version": "0.3.1", + "from": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz" + }, + "cookie-signature": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "dependencies": { + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + } + } + }, + "depd": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz" + }, + "encodeurl": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.1.tgz" + }, + "escape-html": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" + }, + "etag": { + "version": "1.7.0", + "from": "https://registry.npmjs.org/etag/-/etag-1.7.0.tgz", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.7.0.tgz" + }, + "finalhandler": { + "version": "0.5.0", + "from": "https://registry.npmjs.org/finalhandler/-/finalhandler-0.5.0.tgz", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-0.5.0.tgz", + "dependencies": { + "statuses": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz" + }, + "unpipe": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" + } + } + }, + "fresh": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/fresh/-/fresh-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.3.0.tgz" + }, + "merge-descriptors": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" + }, + "methods": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" + }, + "on-finished": { + "version": "2.3.0", + "from": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "dependencies": { + "ee-first": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" + } + } + }, + "parseurl": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.1.tgz" + }, + "path-to-regexp": { + "version": "0.1.7", + "from": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "resolved": 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" + }, + "proxy-addr": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.1.2.tgz", + "dependencies": { + "forwarded": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.0.tgz" + }, + "ipaddr.js": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.1.1.tgz" + } + } + }, + "qs": { + "version": "6.2.0", + "from": "https://registry.npmjs.org/qs/-/qs-6.2.0.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.2.0.tgz" + }, + "range-parser": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + }, + "send": { + "version": "0.14.1", + "from": "https://registry.npmjs.org/send/-/send-0.14.1.tgz", + "resolved": "https://registry.npmjs.org/send/-/send-0.14.1.tgz", + "dependencies": { + "destroy": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz" + }, + "http-errors": { + "version": "1.5.0", + "from": "https://registry.npmjs.org/http-errors/-/http-errors-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.5.0.tgz", + "dependencies": { + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "setprototypeof": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.1.tgz" + } + } + }, + "mime": { + "version": "1.3.4", + "from": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" + }, + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + }, + "statuses": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz" + } + } + }, + "serve-static": { + "version": "1.11.1", + "from": "https://registry.npmjs.org/serve-static/-/serve-static-1.11.1.tgz", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.11.1.tgz" + }, + "type-is": { + "version": "1.6.13", + "from": "https://registry.npmjs.org/type-is/-/type-is-1.6.13.tgz", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.13.tgz", + "dependencies": { + "media-typer": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + 
} + } + } + } + }, + "utils-merge": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.0.tgz" + }, + "vary": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/vary/-/vary-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.0.tgz" + } + } + }, + "fh-amqp-js": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/fh-amqp-js/-/fh-amqp-js-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/fh-amqp-js/-/fh-amqp-js-0.7.1.tgz", + "dependencies": { + "amqp": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/amqp/-/amqp-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/amqp/-/amqp-0.2.0.tgz", + "dependencies": { + "lodash": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-1.3.1.tgz" + } + } + }, + "async": { + "version": "0.2.7", + "from": "https://registry.npmjs.org/async/-/async-0.2.7.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-0.2.7.tgz" + }, + "lodash": { + "version": "2.4.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.4.1.tgz" + }, + "node-uuid": { + "version": "1.4.7", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "rc": { + "version": "0.1.1", + "from": "https://registry.npmjs.org/rc/-/rc-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/rc/-/rc-0.1.1.tgz", + "dependencies": { + "optimist": { + "version": "0.3.7", + "from": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz", + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + } + } + }, + "deep-extend": { + "version": "0.2.11", + "from": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz" + }, + "ini": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz" + } + } + } + } + }, + "fh-cls-mongoose": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/fh-cls-mongoose/-/fh-cls-mongoose-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/fh-cls-mongoose/-/fh-cls-mongoose-2.1.0.tgz", + "dependencies": { + "shimmer": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.1.0.tgz" + } + } + }, + "fh-logger": { + "version": "0.5.0", + "from": "https://registry.npmjs.org/fh-logger/-/fh-logger-0.5.0.tgz", + "resolved": "https://registry.npmjs.org/fh-logger/-/fh-logger-0.5.0.tgz", + "dependencies": { + "bunyan": { + "version": "1.8.1", + "from": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.1.tgz", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.1.tgz", + "dependencies": { + "dtrace-provider": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz", + "dependencies": { + "nan": { + "version": "2.3.5", 
+ "from": "https://registry.npmjs.org/nan/-/nan-2.3.5.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.3.5.tgz" + } + } + }, + "mv": { + "version": "2.1.1", + "from": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "dependencies": { + "mkdirp": { + "version": "0.5.1", + "from": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "dependencies": { + "minimist": { + "version": "0.0.8", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + } + }, + "ncp": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz" + }, + "rimraf": { + "version": "2.4.5", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "dependencies": { + "glob": { + "version": "6.0.4", + "from": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "dependencies": { + "inflight": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.5.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz" + }, + "minimatch": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.5", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.5.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.5.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.1", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.1.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.1.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.3.3", + "from": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "resolved": "http://registry.npmjs.org/once/-/once-1.3.3.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + } + } + } + } + } + } + }, + "safe-json-stringify": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz" + }, + "moment": { + "version": 
"2.13.0", + "from": "https://registry.npmjs.org/moment/-/moment-2.13.0.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.13.0.tgz" + } + } + }, + "continuation-local-storage": { + "version": "3.1.7", + "from": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.1.7.tgz", + "resolved": "https://registry.npmjs.org/continuation-local-storage/-/continuation-local-storage-3.1.7.tgz", + "dependencies": { + "async-listener": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/async-listener/-/async-listener-0.6.0.tgz", + "dependencies": { + "shimmer": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz" + } + } + }, + "emitter-listener": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.0.1.tgz", + "dependencies": { + "shimmer": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.0.0.tgz" + } + } + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + } + } + }, + "moment": { + "version": "2.16.0", + "from": "https://registry.npmjs.org/moment/-/moment-2.16.0.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.16.0.tgz" + }, + "mongodb": { + "version": "2.1.18", + "from": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.18.tgz", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.18.tgz", + "dependencies": { + "es6-promise": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz" + }, + "mongodb-core": { + "version": "1.3.18", + "from": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.18.tgz", + "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.18.tgz", + "dependencies": { + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "require_optional": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "dependencies": { + "semver": { + "version": "5.3.0", + "from": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz" + }, + "resolve-from": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz" + } + } + } + } + }, + "readable-stream": { + "version": "1.0.31", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": 
"https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + } + } + }, + "mongoose": { + "version": "4.5.0", + "from": "https://registry.npmjs.org/mongoose/-/mongoose-4.5.0.tgz", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-4.5.0.tgz", + "dependencies": { + "async": { + "version": "1.5.2", + "from": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz" + }, + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "hooks-fixed": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz" + }, + "kareem": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/kareem/-/kareem-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/kareem/-/kareem-1.1.0.tgz" + }, + "mpath": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz" + }, + "mpromise": { + "version": "0.5.5", + "from": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz", + "resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz" + }, + "mquery": { + "version": "1.11.0", + "from": "https://registry.npmjs.org/mquery/-/mquery-1.11.0.tgz", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-1.11.0.tgz", + "dependencies": { + "bluebird": { + "version": "2.10.2", + "from": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz" + }, + "sliced": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz" + } + } + }, + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + }, + "muri": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz" + }, + "regexp-clone": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz" + }, + "sliced": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz" + } + } + }, + "mongoose-filter-denormalize": { + "version": "0.2.1", + "from": 
"https://registry.npmjs.org/mongoose-filter-denormalize/-/mongoose-filter-denormalize-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mongoose-filter-denormalize/-/mongoose-filter-denormalize-0.2.1.tgz", + "dependencies": { + "lodash": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz" + }, + "mongoose": { + "version": "3.6.20", + "from": "https://registry.npmjs.org/mongoose/-/mongoose-3.6.20.tgz", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-3.6.20.tgz", + "dependencies": { + "hooks": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/hooks/-/hooks-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/hooks/-/hooks-0.2.1.tgz" + }, + "mongodb": { + "version": "1.3.19", + "from": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.19.tgz", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.19.tgz", + "dependencies": { + "bson": { + "version": "0.2.2", + "from": "https://registry.npmjs.org/bson/-/bson-0.2.2.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.2.2.tgz" + }, + "kerberos": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz" + } + } + }, + "ms": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/ms/-/ms-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.1.0.tgz" + }, + "sliced": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz" + }, + "muri": { + "version": "0.3.1", + "from": "https://registry.npmjs.org/muri/-/muri-0.3.1.tgz", + "resolved": "https://registry.npmjs.org/muri/-/muri-0.3.1.tgz" + }, + "mpromise": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/mpromise/-/mpromise-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.2.1.tgz", + "dependencies": { + "sliced": { + "version": "0.0.4", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.4.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.4.tgz" + } + } + }, + "mpath": { + "version": "0.1.1", + "from": "https://registry.npmjs.org/mpath/-/mpath-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.1.1.tgz" + }, + "regexp-clone": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz" + } + } + }, + "sanitizer": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/sanitizer/-/sanitizer-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/sanitizer/-/sanitizer-0.1.3.tgz" + } + } + }, + "mongoose-timestamp": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/mongoose-timestamp/-/mongoose-timestamp-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/mongoose-timestamp/-/mongoose-timestamp-0.3.0.tgz" + }, + "mongoose-unique-validator": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/mongoose-unique-validator/-/mongoose-unique-validator-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/mongoose-unique-validator/-/mongoose-unique-validator-0.3.0.tgz" + }, + "mongoose-validator": { + "version": "1.2.5", + "from": "https://registry.npmjs.org/mongoose-validator/-/mongoose-validator-1.2.5.tgz", + "resolved": "https://registry.npmjs.org/mongoose-validator/-/mongoose-validator-1.2.5.tgz", + 
"dependencies": { + "validator": { + "version": "4.9.0", + "from": "https://registry.npmjs.org/validator/-/validator-4.9.0.tgz", + "resolved": "https://registry.npmjs.org/validator/-/validator-4.9.0.tgz", + "dependencies": { + "depd": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.0.tgz" + } + } + } + } + }, + "nodemailer": { + "version": "2.6.4", + "from": "https://registry.npmjs.org/nodemailer/-/nodemailer-2.6.4.tgz", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-2.6.4.tgz", + "dependencies": { + "libmime": { + "version": "2.1.0", + "from": "https://registry.npmjs.org/libmime/-/libmime-2.1.0.tgz", + "resolved": "https://registry.npmjs.org/libmime/-/libmime-2.1.0.tgz", + "dependencies": { + "iconv-lite": { + "version": "0.4.13", + "from": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.13.tgz", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.13.tgz" + }, + "libbase64": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/libbase64/-/libbase64-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/libbase64/-/libbase64-0.1.0.tgz" + }, + "libqp": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/libqp/-/libqp-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/libqp/-/libqp-1.1.0.tgz" + } + } + }, + "mailcomposer": { + "version": "3.12.0", + "from": "https://registry.npmjs.org/mailcomposer/-/mailcomposer-3.12.0.tgz", + "resolved": "https://registry.npmjs.org/mailcomposer/-/mailcomposer-3.12.0.tgz", + "dependencies": { + "buildmail": { + "version": "3.10.0", + "from": "https://registry.npmjs.org/buildmail/-/buildmail-3.10.0.tgz", + "resolved": "https://registry.npmjs.org/buildmail/-/buildmail-3.10.0.tgz", + "dependencies": { + "addressparser": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/addressparser/-/addressparser-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/addressparser/-/addressparser-1.0.1.tgz" + }, + "libbase64": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/libbase64/-/libbase64-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/libbase64/-/libbase64-0.1.0.tgz" + }, + "libqp": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/libqp/-/libqp-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/libqp/-/libqp-1.1.0.tgz" + }, + "nodemailer-fetch": { + "version": "1.6.0", + "from": "https://registry.npmjs.org/nodemailer-fetch/-/nodemailer-fetch-1.6.0.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-fetch/-/nodemailer-fetch-1.6.0.tgz" + } + } + } + } + }, + "nodemailer-direct-transport": { + "version": "3.3.2", + "from": "https://registry.npmjs.org/nodemailer-direct-transport/-/nodemailer-direct-transport-3.3.2.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-direct-transport/-/nodemailer-direct-transport-3.3.2.tgz", + "dependencies": { + "smtp-connection": { + "version": "2.12.0", + "from": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", + "resolved": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", + "dependencies": { + "httpntlm": { + "version": "1.6.1", + "from": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", + "resolved": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", + "dependencies": { + "httpreq": { + "version": "0.4.22", + "from": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.22.tgz", + "resolved": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.22.tgz" + 
}, + "underscore": { + "version": "1.7.0", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" + } + } + } + } + } + } + }, + "nodemailer-shared": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/nodemailer-shared/-/nodemailer-shared-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-shared/-/nodemailer-shared-1.1.0.tgz", + "dependencies": { + "nodemailer-fetch": { + "version": "1.6.0", + "from": "https://registry.npmjs.org/nodemailer-fetch/-/nodemailer-fetch-1.6.0.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-fetch/-/nodemailer-fetch-1.6.0.tgz" + } + } + }, + "nodemailer-smtp-pool": { + "version": "2.8.2", + "from": "https://registry.npmjs.org/nodemailer-smtp-pool/-/nodemailer-smtp-pool-2.8.2.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-smtp-pool/-/nodemailer-smtp-pool-2.8.2.tgz", + "dependencies": { + "nodemailer-wellknown": { + "version": "0.1.10", + "from": "https://registry.npmjs.org/nodemailer-wellknown/-/nodemailer-wellknown-0.1.10.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-wellknown/-/nodemailer-wellknown-0.1.10.tgz" + }, + "smtp-connection": { + "version": "2.12.0", + "from": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", + "resolved": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", + "dependencies": { + "httpntlm": { + "version": "1.6.1", + "from": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", + "resolved": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", + "dependencies": { + "httpreq": { + "version": "0.4.22", + "from": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.22.tgz", + "resolved": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.22.tgz" + }, + "underscore": { + "version": "1.7.0", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" + } + } + } + } + } + } + }, + "nodemailer-smtp-transport": { + "version": "2.7.2", + "from": "https://registry.npmjs.org/nodemailer-smtp-transport/-/nodemailer-smtp-transport-2.7.2.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-smtp-transport/-/nodemailer-smtp-transport-2.7.2.tgz", + "dependencies": { + "nodemailer-wellknown": { + "version": "0.1.10", + "from": "https://registry.npmjs.org/nodemailer-wellknown/-/nodemailer-wellknown-0.1.10.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-wellknown/-/nodemailer-wellknown-0.1.10.tgz" + }, + "smtp-connection": { + "version": "2.12.0", + "from": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", + "resolved": "https://registry.npmjs.org/smtp-connection/-/smtp-connection-2.12.0.tgz", + "dependencies": { + "httpntlm": { + "version": "1.6.1", + "from": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", + "resolved": "https://registry.npmjs.org/httpntlm/-/httpntlm-1.6.1.tgz", + "dependencies": { + "httpreq": { + "version": "0.4.22", + "from": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.22.tgz", + "resolved": "https://registry.npmjs.org/httpreq/-/httpreq-0.4.22.tgz" + }, + "underscore": { + "version": "1.7.0", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz" + } + } + } + } + } + } + }, + "socks": { + "version": "1.1.9", + "from": "https://registry.npmjs.org/socks/-/socks-1.1.9.tgz", + 
"resolved": "https://registry.npmjs.org/socks/-/socks-1.1.9.tgz", + "dependencies": { + "ip": { + "version": "1.1.4", + "from": "https://registry.npmjs.org/ip/-/ip-1.1.4.tgz", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.4.tgz" + }, + "smart-buffer": { + "version": "1.0.11", + "from": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.0.11.tgz", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.0.11.tgz" + } + } + } + } + }, + "nodemailer-sendgrid-transport": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/nodemailer-sendgrid-transport/-/nodemailer-sendgrid-transport-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/nodemailer-sendgrid-transport/-/nodemailer-sendgrid-transport-0.2.0.tgz", + "dependencies": { + "sendgrid": { + "version": "1.9.2", + "from": "https://registry.npmjs.org/sendgrid/-/sendgrid-1.9.2.tgz", + "resolved": "https://registry.npmjs.org/sendgrid/-/sendgrid-1.9.2.tgz", + "dependencies": { + "mime": { + "version": "1.3.4", + "from": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz" + }, + "lodash": { + "version": "3.10.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz" + }, + "smtpapi": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/smtpapi/-/smtpapi-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/smtpapi/-/smtpapi-1.2.0.tgz" + } + } + } + } + }, + "optimist": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + }, + "minimist": { + "version": "0.0.10", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" + } + } + }, + "rc": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/rc/-/rc-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/rc/-/rc-0.4.0.tgz", + "dependencies": { + "minimist": { + "version": "0.0.10", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" + }, + "deep-extend": { + "version": "0.2.11", + "from": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz" + }, + "strip-json-comments": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-0.1.3.tgz" + }, + "ini": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz" + } + } + }, + "redis": { + "version": "0.8.2", + "from": "https://registry.npmjs.org/redis/-/redis-0.8.2.tgz", + "resolved": "https://registry.npmjs.org/redis/-/redis-0.8.2.tgz" + }, + "request": { + "version": "2.78.0", + "from": "https://registry.npmjs.org/request/-/request-2.78.0.tgz", + "resolved": "https://registry.npmjs.org/request/-/request-2.78.0.tgz", + "dependencies": { + "aws-sign2": { + "version": "0.6.0", + "from": 
"https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz" + }, + "aws4": { + "version": "1.5.0", + "from": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz" + }, + "caseless": { + "version": "0.11.0", + "from": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "dependencies": { + "delayed-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + } + } + }, + "extend": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz", + "dependencies": { + "asynckit": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + } + } + }, + "har-validator": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "dependencies": { + "chalk": { + "version": "1.1.3", + "from": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "dependencies": { + "ansi-styles": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" + }, + "escape-string-regexp": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "supports-color": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", 
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + } + } + }, + "commander": { + "version": "2.9.0", + "from": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "dependencies": { + "graceful-readlink": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + }, + "is-my-json-valid": { + "version": "2.15.0", + "from": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "dependencies": { + "generate-function": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "dependencies": { + "is-property": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + } + } + }, + "jsonpointer": { + "version": "4.0.0", + "from": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + } + } + }, + "hawk": { + "version": "3.1.3", + "from": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "dependencies": { + "hoek": { + "version": "2.16.3", + "from": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "boom": { + "version": "2.10.1", + "from": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + } + } + }, + "http-signature": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "dependencies": { + "assert-plus": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz", + 
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz" + }, + "jsprim": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "dependencies": { + "extsprintf": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz" + }, + "json-schema": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + }, + "verror": { + "version": "1.3.6", + "from": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz" + } + } + }, + "sshpk": { + "version": "1.10.1", + "from": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "dependencies": { + "asn1": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz" + }, + "assert-plus": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + }, + "dashdash": { + "version": "1.14.0", + "from": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz" + }, + "getpass": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz" + }, + "jsbn": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz" + }, + "tweetnacl": { + "version": "0.14.3", + "from": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz" + }, + "jodid25519": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz" + }, + "ecc-jsbn": { + "version": "0.1.1", + "from": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz" + }, + "bcrypt-pbkdf": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz" + } + } + } + } + }, + "is-typedarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": 
"https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "oauth-sign": { + "version": "0.8.2", + "from": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz" + }, + "qs": { + "version": "6.3.0", + "from": "https://registry.npmjs.org/qs/-/qs-6.3.0.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.3.0.tgz" + }, + "stringstream": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "tough-cookie": { + "version": "2.3.2", + "from": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "dependencies": { + "punycode": { + "version": "1.4.1", + "from": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + } + } + }, + "tunnel-agent": { + "version": "0.4.3", + "from": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz" + } + } + }, + "underscore": { + "version": "1.8.3", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz" + } + } + }, + "fh-messaging-client": { + "version": "1.0.4", + "from": "https://registry.npmjs.org/fh-messaging-client/-/fh-messaging-client-1.0.4.tgz", + "resolved": "https://registry.npmjs.org/fh-messaging-client/-/fh-messaging-client-1.0.4.tgz", + "dependencies": { + "request": { + "version": "2.78.0", + "from": "https://registry.npmjs.org/request/-/request-2.78.0.tgz", + "resolved": "https://registry.npmjs.org/request/-/request-2.78.0.tgz", + "dependencies": { + "aws-sign2": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz" + }, + "aws4": { + "version": "1.5.0", + "from": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz" + }, + "caseless": { + "version": "0.11.0", + "from": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "dependencies": { + "delayed-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + } + } + }, + "extend": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": 
"https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz", + "dependencies": { + "asynckit": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + } + } + }, + "har-validator": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "dependencies": { + "chalk": { + "version": "1.1.3", + "from": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "dependencies": { + "ansi-styles": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" + }, + "escape-string-regexp": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "supports-color": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + } + } + }, + "commander": { + "version": "2.9.0", + "from": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "dependencies": { + "graceful-readlink": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + }, + "is-my-json-valid": { + "version": "2.15.0", + "from": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "dependencies": { + "generate-function": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + 
"resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "dependencies": { + "is-property": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + } + } + }, + "jsonpointer": { + "version": "4.0.0", + "from": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + } + } + }, + "hawk": { + "version": "3.1.3", + "from": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "dependencies": { + "hoek": { + "version": "2.16.3", + "from": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "boom": { + "version": "2.10.1", + "from": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + } + } + }, + "http-signature": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "dependencies": { + "assert-plus": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz" + }, + "jsprim": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "dependencies": { + "extsprintf": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz" + }, + "json-schema": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + }, + "verror": { + "version": "1.3.6", + "from": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz" + } + } + }, + "sshpk": { + "version": "1.10.1", + "from": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "dependencies": { + "asn1": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "resolved": 
"https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz" + }, + "assert-plus": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + }, + "dashdash": { + "version": "1.14.0", + "from": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz" + }, + "getpass": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz" + }, + "jsbn": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz" + }, + "tweetnacl": { + "version": "0.14.3", + "from": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz" + }, + "jodid25519": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz" + }, + "ecc-jsbn": { + "version": "0.1.1", + "from": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz" + }, + "bcrypt-pbkdf": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz" + } + } + } + } + }, + "is-typedarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "oauth-sign": { + "version": "0.8.2", + "from": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz" + }, + "qs": { + "version": "6.3.0", + "from": "https://registry.npmjs.org/qs/-/qs-6.3.0.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.3.0.tgz" + }, + "stringstream": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "tough-cookie": { + "version": "2.3.2", + "from": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "resolved": 
"https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "dependencies": { + "punycode": { + "version": "1.4.1", + "from": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + } + } + }, + "tunnel-agent": { + "version": "0.4.3", + "from": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz" + } + } + }, + "underscore": { + "version": "1.8.0", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.8.0.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.0.tgz" + } + } + }, + "fh-metrics-client": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/fh-metrics-client/-/fh-metrics-client-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/fh-metrics-client/-/fh-metrics-client-1.0.3.tgz", + "dependencies": { + "request": { + "version": "2.78.0", + "from": "https://registry.npmjs.org/request/-/request-2.78.0.tgz", + "resolved": "https://registry.npmjs.org/request/-/request-2.78.0.tgz", + "dependencies": { + "aws-sign2": { + "version": "0.6.0", + "from": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz" + }, + "aws4": { + "version": "1.5.0", + "from": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz" + }, + "caseless": { + "version": "0.11.0", + "from": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "dependencies": { + "delayed-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + } + } + }, + "extend": { + "version": "3.0.0", + "from": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz", + "dependencies": { + "asynckit": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" + } + } + }, + "har-validator": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "dependencies": { + "chalk": { + "version": "1.1.3", + "from": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "dependencies": { + "ansi-styles": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" + 
}, + "escape-string-regexp": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "supports-color": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + } + } + }, + "commander": { + "version": "2.9.0", + "from": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "dependencies": { + "graceful-readlink": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + }, + "is-my-json-valid": { + "version": "2.15.0", + "from": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "dependencies": { + "generate-function": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "dependencies": { + "is-property": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + } + } + }, + "jsonpointer": { + "version": "4.0.0", + "from": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + } + } + }, + "hawk": { + "version": "3.1.3", + 
"from": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "dependencies": { + "hoek": { + "version": "2.16.3", + "from": "http://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", + "resolved": "http://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "boom": { + "version": "2.10.1", + "from": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz", + "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + } + } + }, + "http-signature": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "dependencies": { + "assert-plus": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz" + }, + "jsprim": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "dependencies": { + "extsprintf": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz" + }, + "json-schema": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + }, + "verror": { + "version": "1.3.6", + "from": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz" + } + } + }, + "sshpk": { + "version": "1.10.1", + "from": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "dependencies": { + "asn1": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz" + }, + "assert-plus": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + }, + "dashdash": { + "version": "1.14.0", + "from": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz" + }, + "getpass": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz" + }, + "jsbn": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz" + }, + "tweetnacl": { + "version": "0.14.3", + "from": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz" + }, + "jodid25519": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz" + }, + "ecc-jsbn": { + 
"version": "0.1.1", + "from": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz" + }, + "bcrypt-pbkdf": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz" + } + } + } + } + }, + "is-typedarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "oauth-sign": { + "version": "0.8.2", + "from": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz" + }, + "qs": { + "version": "6.3.0", + "from": "https://registry.npmjs.org/qs/-/qs-6.3.0.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.3.0.tgz" + }, + "stringstream": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "tough-cookie": { + "version": "2.3.2", + "from": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.2.tgz", + "dependencies": { + "punycode": { + "version": "1.4.1", + "from": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" + } + } + }, + "tunnel-agent": { + "version": "0.4.3", + "from": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz" + } + } + }, + "underscore": { + "version": "1.8.0", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.8.0.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.0.tgz" + }, + "moment": { + "version": "2.16.0", + "from": "https://registry.npmjs.org/moment/-/moment-2.16.0.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.16.0.tgz" + } + } + }, + "fh-service-auth": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/fh-service-auth/-/fh-service-auth-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/fh-service-auth/-/fh-service-auth-1.0.3.tgz", + "dependencies": { + "bunyan": { + "version": "1.8.5", + "from": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.5.tgz", + "resolved": 
"https://registry.npmjs.org/bunyan/-/bunyan-1.8.5.tgz", + "dependencies": { + "dtrace-provider": { + "version": "0.8.0", + "from": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.0.tgz", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.0.tgz", + "dependencies": { + "nan": { + "version": "2.4.0", + "from": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.4.0.tgz" + } + } + }, + "mv": { + "version": "2.1.1", + "from": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "dependencies": { + "ncp": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz" + }, + "rimraf": { + "version": "2.4.5", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "dependencies": { + "glob": { + "version": "6.0.4", + "from": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "dependencies": { + "inflight": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "minimatch": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.6", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.2", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.4.0", + "from": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + } + } + } + } + } + }, + "safe-json-stringify": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz" + }, + "moment": { + "version": "2.16.0", + "from": 
"https://registry.npmjs.org/moment/-/moment-2.16.0.tgz", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.16.0.tgz" + } + } + }, + "mongoose": { + "version": "4.4.19", + "from": "https://registry.npmjs.org/mongoose/-/mongoose-4.4.19.tgz", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-4.4.19.tgz", + "dependencies": { + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "hooks-fixed": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz" + }, + "kareem": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/kareem/-/kareem-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/kareem/-/kareem-1.0.1.tgz" + }, + "mpath": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz" + }, + "mpromise": { + "version": "0.5.5", + "from": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz", + "resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz" + }, + "mquery": { + "version": "1.10.0", + "from": "https://registry.npmjs.org/mquery/-/mquery-1.10.0.tgz", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-1.10.0.tgz", + "dependencies": { + "bluebird": { + "version": "2.10.2", + "from": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "http://npm.skunkhenry.com/debug/-/debug-2.2.0.tgz", + "resolved": "http://npm.skunkhenry.com/debug/-/debug-2.2.0.tgz" + }, + "sliced": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz" + } + } + }, + "ms": { + "version": "0.7.1", + "from": "http://npm.skunkhenry.com/ms/-/ms-0.7.1.tgz", + "resolved": "http://npm.skunkhenry.com/ms/-/ms-0.7.1.tgz" + }, + "muri": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz" + }, + "regexp-clone": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz" + }, + "sliced": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz" + } + } + } + } + }, + "mkdirp": { + "version": "0.5.1", + "from": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "dependencies": { + "minimist": { + "version": "0.0.8", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + } + } + }, + "mongodb": { + "version": "2.1.18", + "from": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.18.tgz", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-2.1.18.tgz", + "dependencies": { + "es6-promise": { + "version": "3.0.2", + "from": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.0.2.tgz" + }, + "mongodb-core": { + "version": "1.3.18", + "from": 
"https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.18.tgz", + "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-1.3.18.tgz", + "dependencies": { + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "require_optional": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.0.tgz", + "dependencies": { + "semver": { + "version": "5.3.0", + "from": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz" + }, + "resolve-from": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz" + } + } + } + } + }, + "readable-stream": { + "version": "1.0.31", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + } + } + }, + "mongodb-uri": { + "version": "0.9.7", + "from": "https://registry.npmjs.org/mongodb-uri/-/mongodb-uri-0.9.7.tgz", + "resolved": "https://registry.npmjs.org/mongodb-uri/-/mongodb-uri-0.9.7.tgz" + }, + "mongoose": { + "version": "4.5.0", + "from": "https://registry.npmjs.org/mongoose/-/mongoose-4.5.0.tgz", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-4.5.0.tgz", + "dependencies": { + "bson": { + "version": "0.4.23", + "from": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.4.23.tgz" + }, + "hooks-fixed": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/hooks-fixed/-/hooks-fixed-1.1.0.tgz" + }, + "kareem": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/kareem/-/kareem-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/kareem/-/kareem-1.1.0.tgz" + }, + "mpath": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.2.1.tgz" + }, + "mpromise": { + "version": "0.5.5", + "from": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz", + "resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.5.5.tgz" + }, + "mquery": { + "version": "1.11.0", + "from": "https://registry.npmjs.org/mquery/-/mquery-1.11.0.tgz", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-1.11.0.tgz", + "dependencies": { + "bluebird": { + 
"version": "2.10.2", + "from": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.10.2.tgz" + }, + "debug": { + "version": "2.2.0", + "from": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.2.0.tgz" + }, + "sliced": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz" + } + } + }, + "ms": { + "version": "0.7.1", + "from": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + }, + "muri": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/muri/-/muri-1.1.0.tgz" + }, + "regexp-clone": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz" + }, + "sliced": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz" + } + } + }, + "mongoose-filter-denormalize": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/mongoose-filter-denormalize/-/mongoose-filter-denormalize-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mongoose-filter-denormalize/-/mongoose-filter-denormalize-0.2.1.tgz", + "dependencies": { + "lodash": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-2.2.1.tgz" + }, + "mongoose": { + "version": "3.6.20", + "from": "https://registry.npmjs.org/mongoose/-/mongoose-3.6.20.tgz", + "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-3.6.20.tgz", + "dependencies": { + "hooks": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/hooks/-/hooks-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/hooks/-/hooks-0.2.1.tgz" + }, + "mongodb": { + "version": "1.3.19", + "from": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.19.tgz", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.3.19.tgz", + "dependencies": { + "bson": { + "version": "0.2.2", + "from": "https://registry.npmjs.org/bson/-/bson-0.2.2.tgz", + "resolved": "https://registry.npmjs.org/bson/-/bson-0.2.2.tgz" + }, + "kerberos": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/kerberos/-/kerberos-0.0.3.tgz" + } + } + }, + "ms": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/ms/-/ms-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/ms/-/ms-0.1.0.tgz" + }, + "sliced": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.5.tgz" + }, + "muri": { + "version": "0.3.1", + "from": "https://registry.npmjs.org/muri/-/muri-0.3.1.tgz", + "resolved": "https://registry.npmjs.org/muri/-/muri-0.3.1.tgz" + }, + "mpromise": { + "version": "0.2.1", + "from": "https://registry.npmjs.org/mpromise/-/mpromise-0.2.1.tgz", + "resolved": "https://registry.npmjs.org/mpromise/-/mpromise-0.2.1.tgz", + "dependencies": { + "sliced": { + "version": "0.0.4", + "from": "https://registry.npmjs.org/sliced/-/sliced-0.0.4.tgz", + "resolved": "https://registry.npmjs.org/sliced/-/sliced-0.0.4.tgz" + } + } + }, + "mpath": { + 
"version": "0.1.1", + "from": "https://registry.npmjs.org/mpath/-/mpath-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.1.1.tgz" + }, + "regexp-clone": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-0.0.1.tgz" + } + } + }, + "sanitizer": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/sanitizer/-/sanitizer-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/sanitizer/-/sanitizer-0.1.3.tgz" + } + } + }, + "mongoose-timestamp": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/mongoose-timestamp/-/mongoose-timestamp-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/mongoose-timestamp/-/mongoose-timestamp-0.3.0.tgz" + }, + "mongoose-unique-validator": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/mongoose-unique-validator/-/mongoose-unique-validator-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/mongoose-unique-validator/-/mongoose-unique-validator-0.3.0.tgz" + }, + "mongoose-validator": { + "version": "1.0.3", + "from": "https://registry.npmjs.org/mongoose-validator/-/mongoose-validator-1.0.3.tgz", + "resolved": "https://registry.npmjs.org/mongoose-validator/-/mongoose-validator-1.0.3.tgz", + "dependencies": { + "validator": { + "version": "3.43.0", + "from": "https://registry.npmjs.org/validator/-/validator-3.43.0.tgz", + "resolved": "https://registry.npmjs.org/validator/-/validator-3.43.0.tgz" + } + } + }, + "multer": { + "version": "0.1.8", + "from": "https://registry.npmjs.org/multer/-/multer-0.1.8.tgz", + "resolved": "https://registry.npmjs.org/multer/-/multer-0.1.8.tgz", + "dependencies": { + "busboy": { + "version": "0.2.13", + "from": "https://registry.npmjs.org/busboy/-/busboy-0.2.13.tgz", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-0.2.13.tgz", + "dependencies": { + "dicer": { + "version": "0.2.5", + "from": "https://registry.npmjs.org/dicer/-/dicer-0.2.5.tgz", + "resolved": "https://registry.npmjs.org/dicer/-/dicer-0.2.5.tgz", + "dependencies": { + "streamsearch": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz" + } + } + }, + "readable-stream": { + "version": "1.1.14", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "isarray": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + } + } + } + } + }, + "mkdirp": { + "version": "0.3.5", + "from": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz" + }, + "qs": { 
+ "version": "1.2.2", + "from": "https://registry.npmjs.org/qs/-/qs-1.2.2.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-1.2.2.tgz" + }, + "type-is": { + "version": "1.5.7", + "from": "https://registry.npmjs.org/type-is/-/type-is-1.5.7.tgz", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.5.7.tgz", + "dependencies": { + "media-typer": { + "version": "0.3.0", + "from": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" + }, + "mime-types": { + "version": "2.0.14", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz", + "dependencies": { + "mime-db": { + "version": "1.12.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz" + } + } + } + } + } + } + }, + "optimist": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "dependencies": { + "wordwrap": { + "version": "0.0.3", + "from": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" + }, + "minimist": { + "version": "0.0.10", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" + } + } + }, + "rc": { + "version": "0.4.0", + "from": "https://registry.npmjs.org/rc/-/rc-0.4.0.tgz", + "resolved": "https://registry.npmjs.org/rc/-/rc-0.4.0.tgz", + "dependencies": { + "minimist": { + "version": "0.0.10", + "from": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz" + }, + "deep-extend": { + "version": "0.2.11", + "from": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.2.11.tgz" + }, + "strip-json-comments": { + "version": "0.1.3", + "from": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-0.1.3.tgz", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-0.1.3.tgz" + }, + "ini": { + "version": "1.1.0", + "from": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.1.0.tgz" + } + } + }, + "request": { + "version": "2.74.0", + "from": "https://registry.npmjs.org/request/-/request-2.74.0.tgz", + "resolved": "https://registry.npmjs.org/request/-/request-2.74.0.tgz", + "dependencies": { + "aws-sign2": { + "version": "0.6.0", + "from": "http://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz", + "resolved": "http://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz" + }, + "aws4": { + "version": "1.5.0", + "from": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz" + }, + "bl": { + "version": "1.1.2", + "from": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", + "dependencies": { + "readable-stream": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "from": 
"https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "inherits": { + "version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "isarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "process-nextick-args": { + "version": "1.0.7", + "from": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "string_decoder": { + "version": "0.10.31", + "from": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "util-deprecate": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + } + } + } + } + }, + "caseless": { + "version": "0.11.0", + "from": "http://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz", + "resolved": "http://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz" + }, + "combined-stream": { + "version": "1.0.5", + "from": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.5.tgz", + "dependencies": { + "delayed-stream": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" + } + } + }, + "extend": { + "version": "3.0.0", + "from": "http://registry.npmjs.org/extend/-/extend-3.0.0.tgz", + "resolved": "http://registry.npmjs.org/extend/-/extend-3.0.0.tgz" + }, + "forever-agent": { + "version": "0.6.1", + "from": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" + }, + "form-data": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/form-data/-/form-data-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-1.0.1.tgz", + "dependencies": { + "async": { + "version": "2.1.2", + "from": "https://registry.npmjs.org/async/-/async-2.1.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-2.1.2.tgz", + "dependencies": { + "lodash": { + "version": "4.16.4", + "from": "https://registry.npmjs.org/lodash/-/lodash-4.16.4.tgz", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.16.4.tgz" + } + } + } + } + }, + "har-validator": { + "version": "2.0.6", + "from": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz", + "dependencies": { + "chalk": { + "version": "1.1.3", + "from": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "dependencies": { + "ansi-styles": { + "version": "2.2.1", + "from": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" + }, + "escape-string-regexp": { + "version": "1.0.5", + "from": 
"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + }, + "has-ansi": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "from": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "dependencies": { + "ansi-regex": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + } + } + }, + "supports-color": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + } + } + }, + "commander": { + "version": "2.9.0", + "from": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz", + "dependencies": { + "graceful-readlink": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/graceful-readlink/-/graceful-readlink-1.0.1.tgz" + } + } + }, + "is-my-json-valid": { + "version": "2.15.0", + "from": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.15.0.tgz", + "dependencies": { + "generate-function": { + "version": "2.0.0", + "from": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz", + "resolved": "https://registry.npmjs.org/generate-function/-/generate-function-2.0.0.tgz" + }, + "generate-object-property": { + "version": "1.2.0", + "from": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/generate-object-property/-/generate-object-property-1.2.0.tgz", + "dependencies": { + "is-property": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/is-property/-/is-property-1.0.2.tgz" + } + } + }, + "jsonpointer": { + "version": "4.0.0", + "from": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-4.0.0.tgz" + }, + "xtend": { + "version": "4.0.1", + "from": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + } + } + }, + "pinkie-promise": { + "version": "2.0.1", + "from": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "dependencies": { + "pinkie": { + "version": "2.0.4", + "from": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + } + } + } + } + }, + "hawk": { + "version": "3.1.3", + "from": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + 
"resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", + "dependencies": { + "hoek": { + "version": "2.16.3", + "from": "http://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz", + "resolved": "http://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz" + }, + "boom": { + "version": "2.10.1", + "from": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz", + "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz" + }, + "cryptiles": { + "version": "2.0.5", + "from": "http://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz", + "resolved": "http://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz" + }, + "sntp": { + "version": "1.0.9", + "from": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", + "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + } + } + }, + "http-signature": { + "version": "1.1.1", + "from": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.1.1.tgz", + "dependencies": { + "assert-plus": { + "version": "0.2.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz" + }, + "jsprim": { + "version": "1.3.1", + "from": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.1.tgz", + "dependencies": { + "extsprintf": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.0.2.tgz" + }, + "json-schema": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz" + }, + "verror": { + "version": "1.3.6", + "from": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz" + } + } + }, + "sshpk": { + "version": "1.10.1", + "from": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.10.1.tgz", + "dependencies": { + "asn1": { + "version": "0.2.3", + "from": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz" + }, + "assert-plus": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + }, + "dashdash": { + "version": "1.14.0", + "from": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.0.tgz" + }, + "getpass": { + "version": "0.1.6", + "from": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz" + }, + "jsbn": { + "version": "0.1.0", + "from": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz" + }, + "tweetnacl": { + "version": "0.14.3", + "from": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.3.tgz" + }, + "jodid25519": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/jodid25519/-/jodid25519-1.0.2.tgz" + }, + "ecc-jsbn": { + "version": "0.1.1", + "from": 
"https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz" + }, + "bcrypt-pbkdf": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz" + } + } + } + } + }, + "is-typedarray": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" + }, + "isstream": { + "version": "0.1.2", + "from": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz" + }, + "json-stringify-safe": { + "version": "5.0.1", + "from": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "mime-types": { + "version": "2.1.12", + "from": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.12.tgz", + "dependencies": { + "mime-db": { + "version": "1.24.0", + "from": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.24.0.tgz" + } + } + }, + "node-uuid": { + "version": "1.4.7", + "from": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "resolved": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "oauth-sign": { + "version": "0.8.2", + "from": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz" + }, + "qs": { + "version": "6.2.1", + "from": "https://registry.npmjs.org/qs/-/qs-6.2.1.tgz", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.2.1.tgz" + }, + "stringstream": { + "version": "0.0.5", + "from": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "tough-cookie": { + "version": "2.3.1", + "from": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.1.tgz", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.1.tgz" + }, + "tunnel-agent": { + "version": "0.4.3", + "from": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz" + } + } + }, + "rimraf": { + "version": "2.5.2", + "from": "https://registry.npmjs.org/rimraf/-/rimraf-2.5.2.tgz", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.5.2.tgz", + "dependencies": { + "glob": { + "version": "7.1.1", + "from": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.1.tgz", + "dependencies": { + "fs.realpath": { + "version": "1.0.0", + "from": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + }, + "inflight": { + "version": "1.0.6", + "from": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "inherits": { + 
"version": "2.0.3", + "from": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" + }, + "minimatch": { + "version": "3.0.3", + "from": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "dependencies": { + "brace-expansion": { + "version": "1.1.6", + "from": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz", + "dependencies": { + "balanced-match": { + "version": "0.4.2", + "from": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz" + }, + "concat-map": { + "version": "0.0.1", + "from": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + } + } + } + } + }, + "once": { + "version": "1.4.0", + "from": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "dependencies": { + "wrappy": { + "version": "1.0.2", + "from": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + } + } + }, + "path-is-absolute": { + "version": "1.0.1", + "from": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + } + } + } + } + }, + "underscore": { + "version": "1.8.3", + "from": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..cca2e3d --- /dev/null +++ b/package.json @@ -0,0 +1,72 @@ +{ + "name": "fh-mbaas", + "version": "5.3.12-BUILD-NUMBER", + "description": "", + "main": "index.js", + "author": "FeedHenry", + "license": "Apache-2.0", + "scripts": { + "install": "scripts/install.sh", + "postinstall": "scripts/postinstall.sh", + "start": "node fh-mbaas.js config/conf.json --master-only" + }, + "bin": { + "fh-mbaas": "./fh-mbaas.js" + }, + "engines": { + "node": "4.4" + }, + "preferGlobal": true, + "dependencies": { + "archiver": "1.2.0", + "async": "1.5.2", + "body-parser": "1.14.1", + "bunyan": "1.5.1", + "cors": "2.7.1", + "cuid": "1.3.8", + "diskspace": "1.0.2", + "express": "4.14.0", + "express-bunyan-logger": "1.2.0", + "express-paginate": "0.2.0", + "fh-agenda": "0.9.0", + "fh-amqp-js": "0.7.1", + "fh-cls-mongoose": "2.1.0", + "fh-cluster": "0.3.0", + "fh-component-metrics": "2.2.1", + "fh-config": "1.0.3", + "fh-forms": "1.10.8", + "fh-health": "0.2.0", + "fh-logger": "0.5.1", + "fh-mbaas-middleware": "2.2.7", + "fh-messaging-client": "1.0.4", + "fh-metrics-client": "1.0.3", + "fh-service-auth": "1.0.3", + "mkdirp": "0.5.1", + "mongodb": "2.1.18", + "mongodb-uri": "0.9.7", + "mongoose": "4.5.0", + "mongoose-filter-denormalize": "0.2.1", + "mongoose-timestamp": "0.3.0", + "mongoose-unique-validator": "0.3.0", + "mongoose-validator": "1.0.3", + "multer": "0.1.8", + "optimist": "0.6.1", + "rc": "0.4.0", + "request": "2.74.0", + "rimraf": "2.5.2", + "underscore": "1.8.3" + }, + "devDependencies": { + "grunt": "^1.0.1", + "grunt-fh-build": "^1.0.2", + "istanbul": "0.4.3", + "mocha": "^2.3.3", + "mockgoose": 
"http://npm.skunkhenry.com/mockgoose/-/mockgoose-6.1.0.tgz", + "proxyquire": "^1.4.0", + "should": "2.1.1", + "sinon": "1.17.0", + "supertest": "1.1.0", + "turbo-test-runner": "http://npm.skunkhenry.com/turbo-test-runner/-/turbo-test-runner-0.6.3.tgz", + "deep-equal": "^1.0.1" + } +} diff --git a/scripts/fh-mbaas b/scripts/fh-mbaas new file mode 100755 index 0000000..ae50b5b --- /dev/null +++ b/scripts/fh-mbaas @@ -0,0 +1,247 @@ +#!/bin/bash +###################################################################### +# +# Control script for FeedHenry component: fh-mbaas +# +###################################################################### +if [ "$(id -u)" != "0" ]; then + echo "This script must be run as root" 1>&2 + exit 1 +fi + +################################################ +# GLOBALS +################################################ +export PATH=/usr/local/bin:$PATH + +# Operating System +OSNAME=`uname -s` + +# Service Essentials +PS_DESC=FeedHenry-mbaas +PS_TAG=fh-mbaas +PS_PARENT=fh-mbaas + +# User/Group Details +PS_SPAWN_USR=henryd +PS_DAEMON_USR=henryd +PS_DAEMON_GRP=henryg +PS_DAEMON_UID=`id -u $PS_DAEMON_USR` +PS_DAEMON_GID=`id -g $PS_DAEMON_USR` + +# Service Files +PS_BIN=/usr/local/bin/$PS_TAG +PS_LAUNCHER=/usr/local/bin/${PS_TAG}-launcher.sh +PS_PID=/var/run/$PS_TAG.pid +PS_CFG=/etc/feedhenry/$PS_PARENT/conf.json +PS_CON=/var/log/feedhenry/$PS_PARENT/$PS_TAG-console.log +PS_LOG=/var/log/feedhenry/$PS_PARENT/$PS_TAG.log + +# Service Limits +PS_ULIMIT=8192 +PS_UMASK=002 + +# Other +START_TIMEOUT=30 +STOP_TIMEOUT=10 +RESTART_DELAY=1 +START_COMPLETE_MSG="Started $PS_TAG" + +################################################ +# COMMAND OVERRIDES +################################################ +# Need ps command that has PID/PPID in columns 2/3 +case $OSNAME in + FreeBSD) PSCMD="/bin/ps -ejfww" ;; + Linux) PSCMD="/bin/ps -ejfww" ;; + SunOS) PSCMD="/usr/ucb/ps auxww" ;; + *) PSCMD="/bin/ps -ejf" ;; +esac + +################################################ +# ALREADY RUNNING ? +# Returns: 0 if running, 1 if not +################################################ +running () { + if [ -f $PS_PID ]; then + PID=`cat $PS_PID` + kill -0 $PID > /dev/null 2>&1 + return $? + else + return 1 + fi +} + +################################################ +# START +################################################ +do_start () { + echo -n "Starting $PS_DESC, please wait ..." + ulimit -n $PS_ULIMIT +#umask $PS_UMASK + # Need to pass the effective user/group by id to some components via environment variable + DAEMON_USER=$PS_DAEMON_UID DAEMON_GROUP=$PS_DAEMON_GID PS_BIN=$PS_BIN PS_CON=$PS_CON start-stop-daemon --start -b -m -p $PS_PID -x $PS_LAUNCHER -u $PS_DAEMON_USR -g $PS_DAEMON_GRP -c $PS_SPAWN_USR:$PS_DAEMON_GRP -- $PS_CFG + status=$? + if [ $status -eq 0 ]; then + # Wait for startup to complete + counter=$START_TIMEOUT + while [ $counter -gt 0 ]; do + if [ -f $PS_CON ]; then + if [ -n "`cat $PS_CON | grep -si \"$START_COMPLETE_MSG\"`" ]; then + break; + fi + fi + sleep 1 + echo -n "." + counter=`expr $counter - 1` + done + if [ $counter -eq 0 ]; then + echo " failed to start after $START_TIMEOUT seconds." + return 1 + else + echo " started in `expr $START_TIMEOUT - $counter` seconds." + return 0 + fi + else + return $status + fi +} + +################################################ +# STOP +################################################ +do_stop () { + echo -n "Stopping $PS_DESC, please wait ..." + start-stop-daemon -p $PS_PID -u $PS_DAEMON_USR --stop + status=$? 
+ if [ $status -eq 0 ]; then + # Wait for stop to complete + counter=$STOP_TIMEOUT + while [ $counter -gt 0 ]; do + if running ; then + sleep 1 + echo -n "." + counter=`expr $counter - 1` + else + break + fi + done + if [ $counter -eq 0 ]; then + if [ -n "$ARG2" -a "$ARG2" = "--force" ]; then + echo " failed to stop after $STOP_TIMEOUT seconds, killing PID $PID manually." + kill -9 $PID + else + echo " failed to stop after $STOP_TIMEOUT seconds." + fi + return 1 + else + echo " stopped in `expr $STOP_TIMEOUT - $counter` seconds." + if [ -f $PS_PID ]; then rm $PS_PID; fi + return 0 + fi + else + return $status + fi +} + +################################################ +# MAIN +################################################ +OP=$1 +ARG2=$2 + +case "$OP" in + start) + if running ; then + echo "$PS_DESC is already running (PID=$PID)" + exit 0 + fi + if do_start ; then + exit 0 + else + exit 1 + fi + ;; + + stop) + if running ; then + if do_stop ; then + exit 0 + else + exit 1 + fi + else + echo "$PS_DESC is not running" + fi + ;; + + restart) + if running; then + do_stop + fi + sleep $RESTART_DELAY + do_start + ;; + + status) + if running; then + echo "$PS_DESC is running (PID=$PID)" + exit 0 + else + echo "$PS_DESC is not running" + exit 1 + fi + ;; + + kill) + if running ; then + echo "Killing $PS_DESC (PID=$PID) ..." + kill -9 $PID + else + echo "$PS_DESC is not running" + fi + exit 0 + ;; + + reload) + if running ; then + echo "Reloading config for $PS_DESC (PID=$PID) ... Check output in $PS_CON" + kill -SIGUSR1 $PID + else + echo "$PS_DESC is not running" + fi + exit 0 + ;; + + console) + if [ -f $PS_CON ]; then tail -f $PS_CON; fi + ;; + + log) + if [ -f $PS_LOG ]; then tail -f $PS_LOG; fi + ;; + + purge) + if [ -f $PS_CON ]; then rm -f $PS_CON; fi + if [ -f $PS_LOG ]; then rm -f $PS_LOG; fi + ;; + + proc) + if [ -z "$ARG2" ]; then echo "Please specify a process characteristic"; exit 1; fi + if running; then + cat /proc/$PID/$ARG2 + exit 0 + else + echo "$PS_DESC is not running" + exit 1 + fi + ;; + + *) + echo "Usage: $0 <op> [<arg>]" + echo " where <op>: start|stop|restart|status|console|log|purge|kill|proc|reload" + echo " <arg>: <characteristic> (proc)|--force (kill)" + exit 1 + ;; +esac +exit 0 diff --git a/scripts/fh-mbaas-launcher.sh b/scripts/fh-mbaas-launcher.sh new file mode 100755 index 0000000..f04456e --- /dev/null +++ b/scripts/fh-mbaas-launcher.sh @@ -0,0 +1,7 @@ +#!/bin/bash +# +# Special helper script to be used in conjunction with /etc/init.d/fh-mbaas +# to ensure log output (sent to stdout,stderr) from a daemonized script is accessible.
+# +umask 002 +exec /usr/local/bin/fh-mbaas $* > /var/log/feedhenry/fh-mbaas/fh-mbaas-console.log 2>&1 diff --git a/scripts/install.sh b/scripts/install.sh new file mode 100755 index 0000000..07d942d --- /dev/null +++ b/scripts/install.sh @@ -0,0 +1,19 @@ +#!/bin/sh +if [ "$(id -u)" != "0" ]; then + echo "NOT running post-install script since not ROOT user" + exit +fi + +OSNAME=`uname -s` +case $OSNAME in + Linux) + echo Installing startup script to /etc/init.d/fh-mbaas + cp scripts/fh-mbaas /etc/init.d + echo Initialising - update-rc.d fh-mbaas defaults 80 + update-rc.d fh-mbaas defaults 80 + ;; + *) + echo no post-installation for OS $OSNAME + ;; +esac + diff --git a/scripts/postinstall.sh b/scripts/postinstall.sh new file mode 100755 index 0000000..41184e8 --- /dev/null +++ b/scripts/postinstall.sh @@ -0,0 +1,19 @@ +#!/bin/sh +# PostInstall Script for fh-mbaas + +# Service Management (root only) +if [ "$(id -u)" = "0" ]; then + OSNAME=`uname -s` + case $OSNAME in + Linux) + echo "Installing Service Control Scripts" + cp ./scripts/fh-mbaas /etc/init.d + cp ./scripts/fh-mbaas-launcher.sh /usr/local/bin + echo Initialising - update-rc.d fh-mbaas defaults 80 + update-rc.d fh-mbaas defaults 80 + ;; + *) + # Reserved for future use + ;; + esac +fi diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000..c3b2849 --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,10 @@ +sonar.projectKey=fh-mbaas +sonar.projectName=fh-mbaas-nightly-master +sonar.projectVersion=5.3.12 + +sonar.sources=./lib +sonar.tests=./test +sonar.language=js +sonar.javascript.lcov.reportPath=./coverage/lcov.info + +sonar.exclusions=**/node_modules/**/*.js diff --git a/test/accept/common.js b/test/accept/common.js new file mode 100755 index 0000000..3b7ae06 --- /dev/null +++ b/test/accept/common.js @@ -0,0 +1,47 @@ +var request = require('request'); +var url = exports.baseUrl = process.env['url'] || "http://127.0.0.1:18819/"; + +function post(endPoint, data, cb) { + request.post({url: url + endPoint, json: data}, function(err, response, data) { + if (err) return cb(err + ' - ' + data); + if (response.statusCode !== 200) return cb(response.statusCode + ' - ' + data); + return cb(null, data); + }); +} + +function put(endPoint, data, cb) { + request.put({url: url + endPoint, json: data}, function(err, response, data) { + if (err) return cb(err + ' - ' + data); + if (response.statusCode !== 200) return cb(response.statusCode + ' - ' + data); + return cb(null, data); + }); +} + +function get(endPoint, cb) { + request.get(url + endPoint, function(err, response, data) { + if (err) return cb(err + ' - ' + data); + if (response.statusCode !== 200) return cb(response.statusCode + ' - ' + data); + + var ret; + try { + ret = JSON.parse(data); + } catch(x) { + ret = data; + } + + return cb(null, ret); + }); +} + +function del(endPoint, cb) { + request.del(url + endPoint, function(err, response, data) { + if (err) return cb(err + ' - ' + data); + if (response.statusCode !== 200) return cb(response.statusCode + ' - ' + data); + return cb(null, data); + }); +} + +exports.post = post; +exports.put = put; +exports.get = get; +exports.del = del; \ No newline at end of file diff --git a/test/accept/server.js b/test/accept/server.js new file mode 100755 index 0000000..6312c80 --- /dev/null +++ b/test/accept/server.js @@ -0,0 +1,274 @@ +var assert = require('assert'); +var util = require('util'); +var express = require('express'); +var app = express(); +var cors = require('cors'); +var bodyParser = 
require('body-parser'); +var MongoClient = require('mongodb').MongoClient; +var async = require('async'); +var fhmbaasMiddleware = require('fh-mbaas-middleware'); +var fhLogger = require('fh-logger'); +var ditchServer; +var dynofarmServer; +var testConfig = require('../setup.js'); +var fhconfig = require('fh-config'); +var fhForms = require('fh-forms'); + + + + +var logger = fhLogger.createLogger({ + name: 'accept-test-logger', + streams:[ { + "type": "stream", + "src": true, + "level": "trace", + "stream": "process.stdout" + } ] +}); + +fhForms.init(logger); + +// used for the models init +var cfg = { + mongoUrl: 'mongodb://localhost:27017/test-fhmbaas-accept', + mongo:{ + host: 'localhost', + port: 27017, + name: 'test-fhmbaas-accept', + admin_auth: { + user: 'admin', + pass: 'admin' + } + }, + "fhmessaging":{ + "enabled": true, + "host":"localhost", + "protocol":"http", + "port":8803, + "path":"msg/TOPIC", + "cluster":"development", + "realtime": true, + "apikey":"secretkey", + "files":{ + "recovery_file":"../messages/recovery.log", + "backup_file":"../messages/backup.log" + } + }, + logger: logger +}; + + + +var auth = require('../../lib/middleware/auth.js'); +var dfutils = require('../../lib/util/dfutils.js'); + +app.use(cors()); +app.use(bodyParser.urlencoded({ + extended: false +})); +app.use(bodyParser.json()); +app.use('/api', auth.admin(fhconfig)); + +var server; + +var deleteAmdinUser = false; +var new_db_prefix = "fhmbaas-accept-test"; + + +function setupDitchServer(cb){ + var ditchApp = express(); + ditchApp.use(bodyParser.json()); + ditchApp.use('*', function(req, res){ + return res.json({}); + }); + ditchServer = ditchApp.listen(testConfig.ditchPort, function(){ + console.log('Ditch server is running on port ' + testConfig.ditchPort); + cb(); + }); +} + +function setupDynofarm(cb){ + var dynoApp = express(); + dynoApp.use(bodyParser.json()); + dynoApp.use('*', function(req, res){ + logger.info('[dynofarm] got request, url = ' + req.url); + return res.json([]); + }); + dynofarmServer = dynoApp.listen(testConfig.dynofarmPort, function(){ + console.log('Dynofarm server is running on port ' + testConfig.dynofarmPort); + cb(); + }); +} + +function connectDb(cb){ + var dburl = fhconfig.mongoConnectionString(); + MongoClient.connect(dburl, function(err, db){ + assert.ok(!err, 'Can not connect to mongodb : ' + util.inspect(err)); + return cb(err, db); + }); +} + +function createDBAdminUser(db, user, pass, cb){ + var adminDb = db.admin(); + adminDb.authenticate(user, pass, function(err, result){ + if(err){ + //create admin user, and mark for removal when test finishes + adminDb.addUser(user, pass, function(err, result){ + logger.info('Creating admin db user'); + assert.ok(!err, 'can not create admin user : ' + util.inspect(err)); + deleteAmdinUser = true; + cb(); + }); + } else { + //admin user already exists, continue + cb(); + } + }); +} + +function dropDBAdminUser(db, user, cb){ + if(deleteAmdinUser){ + var adminDb = db.admin(); + logger.info('Remove admin db user'); + adminDb.removeUser(user, function(err){ + cb(); + }); + } else { + cb(); + } +} + + +function dropCollections(db, collections, cb) { + async.each(collections, function(collection, cb){ + logger.info('Drop db collection ' + collection); + db.dropCollection(collection, function(err, results){ + cb(); + }); + }, cb); +} + +function dropDbForDomain(db, cb){ + var adminDb = db.admin(); + adminDb.listDatabases(function(err, dbs){ + assert.ok(!err, 'Failed to list databases: '+ util.inspect(err)); + //created by app env 
acceptance test, since no data is written, the db is not actually created, but the user is, so make sure it's removed + var doDbRemove = ['fhmbaas-accept-test-domain_test_appenvtest', 'test-fhmbaas-accept']; + dbs = dbs.databases; + for(var i=0;il@G#D6~m@*g|nHw1! zn-~}y0@WFs7#kWhC>T)NA=u0-EiOqcQcz&XNX;v%Ou?oCA_kUE$1OiZW#qd#ATDPP zPESZk@H*{t)-y4I+32)Tu~)F!-K+6B7!2EA)WMnp)|LKtg5&1v&yft5wg8C0|449b&GKN`F}~<^LP=$$vMCJ(#zi(Qozpf8THJDQ`&rz0&Jz z?b@wp3KK4hyArW)ryedn{9Iys<@977WiPo+|LQkyJg{QV!=f{uD@|7Gim*=eZM9YX z8@-pY`T_s->qn24Gtwi3hPNHS{I8prn43z=5`a+tH!wFsyu9SoXU<2qY%YiNG^l;j!9it&I8UmvsFd71*Aut*OqaiRF0;3@? z8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd71*Aut*OqaiRF0;3@?8UmvsFd70Rga7~< CmEBAL literal 0 HcmV?d00001 diff --git a/test/fixtures/appdata/index.js b/test/fixtures/appdata/index.js new file mode 100644 index 0000000..0600b7f --- /dev/null +++ b/test/fixtures/appdata/index.js @@ -0,0 +1,27 @@ +var _ = require('underscore'); +var baseJob = { + jobType: "export", + appid: 'appid', + environment: 'env', + domain: 'domain', + status: 'running', + step: 1, + totalSteps: 100, + metadata: { + fileSize: 80 * 1024 * 1024, + fileDeleted: false, + filePath: '/var/tmp/export1.tar.gz', + stopApp: false + }, + progress: { + collections: ['mbaas'] + } +}; + +exports.createJob = function(suffix) { + var data = _.clone(baseJob); + data.appid = data.appid + suffix; + data.environment = data.environment + suffix; + data.domain = data.domain + suffix; + return data; +}; \ No newline at end of file diff --git a/test/fixtures/config/index.js b/test/fixtures/config/index.js new file mode 100644 index 0000000..6ac3ce7 --- /dev/null +++ b/test/fixtures/config/index.js @@ -0,0 +1,89 @@ + +module.exports = { + "mongo": { + "enabled": true, + "name": "fh-mbaas", + "host": "localhost", + "port": 27017, + "replicaset_name": null, + "auth": { + "enabled": false, + "user": "", + "pass": "" + }, + "admin_auth": { + "user": "u-mbaas", + "pass": "password" + } + }, + "fhditch": { + "host": "localhost", + "port": 8802, + "protocol": "http" + }, + "fhdfc": { + "dynofarm": "http://localhost:9000", + "username":"FHDFC_USERNAME", + "_password": "FHDFC_PASSWORD", + "loglevel": "warn", + "cache_timeout": 300000 + }, + "fhamqp":{ + "enabled": true, + "max_connection_retry": 10, + "nodes":"localhost:5672", + "ssl": false, + "vhosts":{ + "events":{ + "name":"fhevents", + "user":"fheventuser", + "password":"fheventpassword" + } + }, + "app":{ + "enabled": false + } + }, + "fhmbaas": { + "mbaasid":"development", + "pagination": { + "maxLimit": 20 + }, + 'pdfExportDir': '/some/path', + "appdata_jobs" : { + "upload_dir" : "/tmp", + "scheduler": { + "concurrency": 1, + "frequency": "30 seconds" + }, + "stalled_job_finder": { + "frequency": "1 minute" + } + } + }, + "fhmessaging":{ + "enabled": true, + "host":"localhost", + "protocol":"http", + "port":8803, + "path":"msg/TOPIC", + "cluster":"development", + "realtime": true, + "files":{ + "recovery_file":"../messages/recovery.log", + "backup_file":"../messages/backup.log" + } + }, + "fhstats":{ + "enabled": false, + "host":"localhost", + "port": 8804, + "protocol": "http", + "apikey": "12345" + }, + "fhredis":{ + "host": "127.0.0.1", + "port": 6379, + "password":"FHREDIS_PASSWORD" + } +}; diff --git a/test/fixtures/forms/dataSources.js b/test/fixtures/forms/dataSources.js new file mode 100644 index 0000000..dbd105b --- /dev/null +++ b/test/fixtures/forms/dataSources.js @@ -0,0 +1,89 @@ + +var services = require('../services'); +var _ = require('underscore'); + +module.exports = { + get: function(){ + 
return { + _id: "somedatasource", + name: "Some Data Source", + serviceGuid: services.get().guid, + endpoint: "/someendpoint" + }; + }, + withData: function(){ + var ds = this.get(); + + ds.currentStatus = { + status: "ok" + }; + ds.data = this.dsDataSet(); + + return ds; + }, + withAuditLogs: function(){ + var ds = this.withData(); + + ds.auditLogs = [{ + updateTimestamp: new Date(), + serviceGuid: services.get().guid, + endpoint: ds.endpoint, + lastRefreshed: new Date(), + data: this.dsDataSet(), + dataHash: "123456", + currentStatus: { + status: "ok" + } + }]; + + return ds; + }, + auditLog: function(){ + var ds = this.get(); + return { + _id: "someauditlogid", + dataSource: ds._id, + updateTimestamp: new Date(), + serviceGuid: services.get().guid, + endpoint: ds.endpoint, + data: this.dsDataSet(), + lastRefreshed: new Date(), + dataHash: "123456", + currentStatus: { + status: "ok" + } + }; + }, + withAuditLogsNoData: function(){ + var ds = this.withData(); + + ds.auditLogs = [_.omit(this.auditLog(), 'data')]; + + return ds; + }, + dsDataSet: function(){ + return [{ + key: "dskey1", + value: "DS Value 1", + selected: false + },{ + key: "dskey2", + value: "DS Value 2", + selected: true + }]; + }, + withError: function(){ + var ds = this.get(); + + ds.currentStatus = { + status: "error", + error: { + code: "DS_ERROR", + userDetail: "Data Source Error", + systemDetail: "Data Source System Data" + } + }; + + return ds; + } +}; diff --git a/test/fixtures/forms/index.js b/test/fixtures/forms/index.js new file mode 100644 index 0000000..ff51d19 --- /dev/null +++ b/test/fixtures/forms/index.js @@ -0,0 +1,5 @@ + +module.exports = { + dataSources: require('./dataSources'), + submissions: require('./submissions') +}; diff --git a/test/fixtures/forms/submissions.js b/test/fixtures/forms/submissions.js new file mode 100644 index 0000000..5c1fdce --- /dev/null +++ b/test/fixtures/forms/submissions.js @@ -0,0 +1,103 @@ +var _ = require('underscore'); +var os = require('os'); + +var submission = { + "_id": "53ac7108b8f15d51516d14b0", + "appClientId": "OgF52REDBM9_ZbDwJjREK-yG", + "appCloudName": "test-t-ogf521234dv90ndm1p-dev", + "appEnvironment": "dev", + "appId": "OgF52MXjmVTjFJ5BRyWLHxy7", + "deviceFormTimestamp": "2014-06-20T14:43:18.722Z", + "deviceIPAddress": "213.233.150.90,10.189.254.5", + "deviceId": "3C5ECCB9-3ABE-4DEC-AD7E-35B11454F366", + "formId": "53a44886d55d83f96dad6ca8", + "masterFormTimestamp": "2014-06-20T14:43:18.722Z", + "timezoneOffset": -60, + "userId": null, + "formFields": [{ + "fieldId": "53a44886d55d83f96dad6c96", + "fieldValues": [30002144] + }, { + "fieldId": "53a44886d55d83f96dad6c97", + "fieldValues": ["OBL126"] + }, { + "fieldId": "53a44886d55d83f96dad6c9a", + "fieldValues": ["LMK-STSE"] + }, { + "fieldId": "53a44886d55d83f96dad6c95", + "fieldValues": ["Test text"] + }, { + "fieldId": "53a44886d55d83f96dad6c99", + "fieldValues": ["LMK"] + }, { + "fieldId": "53a44886d55d83f96dad6c94", + "fieldValues": ["TI"] + }, { + "fieldId": "53a44886d55d83f96dad6c98", + "fieldValues": ["Stone\nsome new line"] + }, { + "fieldId": "53a44886d55d83f96dad6c9b", + "fieldValues": ["Egan_C"] + }, { + "fieldId": "53a44886d55d83f96dad6c9c", + "fieldValues": ["Adjust"] + }, { + "fieldId": "53a44886d55d83f96dad6c9d", + "fieldValues": [""] + }, { + "fieldId": "53a44886d55d83f96dad6c9e", + "fieldValues": ["2014-07-26"] + }, { + "fieldId": "53a44886d55d83f96dad6c9f", + "fieldValues": ["2014-08-26"] + }, { + "fieldId": "53a44886d55d83f96dad6ca0", + "fieldValues": ["High"] + }, { + "fieldId": 
"53a44886d55d83f96dad6ca1", + "fieldValues": ["Abuttments"] + }, { + "fieldId": "53a44886d55d83f96dad6ca2", + "fieldValues": ["Bent"] + }, { + "fieldId": "53a44886d55d83f96dad6ca3", + "fieldValues": [""] + }, { + "fieldId": "53a44886d55d83f96dad6ca4", + "fieldValues": ["Authorised Work"] + }, { + "fieldId": "53a44886d55d83f96dad6ca5", + "fieldValues": [""] + }, { + "fieldId": "53a44886d55d83f96dad6ca6", + "fieldValues": [{ + "mbaasUrl": "/mbaas/forms/:appId/submission/:submissionId/file/:fileGroupId", + "url": "/api/v2/forms/submission/file/53ac7112859dcc5151000001?rand=0.04190378077328205", + "fieldId": "53a44886d55d83f96dad6ca6", + "fileUpdateTime": 1403810050144, + "imgHeader": "data:image/png;base64,", + "fileType": "image/png", + "fileSize": 560342, + "contentType": "base64", + "hashName": "filePlaceHolder10a0bd6f827beb3bc39c5f51d7daa0ea", + "fileName": "filePlaceHolder10a0bd6f827beb3bc39c5f51d7daa0ea.png", + "groupId": "53ac7112859dcc5151000001" + }] + }], + "comments": [], + "status": "complete", + "submissionStartedTimestamp": "2014-06-26T19:14:16.144Z", + "updatedTimestamp": "2014-06-26T19:14:29.415Z", + "submissionCompletedTimestamp": "2014-06-26T19:14:29.409Z", + "downloadFile": os.tmpdir() + '/download.pdf', + "pdfExportDir": '/tmp/', + "fileUrlPath": '/some/path', + "location": 'example.com.org' +}; + + +module.exports = { + get: function(){ + return _.clone(submission); + } +}; diff --git a/test/fixtures/index.js b/test/fixtures/index.js new file mode 100644 index 0000000..20c9a67 --- /dev/null +++ b/test/fixtures/index.js @@ -0,0 +1,28 @@ +var mockMongoUrl = "mongodb://someuser:somepassword@some.mongo.host:27017,some.mongo.host2:27017/mockdomain_mockenv?replicaSet=somereplset"; + +module.exports = { + forms: require('./forms'), + services: require('./services'), + mockMongoUrl: mockMongoUrl, + mockEnv: "mockenv", + mockDomain: "mockdomain", + config: require('./config'), + MockReadStream: require('./mock_readStream'), + MockWriteStream: require('./mock_writeStream'), + appdata: require('./appdata'), + envConfig: function(){ + return { + domain: this.mockDomain, + environment: this.mockEnv, + dbConf: { + user: "someuser", + pass: "somepassword", + host: "some.mongo.host,some.mongo.host2", + replicaset_name: "somereplset", + port: 27017, + name: this.mockDomain + "_" + this.mockEnv, + expectedMongoUrl: mockMongoUrl + } + } + } +}; diff --git a/test/fixtures/mock_readStream.js b/test/fixtures/mock_readStream.js new file mode 100644 index 0000000..fe531f3 --- /dev/null +++ b/test/fixtures/mock_readStream.js @@ -0,0 +1,22 @@ +var stream = require('stream'); +var util = require('util'); + +//Handy writable stream to test with +function MockReadStream () { // step 2 + stream.Readable.call(this); + this.isCalled = false; +} + +util.inherits(MockReadStream, stream.Readable); // step 1 + +//Mock read stream that emits "something" then ends. 
+MockReadStream.prototype._read = function () { // step 3 + if(!this.isCalled){ + this.isCalled = true; + this.push("Something"); + } else { + this.push(null); + } +}; + +module.exports = MockReadStream; \ No newline at end of file diff --git a/test/fixtures/mock_writeStream.js b/test/fixtures/mock_writeStream.js new file mode 100644 index 0000000..507dc75 --- /dev/null +++ b/test/fixtures/mock_writeStream.js @@ -0,0 +1,16 @@ +var stream = require('stream'); +var util = require('util'); + + +//Handy writable stream to test with +function MockWriteStream () { // step 2 + stream.Writable.call(this); +} + +util.inherits(MockWriteStream, stream.Writable); // step 1 + +MockWriteStream.prototype._write = function (chunk, encoding, done) { // step 3 + done(); +}; + +module.exports = MockWriteStream; \ No newline at end of file diff --git a/test/fixtures/services/index.js b/test/fixtures/services/index.js new file mode 100644 index 0000000..4e60c54 --- /dev/null +++ b/test/fixtures/services/index.js @@ -0,0 +1,21 @@ +var fixtures = require('../../fixtures'); + +module.exports = { + get: function(){ + return { + guid: "someserviceguid", + domain: fixtures.mockDomain + } + }, + deployedService: function(){ + return { + guid: "someserviceguid", + domain: fixtures.mockDomain, + environment: fixtures.mockEnv, + url: "https://somedomain-someserviceguid-someenv.feedhenry.com", + isServiceApp: true, + accessKey: "accesskey", + serviceAccessKey: "serviceaccesskey" + } + } +}; \ No newline at end of file diff --git a/test/setup.js b/test/setup.js new file mode 100644 index 0000000..5f80e86 --- /dev/null +++ b/test/setup.js @@ -0,0 +1,93 @@ +var fhconfig = require('fh-config'); +var ditchPort = 19001; +var dynofarmPort = 19002; + +var config ={ + fhmbaas:{ + key:'testkey', + protocol: "https", + "mbaasid":"development", + "appdataexport": { + "output_dir": "/tmp" + } + }, + mongo:{ + enabled: true, + host: 'localhost', + port: 27017, + name: 'test-fhmbaas-accept', + auth: { + enabled: false + }, + admin_auth: { + user: 'admin', + pass: 'admin' + } + }, + fhditch:{ + host:'localhost', + port:ditchPort, + protocol:'http' + }, + fhdfc:{ + "dynofarm":'http://localhost:' + dynofarmPort, + "username":"DYNOFARM_USERNAME", + "_password": "DYNOFAR_PASSWORD", + "cache_timeout": 30000, + "loglevel": "warn" + }, + fhamqp:{ + "enabled": false, + "max_connection_retry": 10, + "nodes":"localhost:5672", + "ssl": false, + "vhosts":{ + "events":{ + "name":"fhevents", + "user":"fheventuser", + "password":"fheventpassword" + } + }, + "app":{ + "enabled": false + } + }, + "fhmessaging":{ + "enabled": true, + "host":"localhost", + "protocol":"http", + "port":8803, + "path":"msg/TOPIC", + "cluster":"development", + "realtime": true, + "apikey":"secretkey", + "files":{ + "recovery_file":"../messages/recovery.log", + "backup_file":"../messages/backup.log" + } + }, + "fhmetrics" :{ + "host":"127.0.0.1", + "port":"8813", + "protocol":"http", + "apikey":"somekey" + }, + fhstats:{ + "enabled": false, + "host":"localhost", + "port": 8804, + "protocol": "http" + } +}; + +fhconfig.setRawConfig(config); + +module.exports.setUp = function(done){ + if (done) { + done(); + } +}; + +module.exports.config = config; +module.exports.dynofarmPort = dynofarmPort; +module.exports.ditchPort = ditchPort; diff --git a/test/stubs/dataSourceUpdater/handlers/index.js b/test/stubs/dataSourceUpdater/handlers/index.js new file mode 100644 index 0000000..4c87ecc --- /dev/null +++ b/test/stubs/dataSourceUpdater/handlers/index.js @@ -0,0 +1,117 @@ +var sinon = 
require('sinon'); +var fixtures = require('../../../fixtures'); + +module.exports = { + processEnvDataSources: function () { + var stub = sinon.stub(); + var mockEnvConfig = fixtures.envConfig(); + + stub.withArgs( + sinon.match( + { + currentTime: sinon.match.date, + envConfigEntry: mockEnvConfig + } + ), sinon.match.func + ) + .callsArg(1); + + stub.throws("Invalid Arguments"); + + return stub; + }, + requestEndpointData: function () { + var stub = sinon.stub(); + + stub.withArgs( + sinon.match( + { + fullUrl: sinon.match.string, + accessKey: sinon.match.string + } + ), sinon.match.func + ).callsArgWith(1, undefined, fixtures.forms.dataSources.dsDataSet()); + + stub.throws("Invalid Arguments"); + + return stub; + }, + updateDataSourceCache: function () { + var stub = sinon.stub(); + var dsWithData = fixtures.forms.dataSources.withData(); + var dsWithError = fixtures.forms.dataSources.withError(); + + stub.withArgs( + sinon.match( + { + mongoUrl: sinon.match.string, + dataSourceId: dsWithData._id, + data: sinon.match.array.and(sinon.match([sinon.match(dsWithData.data[0]), sinon.match(dsWithData[1])])) + } + ), sinon.match.func + ).callsArgWith( + 1, { + validDataSourceUpdates: [dsWithData] + } + ); + + stub.withArgs( + sinon.match( + { + mongoUrl: sinon.match.string, + dataSourceId: dsWithData._id, + error: sinon.match(dsWithError.currentStatus.error) + } + ), sinon.match.func + ).callsArgWith( + 1, { + validDataSourceUpdates: [dsWithError] + } + ); + + stub.throws("Invalid Arguments"); + + return stub; + }, + updateSingleDataSource: function (expectError) { + var stub = sinon.stub(); + var dsWithData = fixtures.forms.dataSources.withData(); + var dsWithError = fixtures.forms.dataSources.withError(); + var deployedService = fixtures.services.deployedService(); + + if(!expectError){ + stub.withArgs( + sinon.match( + { + accessKey: deployedService.serviceAccessKey, + fullUrl: sinon.match.string, + dataSourceId: dsWithData._id, + mongoUrl: sinon.match.string + } + ), + sinon.match.func + ).callsArgWith(1, undefined); + } else { + stub.withArgs( + sinon.match( + { + accessKey: sinon.match.falsy, + fullUrl: sinon.match.falsy, + dataSourceId: dsWithError._id, + mongoUrl: sinon.match.string, + error: sinon.match({ + code: sinon.match.string, + userDetail: sinon.match.string, + systemDetail: sinon.match.string + }) + } + ), + sinon.match.func + ).callsArgWith(1, undefined); + } + + stub.throws(new Error("Invalid Arguments")); + + return stub; + } +}; \ No newline at end of file diff --git a/test/stubs/dataSourceUpdater/index.js b/test/stubs/dataSourceUpdater/index.js new file mode 100644 index 0000000..8ac34af --- /dev/null +++ b/test/stubs/dataSourceUpdater/index.js @@ -0,0 +1,4 @@ + +module.exports = { + handlers: require('./handlers') +}; \ No newline at end of file diff --git a/test/stubs/fhForms/index.js b/test/stubs/fhForms/index.js new file mode 100644 index 0000000..ffd2db3 --- /dev/null +++ b/test/stubs/fhForms/index.js @@ -0,0 +1,291 @@ +var sinon = require('sinon'); +var fixtures = require('../../fixtures'); +var _ = require('underscore'); + +var mockMongoUrl = fixtures.mockMongoUrl; + +var noop = sinon.stub.yields(); + +module.exports = { + core: { + exportSubmissions: function(expectedFileUrl){ + var stub = sinon.stub(); + + //The CSV Response is always "date-formId-formName : 'full CSV Export For The Form'" + var mockCSVResponse = { + "2016-05-25-01-49-someformid-someformname": 
'formName,formId,_id,submissionCompletedTimestamp,appCloudName,deviceId,deviceIPAddress,updatedTimestamp,NON ADMIN,ADMIN\ntestadmind,573c4c28c080559575da5bba,573c4c7d6be54d82766374a3,Wed May 18 2016 11:05:34 GMT+0000 (UTC),testing-cdwmcf6dpi6txzock2exkxgi-dev,B64F9A23336B4CADBE55A26B39ABBBB8,"83.147.149.210,172.18.156.30",Wed May 18 2016 11:05:43 GMT+0000 (UTC),asfasf,gsdf' + }; + + stub.withArgs( + sinon.match({ + uri: sinon.match(fixtures.mockMongoUrl) + }), + sinon.match({ + downloadUrl: sinon.match(expectedFileUrl) + }), + sinon.match.func + ) + .callsArgWith(2, undefined, mockCSVResponse); + + stub.throws("Invalid Arguments"); + return stub; + }, + getSubmissions: function(params){ + params = params || {}; + var stub = sinon.stub(); + + var expectedPaginationParams = { + page: params.expectedPage ? params.expectedPage : sinon.match.number, + limit: params.expectedLimit ? params.expectedLimit : sinon.match.number + }; + + //Checking for a filter param if required. + if(params.expectedFilter){ + expectedPaginationParams.filter = sinon.match(params.expectedFilter); + } + + var expectedParams = { + paginate: sinon.match(expectedPaginationParams) + }; + + if(params.expectedFormId){ + expectedParams.formId = sinon.match(params.expectedFormId); + } + + if(params.expectedProjectId){ + expectedParams.appId = sinon.match(params.expectedProjectId); + } + + stub.withArgs(sinon.match({ + uri: sinon.match.string + }), sinon.match(expectedParams), sinon.match.func).callsArgWith(2, undefined, { + submissions: [fixtures.forms.submissions.get(), fixtures.forms.submissions.get()], + total: 2, + pages: 1 + }); + + stub.throws("Invalid Arguments"); + + return stub; + }, + submissionSearch: function(params){ + params = params || {}; + var stub = sinon.stub(); + + stub.withArgs(sinon.match({ + uri: sinon.match.string + }), sinon.match({ + clauseOperator: sinon.match.string, + queryFields: sinon.match.object, + paginate: sinon.match({ + page: params.expectedPage ? params.expectedPage : sinon.match.number, + limit: params.expectedLimit ? 
params.expectedLimit : sinon.match.number + }) + }), sinon.match.func).callsArgWith(2, undefined, { + submissions: [fixtures.forms.submissions.get(), fixtures.forms.submissions.get()], + total: 2, + pages: 1 + }); + + stub.throws("Invalid Arguments"); + + return stub; + }, + generateSubmissionPdf: function() { + var mockSubmission = fixtures.forms.submissions.get(); + var stub = sinon.stub(); + stub.withArgs( + sinon.match({ + _id: sinon.match(mockSubmission._id), + pdfExportDir: sinon.match(fixtures.config.fhmbaas.pdfExportDir), + filesAreRemote: false, + fileUriPath: sinon.match.string, + downloadUrl: sinon.match.string, + location: sinon.match(mockSubmission.location), + uri: sinon.match(fixtures.mockMongoUrl) + }), sinon.match.func).callsArgWith(1, undefined, mockSubmission.downloadFile); + stub.yields("Invalid Arguments"); + return stub; + }, + dataSources: { + get: function () { + var mockDataSource = fixtures.forms.dataSources.withData(); + var mockDataSourceWithAuditLogs = fixtures.forms.dataSources.withAuditLogs(); + var stub = sinon.stub(); + + stub.withArgs(sinon.match.has('uri', mockMongoUrl), sinon.match({ + _id: mockDataSource._id, + includeAuditLog: sinon.match.falsy + }), sinon.match.func).callsArgWith(2, undefined, mockDataSource); + + //Looking for audit logs + stub.withArgs(sinon.match({ + uri: mockMongoUrl + }), sinon.match({ + _id: mockDataSource._id, + includeAuditLog: true + }), sinon.match.func).callsArgWith(2, undefined, mockDataSourceWithAuditLogs); + + stub.throws("Invalid Arguments"); + + return stub; + }, + getAuditLogEntry: function(){ + var mockAuditLog = fixtures.forms.dataSources.auditLog(); + + var stub = sinon.stub(); + + stub.withArgs(sinon.match({ + uri: mockMongoUrl + }), sinon.match({ + _id: mockAuditLog._id + }), sinon.match.func).callsArgWith(2, undefined, mockAuditLog); + + stub.throws("Invalid Arguments"); + + return stub; + }, + list: function () { + + var mockDataSource = fixtures.forms.dataSources.get(); + var stub = sinon.stub(); + + stub.withArgs(sinon.match.has('uri', sinon.match(function (val) { + return val.indexOf("mongodb://") > -1; + })), sinon.match.object, sinon.match.func).callsArgWith(2, undefined, [mockDataSource]); + + stub.throws("Invalid Arguments"); + + return stub; + }, + deploy: function () { + + var mockDataSource = fixtures.forms.dataSources.get(); + var stub = sinon.stub(); + + stub.withArgs(sinon.match.has('uri', mockMongoUrl), sinon.match.has("_id", mockDataSource._id).and(sinon.match.has("name", mockDataSource.name)), sinon.match.func).callsArgWith(2, undefined, mockDataSource); + + stub.throws("Invalid Arguments"); + + return stub; + }, + remove: function () { + var mockDataSource = fixtures.forms.dataSources.get(); + var stub = sinon.stub(); + + stub.withArgs(sinon.match.has('uri', mockMongoUrl), sinon.match.has("_id", mockDataSource._id), sinon.match.func).callsArgWith(2); + + stub.throws("Invalid Arguments"); + + return stub; + }, + validate: function () { + var validateDataSourceStub = sinon.stub(); + + var dsWithData = fixtures.forms.dataSources.withData(); + + validateDataSourceStub.withArgs( + sinon.match( + { + uri: fixtures.envConfig().dbConf.expectedMongoUrl + } + ), sinon.match({ + _id: dsWithData._id, + data: sinon.match.array + }), + sinon.match.func + ) + .callsArgWith(2, undefined, _.extend(dsWithData, { + validationResult: { + valid: true, + message: "Data Is Valid" + } + })); + + validateDataSourceStub.throws("Invalid Arguments"); + + return validateDataSourceStub; + }, + updateCache: function () { + 
var updateCacheStub = sinon.stub(); + + var dsWithData = fixtures.forms.dataSources.withData(); + + var dsWithError = fixtures.forms.dataSources.withError(); + + updateCacheStub.withArgs( + sinon.match( + { + uri: fixtures.envConfig().dbConf.expectedMongoUrl + } + ), sinon.match( + [sinon.match({ + _id: dsWithData._id, + data: sinon.match.array, + dataError: sinon.match.falsy + })] + ), sinon.match({ + currentTime: sinon.match.date + }), sinon.match.func + ). + callsArgWith( + 3, undefined, { + validDataSourceUpdates: [dsWithData] + } + ); + + updateCacheStub.withArgs( + sinon.match( + { + uri: fixtures.envConfig().dbConf.expectedMongoUrl + } + ), sinon.match( + [sinon.match({ + _id: dsWithError._id, + data: sinon.match.array, + dataError: sinon.match(dsWithError.currentStatus.error) + })] + ), sinon.match({ + currentTime: sinon.match.date + }), sinon.match.func + ). + callsArgWith( + 3, undefined, { + validDataSourceUpdates: [dsWithError] + } + ); + + + updateCacheStub.throws("Invalid Arguments"); + + return updateCacheStub; + } + } + }, + 'middleware': { + 'submissions': { + 'generatePDF': noop, + 'getRequestFileParameters': noop, + 'submitFormFileBase64': noop, + 'submitFormFile': noop, + 'completeSubmission': noop, + 'getSubmissionFile': noop, + 'processFileResponse': noop, + 'status': noop, + 'listProjectSubmissions': noop, + 'get': noop, + }, + 'formProjects': { + 'getFormIds': noop, + 'getFullTheme': noop, + 'getConfig': noop + }, + 'forms': { + 'listDeployedForms': noop, + 'get': noop, + 'search': noop, + 'submitFormData':noop + }, + 'parseMongoConnectionOptions': noop + } +}; diff --git a/test/stubs/fhServiceAuth/index.js b/test/stubs/fhServiceAuth/index.js new file mode 100644 index 0000000..9713406 --- /dev/null +++ b/test/stubs/fhServiceAuth/index.js @@ -0,0 +1,143 @@ +var sinon = require('sinon'); +var fixtures = require('../../fixtures'); +var _ = require('underscore'); + +function updateDataSources() { + var stub = sinon.stub(); + var mockDs = fixtures.forms.dataSources.get(); + var mockService = fixtures.services.get(); + + var addDsMatcher = sinon.match({ + domain: fixtures.mockDomain, + guid: fixtures.services.get().guid, + dataSourceIds: sinon.match.array, + addDataSource: true + }); + + stub.withArgs(addDsMatcher).callsArgWith(1, undefined, _.extend(mockService, {dataSources: [mockDs._id]})); + + stub.throws("Invalid Arguments"); + + return stub; +} + +function removeDataSource() { + var stub = sinon.stub(); + var mockService = fixtures.services.get(); + + var removeDsMatcher = sinon.match({ + domain: fixtures.mockDomain, + guid: fixtures.services.get().guid, + dataSourceIds: sinon.match.array + }); + + stub.withArgs(removeDsMatcher).callsArgWith(1, undefined, _.extend(mockService, {dataSources: []})); + + stub.throws("Invalid Arguments"); + + return stub; +} + +function get(modelStubs) { + var stub = sinon.stub(); + + stub.withArgs(sinon.match(fixtures.mockMongoUrl)).returns(modelStubs); + + stub.throws("Invalid Arguments"); + + return stub; +} + +function findOneOrCreate(stubs) { + stubs = stubs || {}; + var mockService = fixtures.services.get(); + var mockDs = fixtures.forms.dataSources.get(); + var stub = sinon.stub(); + + var queryMatcher = sinon.match({ + domain: fixtures.mockDomain, + guid: mockService.guid + }); + + var serviceMatcher = sinon.match({ + guid: mockService.guid, + dataSources: [mockDs._id] + }); + + stub.withArgs(sinon.match(queryMatcher), sinon.match(serviceMatcher), sinon.match.func).callsArgWith(2, undefined, _.extend(mockService, {dataSources: 
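Every stub factory in these fixtures follows the same strict-stub convention: the expected argument shape gets a canned asynchronous response via callsArgWith, and any call that does not match falls through to throws, so a test fails immediately on unexpected arguments. A minimal sketch of the pattern in isolation (the strictList name, URI and canned result are illustrative, not part of the patch):

var sinon = require('sinon');

// Answer only the expected argument shape; blow up on anything else.
function strictList(expectedUri, cannedResult) {
  var stub = sinon.stub();

  stub.withArgs(
    sinon.match({ uri: sinon.match(expectedUri) }),
    sinon.match.object,
    sinon.match.func
  ).callsArgWith(2, undefined, cannedResult);

  // Unmatched calls throw instead of silently returning undefined.
  stub.throws("Invalid Arguments");

  return stub;
}

// Used in a test, the stub behaves like the async API it replaces:
var list = strictList('mongodb://localhost/testdb', [{ _id: 'ds1' }]);
list({ uri: 'mongodb://localhost/testdb' }, {}, function(err, results) {
  // err is undefined and results is the canned array
});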
[mockDs._id]}, stubs)); + + stub.throws("Invalid Arguments"); + + return stub; +} + +function findOne(stubs) { + stubs = stubs || {}; + var mockService = fixtures.services.deployedService(); + var mockDs = fixtures.forms.dataSources.get(); + var stub = sinon.stub(); + + var queryMatcher = sinon.match({ + domain: fixtures.mockDomain, + guid: mockService.guid + }); + + stub.withArgs(sinon.match(queryMatcher), sinon.match.func).callsArgWith(1, undefined, _.extend(mockService, {dataSources: [mockDs._id]}, stubs)); + + stub.withArgs(sinon.match(queryMatcher), sinon.match.has("lean"), sinon.match.func).callsArgWith(2, undefined, _.extend(mockService, {dataSources: [mockDs._id]})); + + stub.throws("Invalid Arguments"); + + return stub; +} + +function find(){ + var mockService = fixtures.services.get(); + var mockDs = fixtures.forms.dataSources.get(); + var stub = sinon.stub(); + + var queryMatcher = sinon.match({ + domain: fixtures.mockDomain + }); + + stub.withArgs(sinon.match(queryMatcher), sinon.match.func).callsArgWith(1, undefined, [_.extend(mockService, {dataSources: [mockDs._id]})]); + + stub.throws("Invalid Arguments"); + + return stub; +} + +function remove() { + var stub = sinon.stub(); + + stub.withArgs(sinon.match.func).callsArg(0); + + stub.throws("Invalid Arguments"); + + return stub; +} + +function save() { + var mockService = fixtures.services.get(); + var mockDs = fixtures.forms.dataSources.get(); + var stub = sinon.stub(); + + stub.withArgs(sinon.match.func).callsArgWith(0, undefined, _.extend(mockService, {dataSources: [mockDs._id]})); + + stub.throws("Invalid Arguments"); + + return stub; +} + +module.exports = { + model: { + get: get, + findOneOrCreate: findOneOrCreate, + findOne: findOne, + updateDataSources: updateDataSources, + removeDataSource: removeDataSource, + remove: remove, + find: find, + save: save + } +}; \ No newline at end of file diff --git a/test/stubs/index.js b/test/stubs/index.js new file mode 100644 index 0000000..acf391d --- /dev/null +++ b/test/stubs/index.js @@ -0,0 +1,9 @@ + + +module.exports = { + forms: require('./fhForms'), + services: require('./services'), + dataSourceUpdater: require('./dataSourceUpdater'), + fhServiceAuth: require('./fhServiceAuth'), + mbaasMiddleware: require('./mbaasMiddleware') +}; diff --git a/test/stubs/mbaasMiddleware/index.js b/test/stubs/mbaasMiddleware/index.js new file mode 100644 index 0000000..1f4b860 --- /dev/null +++ b/test/stubs/mbaasMiddleware/index.js @@ -0,0 +1,11 @@ +var sinon = require('sinon'); +var passThrough = sinon.stub().yields(); + +module.exports = { + 'envMongoDb': { + 'getEnvironmentDatabase': passThrough + }, + 'auth': { + 'app': passThrough + } +}; diff --git a/test/stubs/mongo/mongoMocks.js b/test/stubs/mongo/mongoMocks.js new file mode 100644 index 0000000..ba5ec9a --- /dev/null +++ b/test/stubs/mongo/mongoMocks.js @@ -0,0 +1,33 @@ +var _ = require('underscore'); + + +var mongoMock = { + createMockedDbConnection: function(uri) { + + // Mocked DB Object + return { + mockedDb: require('../../fixtures/appdata/export/app1db.json'), + collection: function(name) { + return { + mockedCollection: this.mockedDb.collections[name], + stats: function(cb) { + cb(null, { + size: this.mockedCollection.size + }); + } + } + }, + collectionNames: function(cb) { + cb(null, Object.keys(this.mockedDb.collections)); + }, + close: function(cb) { + cb(); + } + }; + }, + connect: function(uri, cb) { + cb(null, this.createMockedDbConnection(uri)); + } +} + +module.exports.MongoClient=mongoMock; \ No newline at end of 
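The MongoClient mock above answers entirely from the app1db.json fixture, which makes collection-size logic testable without a database. A short sketch of how the mock behaves when exercised directly (the require path assumes the snippet sits next to test/stubs/mongo/mongoMocks.js):

var assert = require('assert');
var mongoMocks = require('./mongoMocks');

// connect() hands back the in-memory db; collectionNames() and
// collection(name).stats() are resolved from the JSON fixture.
mongoMocks.MongoClient.connect('mongodb://ignored-by-the-mock', function(err, db) {
  assert.ok(!err);
  db.collectionNames(function(err, names) {
    assert.ok(Array.isArray(names));
    // stats.size mirrors whatever size the fixture records for that collection
    db.collection(names[0]).stats(function(err, stats) {
      assert.ok(stats);
      db.close(function() {});
    });
  });
});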
file diff --git a/test/stubs/services/appForms/dataSources/index.js b/test/stubs/services/appForms/dataSources/index.js new file mode 100644 index 0000000..6efae1f --- /dev/null +++ b/test/stubs/services/appForms/dataSources/index.js @@ -0,0 +1,22 @@ +var sinon = require('sinon'); +var fixtures = require('../../../../fixtures'); + +module.exports = { + listForUpdate: function () { + var stub = sinon.stub(); + + stub.withArgs( + sinon.match( + { + mongoUrl: fixtures.envConfig().dbConf.expectedMongoUrl, + currentTime: sinon.match.date + } + ), sinon.match.func + ) + .callsArgWith(1, undefined, [fixtures.forms.dataSources.get()]); + + stub.throws("Invalid Arguments"); + + return stub; + } +}; \ No newline at end of file diff --git a/test/stubs/services/appForms/index.js b/test/stubs/services/appForms/index.js new file mode 100644 index 0000000..d5984f4 --- /dev/null +++ b/test/stubs/services/appForms/index.js @@ -0,0 +1,4 @@ + +module.exports = { + dataSources: require('./dataSources') +}; \ No newline at end of file diff --git a/test/stubs/services/appmbaas/index.js b/test/stubs/services/appmbaas/index.js new file mode 100644 index 0000000..df3b16b --- /dev/null +++ b/test/stubs/services/appmbaas/index.js @@ -0,0 +1,31 @@ +var sinon = require('sinon'); +var fixtures = require('../../../fixtures'); + +module.exports = { + + listDeployedServices: function () { + var stub = sinon.stub(); + + stub.withArgs(sinon.match({ + domain: fixtures.mockDomain, + environment: fixtures.mockEnv + }), sinon.match.func).callsArgWith(1, undefined, [fixtures.services.deployedService()]); + + stub.throws("Invalid Arguments"); + + return stub; + }, + getDeployedService: function (returnNothing) { + var stub = sinon.stub(); + + stub.withArgs(sinon.match({ + domain: fixtures.mockDomain, + environment: fixtures.mockEnv, + guid: fixtures.services.get().guid + }), sinon.match.func).callsArgWith(1, undefined, returnNothing ? 
undefined : fixtures.services.deployedService()); + + stub.throws("Invalid Arguments"); + + return stub; + } +}; \ No newline at end of file diff --git a/test/stubs/services/index.js b/test/stubs/services/index.js new file mode 100644 index 0000000..25bf621 --- /dev/null +++ b/test/stubs/services/index.js @@ -0,0 +1,6 @@ + + +module.exports = { + appForms: require('./appForms'), + appmbaas: require('./appmbaas') +}; \ No newline at end of file diff --git a/test/unit/appdata/import/common.js b/test/unit/appdata/import/common.js new file mode 100644 index 0000000..5854985 --- /dev/null +++ b/test/unit/appdata/import/common.js @@ -0,0 +1,44 @@ +const fhConfig = require('fh-config'); +const EventEmitter = require('events').EventEmitter; + +const TEST_IMPORT_FOLDER = '/test/import/folder'; +const TEST_IMPORT_FILE = 'test_import_file.tar'; + +const TEST_OUTPUT_GZIPS = ['collection1.bson.gz', 'collection2.bson.gz', 'collection3.bson.gz', 'collection4.bson.gz']; +const TEST_OUTPUT_FILES = ['collection1.bson', 'collection2.bson', 'collection3.bson', 'collection4.bson']; + +const contextBuilder = require('lib/jobs/context').contextBuilder; + +fhConfig.setRawConfig({ + fhditch: { + protocol: 'http', + host: 'testing.feedhenry.me', + port: '8802', + service_key: '1a2b3c4d5e6f1a2b3c4d5e6f1a2b3c4d5e6f' + } +}); + +function createContext() { + + return contextBuilder() + .withApplicationInfo( + { guid: 'imtdbquweha2dq5etc7qrazv', + env: 'dev'} + ) + .withEventEmitter(new EventEmitter()) + .withCustomAtt('input', { + path: TEST_IMPORT_FOLDER + '/' + TEST_IMPORT_FILE + }) + .withJobModel({_id: { + toString: function() { + return '123'; + } + }}) + .build(); +} + +module.exports.createContext = createContext; +module.exports.TEST_IMPORT_FOLDER = TEST_IMPORT_FOLDER; +module.exports.TEST_IMPORT_FILE = TEST_IMPORT_FILE; +module.exports.TEST_OUTPUT_FILES = TEST_OUTPUT_FILES; +module.exports.TEST_OUTPUT_GZIPS = TEST_OUTPUT_GZIPS; \ No newline at end of file diff --git a/test/unit/appdata/import/test-appDataImportRunner.js b/test/unit/appdata/import/test-appDataImportRunner.js new file mode 100644 index 0000000..ed95155 --- /dev/null +++ b/test/unit/appdata/import/test-appDataImportRunner.js @@ -0,0 +1,126 @@ +const proxyquire = require('proxyquire'); +const sinon = require('sinon'); +const assert = require('assert'); +const _ = require('underscore'); + +const fhConfig = require('fh-config'); + +const createContext = require('./common').createContext; +const TEST_IMPORT_FOLDER = require('./common').TEST_IMPORT_FOLDER; +const TEST_OUTPUT_FILES = require('./common').TEST_OUTPUT_FILES; + + +var preparationStepsMock= { + appInfo: undefined, + prepareForImport: sinon.spy(function(context, cb) { + context.input.folder = TEST_IMPORT_FOLDER; + context.appInfo = preparationStepsMock.appInfo; + context.input.progress = { + total: TEST_OUTPUT_FILES.length * 2, + // we simulate files has already been extracted + current: TEST_OUTPUT_FILES.length + }; + context.output = { + folder: context.input.folder, + files: TEST_OUTPUT_FILES + }; + if (context.appInfo) { + if (context.appInfo.dbConf) { + return cb(null, context); + } else { + return cb('Specified app has not been upgraded yet', context); + } + } else { + return cb('Specified app could not be found', context); + } + }) +}; + +var mongoImportMock = { + mongoImport: sinon.spy(function(host, port, database, filename, cb) { + cb(); + }) +}; + +const FINISH_EVENT = require('lib/jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = 
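The createContext helper above is built on the fluent contextBuilder exported by lib/jobs/context; the same builder appears again in the export tests further down. A compact sketch of how a job context is assembled (guid, env and the custom attribute values are placeholders):

var EventEmitter = require('events').EventEmitter;
var contextBuilder = require('lib/jobs/context').contextBuilder;

// The minimum a runner needs: application info, an emitter for progress
// events, any custom attributes, and a job model that can render an id.
var context = contextBuilder()
  .withApplicationInfo({ guid: 'someappguid', env: 'dev' })
  .withEventEmitter(new EventEmitter())
  .withCustomAtt('input', { path: '/test/import/folder/some_import.tar' })
  .withJobModel({ _id: { toString: function() { return 'job-1'; } } })
  .build();

// Preparation and import/export steps receive this object and extend it
// (context.input, context.output, context.collections and so on).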
require('lib/jobs/progressPublisher').FAIL_EVENT; + +fhConfig.setRawConfig({ + fhditch: { + protocol: 'http', + host: 'testing.feedhenry.me', + port: '8802', + service_key: '1a2b3c4d5e6f1a2b3c4d5e6f1a2b3c4d5e6f' + } +}); + +module.exports.aatest_app_data_import_runner = function(done) { + preparationStepsMock.appInfo = { + _id : '5723913f3b263ee13207211b', + id : '5723913f3b263ee13207211b', + accessKey : '5723913f3b263ee13207211a', + apiKey : 'fce3f01c631c2b001758e0f9a533016d72d90775', + coreHost : 'https://testing.feedhenry.me', + dbConf : { + pass : 'amRSIyORlHJbob', + user : 'auaf5Gl8AyGBYb', + name : 'testing-imtdbquweha2dq5etc7qrazv-dev', + port : 27017, + host : 'node1.feedhenry.local' + }, + domain : 'testing', + environment : 'dev', + guid : 'imtdbquweha2dq5etc7qrazv', + isServiceApp : false, + mbaasUrl : 'https://mbaas.feedhenry.me/', + migrated : true, + name : 'testing-imtdbquweha2dq5etc7qrazv-dev', + type : 'feedhenry', + url : 'https://testing-imtdbquweha2dq5etc7qrazv-dev.feedhenry.me' + }; + + const AppDataImportRunner = proxyquire( + 'lib/appdata/import/appDataImportRunner', + {'./preparationSteps': preparationStepsMock, + './appDataImport': mongoImportMock}).AppDataImportRunner; + + var context = createContext(); + + var appDataImportRunner = new AppDataImportRunner(context) + .on(FINISH_EVENT, function() { + assert.equal(context.input.progress.current, context.input.progress.total); + assert.equal(mongoImportMock.mongoImport.callCount, TEST_OUTPUT_FILES.length); + assert.equal(_.difference(mongoImportMock.mongoImport.getCall(0).args.slice(0, -1), ['node1.feedhenry.local',27017,'testing-imtdbquweha2dq5etc7qrazv-dev','collection1.bson']), 0); + assert.equal(_.difference(mongoImportMock.mongoImport.getCall(1).args.slice(0, -1), ['node1.feedhenry.local',27017,'testing-imtdbquweha2dq5etc7qrazv-dev','collection2.bson']), 0); + assert.equal(_.difference(mongoImportMock.mongoImport.getCall(2).args.slice(0, -1), ['node1.feedhenry.local',27017,'testing-imtdbquweha2dq5etc7qrazv-dev','collection3.bson']), 0); + assert.equal(_.difference(mongoImportMock.mongoImport.getCall(3).args.slice(0, -1), ['node1.feedhenry.local',27017,'testing-imtdbquweha2dq5etc7qrazv-dev','collection4.bson']), 0); + done(); + }) + .on(FAIL_EVENT, function(message) { + done(message); + }); + appDataImportRunner.run(); + +}; + + +module.exports.aatest_app_data_import_app_not_found = function(done) { + preparationStepsMock.appInfo = undefined; + const AppDataImportRunner = proxyquire( + 'lib/appdata/import/appDataImportRunner', + {'./preparationSteps': preparationStepsMock, + './appDataImport': mongoImportMock}).AppDataImportRunner; + + var context = createContext(); + + var appDataImportRunner = new AppDataImportRunner(context) + .on(FINISH_EVENT, function() { + done('Import should have failed'); + }) + .on(FAIL_EVENT, function(message) { + assert.equal('Specified app could not be found', message); + done(); + }); + + appDataImportRunner.run(); +}; \ No newline at end of file diff --git a/test/unit/appdata/import/test-import-middleware.js b/test/unit/appdata/import/test-import-middleware.js new file mode 100644 index 0000000..31e1a5f --- /dev/null +++ b/test/unit/appdata/import/test-import-middleware.js @@ -0,0 +1,74 @@ +"use strict"; + +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var sinon = require('sinon'); +var fhConfig = require("fh-config"); + +fhConfig.setRawConfig({ + fhmbaas: { + "appdata_jobs" : { + "upload_dir" : "/var/feedhenry/upload" + } + } +}); + + +var MOCK_FILE_ID = 
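The two runner tests above capture the event-driven shape used throughout these suites: proxyquire swaps the preparation and import steps for spies, the runner is driven with run(), and the test resolves from FINISH_EVENT or FAIL_EVENT. A compressed sketch of the same shape; the trimmed-down appInfo here is an assumption, and the real runner may need the full fixture fields shown above:

var proxyquire = require('proxyquire');
var sinon = require('sinon');
var assert = require('assert');

var createContext = require('./common').createContext;
var FINISH_EVENT = require('lib/jobs/progressPublisher').FINISH_EVENT;
var FAIL_EVENT = require('lib/jobs/progressPublisher').FAIL_EVENT;

module.exports.test_runner_emits_finish = function(done) {
  // Preparation succeeds and primes the context the way the real step would.
  var prepareForImport = sinon.spy(function(context, cb) {
    context.input.folder = '/test/import/folder';
    context.appInfo = { dbConf: { host: 'localhost', port: 27017, name: 'somedb' } };
    context.input.progress = { total: 2, current: 1 };
    context.output = { folder: context.input.folder, files: ['collection1.bson'] };
    cb(null, context);
  });
  var mongoImport = sinon.spy(function(host, port, database, filename, cb) { cb(); });

  var AppDataImportRunner = proxyquire('lib/appdata/import/appDataImportRunner', {
    './preparationSteps': { prepareForImport: prepareForImport },
    './appDataImport': { mongoImport: mongoImport }
  }).AppDataImportRunner;

  var runner = new AppDataImportRunner(createContext())
    .on(FINISH_EVENT, function() {
      assert.ok(prepareForImport.calledOnce);
      assert.equal(mongoImport.callCount, 1); // one file in context.output.files
      done();
    })
    .on(FAIL_EVENT, done);

  runner.run();
};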
"575a76f0c2a6a0f341209b47"; + +var middleware = proxyquire('../../../../lib/middleware/appdata_import', { + "../storage": { + registerFileForUpload: function (filepath, filesize, callback) { + assert.equal(filepath, "/var/feedhenry/upload/test"); + assert.equal(filesize, 1024); + callback(null, { + _id: MOCK_FILE_ID + }); + } + } +}); + +exports.test_register_upload = function(done) { + var req = { + params: { + filename: "test", + filesize: 1024 + } + }; + + middleware.registerUpload(req, null, function (err) { + assert.equal(err, undefined); + assert.equal(req.params.fileId, MOCK_FILE_ID); + done(); + }); +}; + +exports.test_invalid_filesize = function(done) { + var req = { + params: { + filename: "test", + filesize: null + } + }; + + middleware.registerUpload(req, null, function (err) { + // Expect an error here + assert.ok(err); + done(); + }); +}; + +exports.test_invalid_filename = function(done) { + var req = { + params: { + filename: null, + filesize: 1024 + } + }; + + middleware.registerUpload(req, null, function (err) { + // Expect an error here + assert.ok(err); + done(); + }); +}; diff --git a/test/unit/appdata/import/test-preparationSteps.js b/test/unit/appdata/import/test-preparationSteps.js new file mode 100644 index 0000000..d73e7fa --- /dev/null +++ b/test/unit/appdata/import/test-preparationSteps.js @@ -0,0 +1,124 @@ +const fhconfig = require('fh-config'); +fhconfig.setRawConfig({}); + +const path = require('path'); +const assert = require('assert'); + +const createContext = require('./common').createContext; + +const TEST_IMPORT_FOLDER = require('./common').TEST_IMPORT_FOLDER; +const TEST_OUTPUT_GZIPS = require('./common').TEST_OUTPUT_GZIPS; +const TEST_IMPORT_FILE = require('./common').TEST_IMPORT_FILE; +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const mongoose = require('mongoose'); +const mockgoose = require('mockgoose'); +const _ = require('underscore'); + +mockgoose(mongoose); + +const models = require('fh-mbaas-middleware').models; + +models.init({mongoUrl: 'dummyurl'}, function() { + +}); + + +function resetDatabase(cb) { + mockgoose.reset(); + var AppMbaasModel = models.getModels().AppMbaas; + + var app = new AppMbaasModel( + { + accessKey : '5723913f3b263ee13207211a', + apiKey : 'fce3f01c631c2b001758e0f9a533016d72d90775', + coreHost : 'https://testing.feedhenry.me', + dbConf : { + pass : 'amRSIyORlHJbob', + user : 'auaf5Gl8AyGBYb', + name : 'testing-imtdbquweha2dq5etc7qrazv-dev', + port : 27017, + host : 'node1.feedhenry.local' + }, + domain : 'testing', + environment : 'dev', + guid : 'imtdbquweha2dq5etc7qrazv', + isServiceApp : false, + mbaasUrl : 'https://mbaas.feedhenry.me/', + migrated : true, + name : 'testing-imtdbquweha2dq5etc7qrazv-dev', + type : 'feedhenry', + url : 'https://testing-imtdbquweha2dq5etc7qrazv-dev.feedhenry.me' + } + ); + + app.save(function(err) { + cb(err); + }); +} + +var commonsMock = { + extractTarFile: sinon.spy(function(context, cb) { + context.input.folder = TEST_IMPORT_FOLDER; + cb(null, context); + }), + gunzip: sinon.spy(function gunzip(folder, file, cb) { + var outFile = file.slice(0, -3); + cb(null, outFile); + }) +}; + +var fsStub = { + readdir: function(folder, cb) { + var ary = TEST_OUTPUT_GZIPS.slice(0); + ary.push(path.basename(TEST_IMPORT_FILE)); + cb(null, ary); + }, + unlink: sinon.spy(function(path, cb) { + cb(null); + }) +}; + +module.exports.test_app_data_import_preparation_steps = function(done) { + resetDatabase(function() { + var context = createContext(); + + const 
prepareForImport = proxyquire('lib/appdata/import/preparationSteps', + { '../shared/common': commonsMock, + 'fs': fsStub + }).prepareForImport; + + prepareForImport(context, function(err) { + if (err) { + return done(err); + } + + assert.ok(commonsMock.extractTarFile.calledOnce); + + assert.equal(commonsMock.gunzip.callCount, 4); + + _.each(commonsMock.gunzip.getCalls(), function(call, index) { + assert.equal(call.args[0], TEST_IMPORT_FOLDER); + assert.equal(call.args[1], TEST_OUTPUT_GZIPS[index]); + }); + + assert.equal(fsStub.unlink.callCount, 4); + + _.each(fsStub.unlink.getCalls(), function(call, index) { + assert.equal(call.args[0], TEST_IMPORT_FOLDER + '/' + TEST_OUTPUT_GZIPS[index]); + }); + + assert.equal(context.output.folder, TEST_IMPORT_FOLDER); + assert.equal(context.output.folder, TEST_IMPORT_FOLDER); + + _.each(context.output.files, function(file, index) { + assert.equal(file, TEST_OUTPUT_GZIPS[index].slice(0,-3)); + }); + + // we don't perform import steps + assert.equal(context.progress.current, context.progress.total - TEST_OUTPUT_GZIPS.length); + + done(err); + }); + }); +}; \ No newline at end of file diff --git a/test/unit/dataSourceUpdater/handlers/test-processEnvDataSources.js b/test/unit/dataSourceUpdater/handlers/test-processEnvDataSources.js new file mode 100644 index 0000000..dbc894f --- /dev/null +++ b/test/unit/dataSourceUpdater/handlers/test-processEnvDataSources.js @@ -0,0 +1,92 @@ +var sinon = require('sinon'); +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var fixtures = require('../../../fixtures'); +var stubs = require('../../../stubs'); +var fhConfig = require('fh-config'); +fhConfig.setRawConfig(fixtures.config); + +var logger = fhConfig.getLogger(); + +var mockLogger = { + getLogger: function(){ + return { + logger: logger + }; + } +}; + +module.exports = { + "It Should Process Data Sources For An Environment": function(done){ + var mockEnvConfig = fixtures.envConfig(); + var currentTime = new Date(); + + var dsListForUpdate = stubs.services.appForms.dataSources.listForUpdate(); + var listDeployedServices = stubs.services.appmbaas.listDeployedServices(); + + var updateSingleDataSourceStub = stubs.dataSourceUpdater.handlers.updateSingleDataSource(false); + + var mocks = { + '../../../services/appForms/dataSources': { + listForUpdate: dsListForUpdate + }, + './updateSingleDataSource': updateSingleDataSourceStub, + '../../../services/appmbaas/listDeployedServices': listDeployedServices, + '../logger': mockLogger + }; + + var processEnvDataSources = proxyquire('../../../../lib/dataSourceUpdater/lib/handlers/processEnvDataSources', mocks); + + processEnvDataSources({ + currentTime: currentTime, + envConfigEntry: mockEnvConfig + }, function(err){ + assert.ok(!err, "Expected No Error " + err); + + sinon.assert.calledOnce(dsListForUpdate); + sinon.assert.calledOnce(updateSingleDataSourceStub); + + sinon.assert.calledOnce(listDeployedServices); + + done(); + }); + }, + "It Should Process Update The Data Source With An Error If There Is No Deployed Service": function(done){ + var mockEnvConfig = fixtures.envConfig(); + var currentTime = new Date(); + + var dsListStub = stubs.services.appForms.dataSources.listForUpdate(); + + //No Deployed Services + var listDeployedServices = sinon.stub().callsArgWith(1, undefined, []); + + var updateSingleDataSourceStub = stubs.dataSourceUpdater.handlers.updateSingleDataSource(true); + + var mocks = { + '../../../services/appForms/dataSources': { + listForUpdate: dsListStub + }, + 
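The preparation-steps test above is also a template for using mockgoose with the fh-mbaas-middleware models: patch mongoose before the models are initialised, then reset and reseed between tests. A condensed sketch of that bootstrap; the saved document is cut down, and the real AppMbaas schema may require the extra fields shown in the fixture above:

var fhconfig = require('fh-config');
fhconfig.setRawConfig({});

var mongoose = require('mongoose');
var mockgoose = require('mockgoose');

// Patch mongoose before any model is created so every connection is in-memory.
mockgoose(mongoose);

var models = require('fh-mbaas-middleware').models;
models.init({ mongoUrl: 'dummyurl' }, function() {});

function seedApp(cb) {
  // Drop whatever earlier tests left behind, then store a single app record.
  mockgoose.reset();
  var AppMbaas = models.getModels().AppMbaas;
  new AppMbaas({
    guid: 'someappguid',
    domain: 'testing',
    environment: 'dev',
    dbConf: { host: 'localhost', port: 27017, name: 'testing-someappguid-dev' }
  }).save(cb);
}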
'./updateSingleDataSource': updateSingleDataSourceStub, + '../../../services/appmbaas/listDeployedServices': listDeployedServices, + '../logger': mockLogger + }; + + var processEnvDataSources = proxyquire('../../../../lib/dataSourceUpdater/lib/handlers/processEnvDataSources', mocks); + + processEnvDataSources({ + currentTime: currentTime, + envConfigEntry: mockEnvConfig + }, function(err){ + assert.ok(!err, "Expected No Error"); + + sinon.assert.calledOnce(dsListStub); + + //The Update Data Source Function Should Have Been Called With An Error + sinon.assert.calledOnce(updateSingleDataSourceStub); + + sinon.assert.calledOnce(listDeployedServices); + + done(); + }); + } +}; \ No newline at end of file diff --git a/test/unit/dataSourceUpdater/handlers/test-requestEndpointData.js b/test/unit/dataSourceUpdater/handlers/test-requestEndpointData.js new file mode 100644 index 0000000..169b08d --- /dev/null +++ b/test/unit/dataSourceUpdater/handlers/test-requestEndpointData.js @@ -0,0 +1,66 @@ +var sinon = require('sinon'); +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var fixtures = require('../../../fixtures'); +var fhConfig = require('fh-config'); +fhConfig.setRawConfig(fixtures.config); +var logger = fhConfig.getLogger(); + +var mockLogger = { + getLogger: function(){ + return { + logger: logger + }; + } +}; + +module.exports = { + "It Should Request A Data Source Data Set": function(done){ + var mockAccessKey = "serviceaccess1234"; + var mockUrl = "https://some.service.host.com/some/data/endpoint"; + var mockDSData = [{ + key: "dskey1", + value: "Value 1", + selected: true + }, + { + key: "dskey2", + value: "Value 2", + selected: false + }]; + var getStub = sinon.stub(); + getStub.withArgs(sinon.match({ + url: mockUrl, + headers: sinon.match({ + 'X-FH-SERVICE-ACCESS-KEY': mockAccessKey + }), + json: true + }), sinon.match.func) + .callsArgWith(1, undefined, { + statusCode: 200 + }, mockDSData); + + getStub.throws("Invalid Arguments"); + + + var mocks = { + 'request': { + get: getStub + }, + '../logger': mockLogger + }; + + var requestEnvDataSources = proxyquire('../../../../lib/dataSourceUpdater/lib/handlers/requestEndpointData', mocks); + + requestEnvDataSources({ + fullUrl: mockUrl, + accessKey: mockAccessKey + }, function(err, data){ + assert.ok(!err, "Expected No Error"); + + assert.equal("dskey1", data[0].key); + sinon.assert.calledOnce(getStub); + done(); + }); + } +}; \ No newline at end of file diff --git a/test/unit/dataSourceUpdater/handlers/test-updateDataSourceCache.js b/test/unit/dataSourceUpdater/handlers/test-updateDataSourceCache.js new file mode 100644 index 0000000..d88fdaf --- /dev/null +++ b/test/unit/dataSourceUpdater/handlers/test-updateDataSourceCache.js @@ -0,0 +1,91 @@ +var sinon = require('sinon'); +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var stubs = require('../../../stubs'); +var fixtures = require('../../../fixtures'); +var fhConfig = require('fh-config'); +fhConfig.setRawConfig(fixtures.config); +var logger = fhConfig.getLogger(); + +var mockLogger = { + getLogger: function(){ + return { + logger: logger + }; + } +}; + +module.exports = { + "It Should Update Data Source Cache No Error": function (done) { + var mockDSWithData = fixtures.forms.dataSources.withData(); + var mockDSData = mockDSWithData.data; + var mockMongoUrl = fixtures.envConfig().dbConf.expectedMongoUrl; + + var updateCacheStub = stubs.forms.core.dataSources.updateCache(); + + var mocks = { + 'fh-forms': { + core: { + dataSources: { + 
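The endpoint-data test above also shows the house pattern for faking the request module: match on URL, the X-FH-SERVICE-ACCESS-KEY header and the json flag, and answer with the (error, response, body) triple that request passes to its callback. Distilled (factory name and values are illustrative):

var sinon = require('sinon');

function stubRequestGet(expectedUrl, expectedAccessKey, body) {
  var get = sinon.stub();

  get.withArgs(sinon.match({
    url: expectedUrl,
    headers: sinon.match({ 'X-FH-SERVICE-ACCESS-KEY': expectedAccessKey }),
    json: true
  }), sinon.match.func)
    .callsArgWith(1, undefined, { statusCode: 200 }, body); // (err, response, body)

  get.throws("Invalid Arguments");
  return get;
}

// Behaves like request.get for the expected call only:
var get = stubRequestGet('https://host/endpoint', 'accesskey1234', [{ key: 'k1' }]);
get({
  url: 'https://host/endpoint',
  headers: { 'X-FH-SERVICE-ACCESS-KEY': 'accesskey1234' },
  json: true
}, function(err, response, data) {
  // err is undefined, response.statusCode is 200, data is the canned body
});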
updateCache: updateCacheStub + } + } + }, + '../logger': mockLogger + }; + + var updateDataSourceCache = proxyquire('../../../../lib/dataSourceUpdater/lib/handlers/updateDataSourceCache', mocks); + + updateDataSourceCache({ + currentTime: new Date(), + mongoUrl: mockMongoUrl, + data: mockDSData, + dataSourceId: mockDSWithData._id + }, function (err, updatedDataSource) { + assert.ok(!err, "Expected No Error"); + + assert.equal(updatedDataSource._id, mockDSWithData._id); + assert.equal(updatedDataSource.data[0].key, mockDSData[0].key); + sinon.assert.calledOnce(updateCacheStub); + + done(); + }); + }, + "It Should Update Data Source Cache With Error": function (done) { + var mockDSWithError = fixtures.forms.dataSources.withError(); + var mockDSError = mockDSWithError.currentStatus.error; + var mockMongoUrl = fixtures.envConfig().dbConf.expectedMongoUrl; + + var updateCacheStub = stubs.forms.core.dataSources.updateCache(); + + var mocks = { + 'fh-forms': { + core: { + dataSources: { + updateCache: updateCacheStub + } + } + }, + '../logger': mockLogger + }; + + var updateDataSourceCache = proxyquire('../../../../lib/dataSourceUpdater/lib/handlers/updateDataSourceCache', mocks); + + updateDataSourceCache({ + currentTime: new Date(), + mongoUrl: mockMongoUrl, + error: mockDSError, + dataSourceId: mockDSWithError._id + }, function (err, updatedDataSource) { + assert.ok(!err, "Expected No Error"); + + assert.equal(updatedDataSource._id, mockDSWithError._id); + assert.equal(updatedDataSource.data, undefined); + assert.equal(updatedDataSource.currentStatus.status, "error"); + assert.equal(updatedDataSource.currentStatus.error.code, "DS_ERROR"); + sinon.assert.calledOnce(updateCacheStub); + + done(); + }); + } +}; \ No newline at end of file diff --git a/test/unit/export/commonPreparationSteps/testCreateOutputDir.js b/test/unit/export/commonPreparationSteps/testCreateOutputDir.js new file mode 100644 index 0000000..85e3eca --- /dev/null +++ b/test/unit/export/commonPreparationSteps/testCreateOutputDir.js @@ -0,0 +1,66 @@ +const assert = require('assert'); +const proxyquire = require('proxyquire'); +const sinon = require('sinon'); +const path = require('path'); +const target = '../../../../lib/export/commonPreparationSteps/createOutputDir'; + +describe('Common Export Preparation Steps', function() { + + describe('createOutputDir', function() { + const outputDir = "/non/existing/outputdir"; + const context = createContext(outputDir); + const mkdirp = mkdirpStub(context); + const createOutputDir = proxyquire(target, {'mkdirp': mkdirp}); + + it("It should successfully create the output directory", function(done) { + createOutputDir(context, function(err, context) { + assert(mkdirp.calledOnce); + assert.equal(context.path, context.outputPath, "context.path should have been set to the created output dir."); + done(); + }); + }); + + it("It should fail if context.outputPath is not present on the context", function(done) { + context.outputPath = 'bogus'; + createOutputDir(context, function(err, context) { + assert(err, "Should error if call does not have the context.outputPath set."); + done(); + }); + }); + + }); + +}); + +function mkdirpStub(context) { + const mkdirp = sinon.stub(); + mkdirp.withArgs(sinon.match(context.outputPath), sinon.match.func).yields(undefined); + mkdirp.yields("Call did not have the required 'outputPath' property set on its context"); + return mkdirp; +} + +function pathFrom(context) { + var exportJob = context.exportJob; + return path.join(context.outputDir, exportJob.domain, 
exportJob.environment, exportJob.jobId); +} + +function createContext(outputDir) { + const fhConfig = require('fh-config'); + fhConfig.setRawConfig({}); + const TaggedLogger = require('../../../../lib/jobs/taggedLogger').TaggedLogger; + const logger = fhConfig.getLogger(); + + const context = { + exportJob: { + domain: 'mochdomain', + environment: 'mochevid', + status: "created", + jobId: 'mochJobId' + }, + outputDir: outputDir, + logger : new TaggedLogger(logger.child({job: 'mockJobId'}), 'TESTSUBMISSION_PREPARATION') + } + context.outputPath = pathFrom(context); + return context; +} + diff --git a/test/unit/export/submissions/testPreparationSteps.js b/test/unit/export/submissions/testPreparationSteps.js new file mode 100644 index 0000000..6b63ef5 --- /dev/null +++ b/test/unit/export/submissions/testPreparationSteps.js @@ -0,0 +1,112 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire'); +var sinon = require('sinon'); +var _ = require('underscore'); +var fixtures = require('../../../fixtures/index.js'); + +function stubFindOne(context, failWithError) { + var stub = sinon.stub(); + var options = { + domain: context.exportJob.domain, + environment: context.exportJob.environment + }; + + if(failWithError){ + stub.withArgs(sinon.match(options), sinon.match.func).callsArgWithAsync(1, "MOCK ERROR"); + } else { + var envDb = { + dbConf: fixtures.envConfig().dbConf + }; + stub.withArgs(sinon.match(options), sinon.match.func).callsArgWithAsync(1, null, envDb); + } + + stub.withArgs(sinon.match.object, sinon.match.func).callsArgWithAsync(1, null, undefined); + return stub; +} + +function setUpMocks(context, failWithError){ + var self = this; + this.mbaasMiddleware = mbaasMiddlewareStub(stubFindOne(context, failWithError)); + var preparation = proxyquire('../../../../lib/export/submissions/preparationSteps.js', { + 'fh-mbaas-middleware': this.mbaasMiddleware, + 'connectToDatabase': sinon.stub().yields(), + 'retrieveCollectionsSize': sinon.stub().yields(), + 'reserveSpaceIfAvailable': sinon.stub().yields(), + 'createOutputDir': sinon.stub().yields() + }); + this.prepare = preparation.prepare; +} + +describe('Submissions Export Preparation', function(){ + + describe('It should get the environment database configuration from fh-mbaas-middleware', function() { + var context = createContext("mockdomain", "mockenvid"); + + it("It should successfully call getEnvDbConf", function(done) { + var that = this; + _.bind(setUpMocks, this)(context); + this.prepare(context, function(err, context) { + assert.ok(context.uri, 'mongodb://someuser:somepassword@some.mongo.host,some.mongo.host2:27017/mockdomain_mockenv'); + done(); + }); + }); + + it("It should return an error if context.subExportJob.domain is undefined", function(done) { + _.bind(setUpMocks, this)(context); + this.prepare(createContext(undefined, "mockenvid"), function(err, callback) { + assert.equal(err, 'No Environment Database available for environment mockenvid and domain undefined'); + done(); + }); + }); + + it("It should return an error if context.subExportJob.environment is undefined", function(done) { + _.bind(setUpMocks, this)(context); + this.prepare(createContext("mockdomain", undefined), function(err, callback) { + assert.equal(err, 'No Environment Database available for environment undefined and domain mockdomain'); + done(); + }); + }); + + it("It should return an error if getEnvDbConf fails", function(done) { + _.bind(setUpMocks, this)(context, true); + this.prepare(context, function(err, callback) { + assert.ok(err, 
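The createContext helpers in these export tests share the same logging setup: seed fh-config with a minimal raw config, then wrap the resulting logger in a TaggedLogger so every line carries the job id and a greppable tag. A condensed sketch of that shared piece (tag and job id are placeholders):

var fhConfig = require('fh-config');
fhConfig.setRawConfig({}); // enough for getLogger() to work in a unit test

var TaggedLogger = require('../../../../lib/jobs/taggedLogger').TaggedLogger;

function testLogger(jobId, tag) {
  // child() attaches the job id to every log line; TaggedLogger prefixes the tag.
  return new TaggedLogger(fhConfig.getLogger().child({ job: jobId }), tag);
}

var logger = testLogger('mockJobId', '[TEST_EXPORT]');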
'should return an error if domain in undefined'); + assert.equal(err, 'Error getting environment database: MOCK ERROR'); + done(); + }); + }); + + }); + +}); + +function createContext(domain, environment) { + var fhConfig = require('fh-config'); + fhConfig.setRawConfig({}); + var TaggedLogger = require('../../../../lib/jobs/taggedLogger').TaggedLogger; + var logger = fhConfig.getLogger(); + return { + exportJob: { + domain: domain, + environment: environment, + status: "created", + jobId: 'mockJobId' + }, + outputDir: "/some/output/dir/for/data", + logger : new TaggedLogger(logger.child({job: 'mockJobId'}), 'TESTSUBMISSION_PREPARATION') + }; +} + +function mbaasMiddlewareStub(findOneStub) { + return { + models: { + getModels: function() { + return { + Mbaas: { + findOne: findOneStub + } + } + } + } + }; +} diff --git a/test/unit/export/submissions/testSubmissionExportJob.js b/test/unit/export/submissions/testSubmissionExportJob.js new file mode 100644 index 0000000..934ef0d --- /dev/null +++ b/test/unit/export/submissions/testSubmissionExportJob.js @@ -0,0 +1,21 @@ +const fixtures = require('../../../fixtures'); +const assert = require('assert'); +const fhConfig = require('fh-config'); +fhConfig.setRawConfig(fixtures.config); + +describe("Submission Export Job", function() { + + it("all required depedencies should be resolved", function(done) { + try { + require('../../../../lib/jobs/submissions/submissionExportJob.js'); + assert.ok(true, "All dependencies should have been resolved"); + } catch(err) { + assert.ifError(err); + } finally { + done(); + } + }); + +}); + + diff --git a/test/unit/export/submissions/testSubmissionsDataExport.js b/test/unit/export/submissions/testSubmissionsDataExport.js new file mode 100644 index 0000000..cb8048b --- /dev/null +++ b/test/unit/export/submissions/testSubmissionsDataExport.js @@ -0,0 +1,99 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire'); +var sinon = require('sinon'); +var fhConfig = require('fh-config'); +var _ = require('underscore'); + +fhConfig.setRawConfig({ +}); + +var logger = fhConfig.getLogger(); +var TaggedLogger = require('../../../../lib/jobs/taggedLogger').TaggedLogger; + +var progressPublisher = require('../../../../lib/jobs/progressPublisher'); + +const PROGRESS_EVENT = progressPublisher.PROGRESS_EVENT; + + +describe("Submission Data Export", function() { + + before(function(done) { + var mockSubmissionExportJobId = "somesubmissionexportjobid"; + const LOG_TAG = "[TESTSUBMISSIONEXPORT]"; + + this.context = { + collections: ["formsubmissions", "fileStorage.files", "fileStorage.chunks"], + uri: "mongodb://mock.mongo.url/mockdomain_mockenvid", + exportJob: { + domain: "mockdomain", + environment: "mockenvid", + status: "created" + }, + jobID: mockSubmissionExportJobId, + outputDir: "/some/output/dir/for/data", + logger : new TaggedLogger(logger.child({job: mockSubmissionExportJobId}), LOG_TAG) + }; + + done(); + }); + + describe("It should handle exporting submissions collections", function() { + + before(function(done) { + + var SubmissionExportRunner = require('../../../../lib/export/submissions/SubmissionExportRunner').SubmissionExportRunner; + + this.runner = new SubmissionExportRunner(this.context); + + //Listening for events from the submission export process + this.progressEventSpy = sinon.spy(); + + this.runner.on(PROGRESS_EVENT, this.progressEventSpy); + + this.mongoExportWrapperStub = sinon.stub(); + this.mongoExportWrapperStub.withArgs(sinon.match.object, sinon.match(this.context), 
sinon.match("formsubmissions") + .or(sinon.match("fileStorage.files")) + .or(sinon.match("fileStorage.chunks")), + sinon.match("formsubmissions.bson.gz") + .or(sinon.match("fileStorage.files.bson.gz")) + .or(sinon.match("fileStorage.chunks.bson.gz")), sinon.match.number, sinon.match(3), sinon.match.func) + .callsArgWith(6, null); + this.mongoExportWrapperStub.throws("Invalid Arguments"); + + this.createExportArchiveStub = sinon.stub(); + this.createExportArchiveStub.withArgs(sinon.match(this.context), sinon.match.func) + .callsArgWith(1, null, this.context); + this.createExportArchiveStub.throws("Invalid Arguments"); + + var mocks = { + '../mongoExportFunctions': { + mongoExportWrapper: this.mongoExportWrapperStub, + createExportArchive: this.createExportArchiveStub + } + }; + + this.submissionDataExport = proxyquire('../../../../lib/export/submissions/submissionDataExport', mocks); + + //The export data function is always bound to the submission runnder + this.exportData = _.bind(this.submissionDataExport.exportData, this.runner); + + this.exportData(this.context, done); + }); + + it("It should export three submissions callections (formsubmissions, fileStorage.files, fileStorage.chunks)", function(done) { + sinon.assert.calledThrice(this.mongoExportWrapperStub); + done(); + }); + + it("It should emit progress event for each of the collections", function(done) { + sinon.assert.calledThrice(this.progressEventSpy); + sinon.assert.calledWith(this.progressEventSpy, "running", 1, 3); + sinon.assert.calledWith(this.progressEventSpy, "running", 2, 3); + sinon.assert.calledWith(this.progressEventSpy, "running", 3, 3); + done(); + }); + + }); + + +}); \ No newline at end of file diff --git a/test/unit/export/submissions/testSubmissionsExportRunner.js b/test/unit/export/submissions/testSubmissionsExportRunner.js new file mode 100644 index 0000000..373fed9 --- /dev/null +++ b/test/unit/export/submissions/testSubmissionsExportRunner.js @@ -0,0 +1,184 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire'); +var sinon = require('sinon'); +var fhConfig = require('fh-config'); +var _ = require('underscore'); + +fhConfig.setRawConfig({ +}); + +var logger = fhConfig.getLogger(); +var TaggedLogger = require('../../../../lib/jobs/taggedLogger').TaggedLogger; + +var progressPublisher = require('../../../../lib/jobs/progressPublisher'); + +const PROGRESS_EVENT = progressPublisher.PROGRESS_EVENT; +const STATUS_EVENT = progressPublisher.STATUS_EVENT; +const FINISH_EVENT = progressPublisher.FINISH_EVENT; +const FAIL_EVENT = progressPublisher.FAIL_EVENT; +const HEARTBEAT_EVENT = 'heartbeat'; + +function stubWithContextCallback(context, failWithError) { + var stub = sinon.stub(); + + + if(failWithError){ + stub.withArgs(sinon.match(context), sinon.match.func).callsArgWithAsync(1, "MOCK ERROR"); + } else { + stub.withArgs(sinon.match(context), sinon.match.func).callsArgWithAsync(1, null, context); + } + + + stub.throws("Invalid Arguments"); + + return stub; +} + +function setUpMocks(failOnPrepare){ + this.registerStorageStub = stubWithContextCallback(this.context); + + this.updateModelWithStorageDataStub = stubWithContextCallback(this.context); + + this.cleanUpStub = stubWithContextCallback(this.context); + + this.exportDataStub = stubWithContextCallback(this.context); + + this.prepareStub = stubWithContextCallback(this.context, failOnPrepare); + + var mocks = { + '../commonJobFunctions': { + registerStorage: this.registerStorageStub, + updateModelWithStorageData: 
this.updateModelWithStorageDataStub, + cleanUp: this.cleanUpStub + }, + './submissionDataExport': { + exportData: this.exportDataStub + }, + './preparationSteps': { + prepare: this.prepareStub + } + }; + + var SubmissionExportRunner = proxyquire('../../../../lib/export/submissions/SubmissionExportRunner', mocks).SubmissionExportRunner; + + this.runner = new SubmissionExportRunner(this.context); + + //Listening for events from the submission export process + this.progressEventSpy = sinon.spy(); + + this.statusEventSpy = sinon.spy(); + + this.finishEventSpy = sinon.spy(); + + this.failEventSpy = sinon.spy(); + + this.heartbeatEventSpy = sinon.spy(); + + this.runner.on(PROGRESS_EVENT, this.progressEventSpy); + + this.runner.on(STATUS_EVENT, this.statusEventSpy); + + this.runner.on(FINISH_EVENT, this.finishEventSpy); + + this.runner.on(FAIL_EVENT, this.failEventSpy); + + this.runner.on(HEARTBEAT_EVENT, this.heartbeatEventSpy); +} + + +describe('Submissions Export Runner', function(){ + + + before(function(done){ + + var mockSubmissionExportJobId = "somesubmissionexportjobid"; + const LOG_TAG = "[TESTSUBMISSIONEXPORT]"; + + this.context = { + collections: ["formsubmissions", "fileStorage.files", "fileStorage.chunks"], + exportJob: { + domain: "mockdomain", + environment: "mockenvid", + status: "created" + }, + jobID: mockSubmissionExportJobId, + outputDir: "/some/output/dir/for/data", + logger : new TaggedLogger(logger.child({job: mockSubmissionExportJobId}), LOG_TAG) + }; + + done(); + }); + + + describe('It should execute all of the required steps for submission export', function() { + before(function(done) { + _.bind(setUpMocks, this)(); + + this.runner.run(); + + done(); + }); + + it("It should call a preparation step", function(done) { + sinon.assert.calledOnce(this.prepareStub); + done(); + }); + + it("It should call an export step", function(done){ + sinon.assert.calledOnce(this.exportDataStub); + done(); + }); + + it("It should call a cleanup step", function(done){ + sinon.assert.calledOnce(this.cleanUpStub); + done(); + }); + + it("It should emit a progress event with completion", function(done){ + sinon.assert.calledOnce(this.progressEventSpy); + sinon.assert.calledWith(this.progressEventSpy, sinon.match("complete"), sinon.match.number, sinon.match.number); + done(); + }); + + it("It should NOT emit a fail event", function(done){ + sinon.assert.notCalled(this.failEventSpy); + done(); + }); + + }); + + describe("It should handler errors during a submission export", function(){ + + before(function(done){ + + //The preparation step should fail. 
+ _.bind(setUpMocks, this)(true); + + this.runner.run(); + + done(); + }); + + it("It should emit an error event when there has been an error", function(done){ + sinon.assert.calledOnce(this.failEventSpy); + done(); + }); + + it("It should NOT call an export step", function(done){ + sinon.assert.notCalled(this.exportDataStub); + done(); + }); + + it("It should call a cleanup step", function(done){ + sinon.assert.calledOnce(this.cleanUpStub); + done(); + }); + + it("It should NOT emit a progress event with completion", function(done){ + sinon.assert.notCalled(this.progressEventSpy); + done(); + }); + + }); + +}); \ No newline at end of file diff --git a/test/unit/export/testAppDataExportCleanerRunner.js b/test/unit/export/testAppDataExportCleanerRunner.js new file mode 100644 index 0000000..22e4b43 --- /dev/null +++ b/test/unit/export/testAppDataExportCleanerRunner.js @@ -0,0 +1,196 @@ +var mongoose = require('mongoose'); +var mockgoose = require('mockgoose'); +mockgoose(mongoose); +var async = require('async'); +var sinon = require('sinon'); +var proxyquire = require('proxyquire'); +var assert = require('assert'); + +var fhConfig = require('fh-config'); + +fhConfig.setRawConfig({ + fhditch: { + protocol: 'http', + host: 'testing.feedhenry.me', + port: '8802', + service_key: '1a2b3c4d5e6f1a2b3c4d5e6f1a2b3c4d5e6f' + }, + fhmbaas: { + appdataexport: { + default_lock_lifetime: 15000, + output_dir: "/var/feedhenry/data", + schedule_time: 30000, + cleaner: { + frequency: "*/1 * * * *", + grace_time: 10 + } + } + } +}); + + +mongoose.connect('mongodb://localhost/myapp'); + +var ExportJobModule = require('lib/models/index'); + +ExportJobModule.init(mongoose, function(err) {}); + +var ExportJob = require('lib/models/index').AppdataJob; +var FileStore = require('lib/models/index').File; + + +var fsMock = { + unlink: sinon.spy(function (path, cb) { + cb(); + }), + exists: function(path, cb) { + cb(true); + }, + stat: sinon.spy(function(path, cb) { + cb(); + }) +}; + +var rimrafMock = sinon.spy(function(path, cb) { + cb(); +}); + +var storageMock = { + deleteFile: function(fileId, cb) { + cb(); + } +}; + +var Cleaner = proxyquire('lib/export/cleaner/appDataExportCleanerRunner', + {'fs': fsMock, + '../../storage/index': storageMock, + 'rimraf': rimrafMock} + ).AppDataExportCleanerRunner; + +const FINISH_EVENT = require('lib/jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('lib/jobs/progressPublisher').FAIL_EVENT; +const ProgressPublisher = require('lib/jobs/progressPublisher').ProgressPublisher; + +var TaggedLogger = new require('lib/jobs/taggedLogger').TaggedLogger; +var logger = new TaggedLogger(fhConfig.getLogger(), '[APPDATAEXPORT CLEANER TEST]'); + +function createExportJob(exportJobFixture, cb) { + var ej = new ExportJob(); + ej.jobType='export'; + ej.id = exportJobFixture.id; + ej.created = exportJobFixture.created; + ej.domain = exportJobFixture.domain; + ej.environment = exportJobFixture.environment; + ej.appid = exportJobFixture.appid; + ej.status = exportJobFixture.status; + ej.metadata = {}; + ej.updateMetadata('fileId', exportJobFixture.metadata.fileId); + ej.updateMetadata('filePath', exportJobFixture.metadata.filePath); + + ej.save(function (err) { + cb(err); + }); +} + +function createFileStore(fileStoreFixture, cb) { + var filestore = new FileStore(); + filestore.created = fileStoreFixture.created; + filestore.directory = fileStoreFixture.directory; + filestore.fileName = fileStoreFixture.fileName; + filestore.host = fileStoreFixture.host; + filestore.size = 
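The cleaner test above checks its side effects purely through spies: fs, rimraf and the storage layer are replaced with functions that call back immediately but remember every invocation, and the assertions after the FINISH_EVENT count those calls. The core of that pattern, runnable on its own (paths are illustrative):

var sinon = require('sinon');
var assert = require('assert');

// Fakes that always succeed but record what they were asked to delete.
var rimrafMock = sinon.spy(function(path, cb) { cb(); });
var fsMock = { unlink: sinon.spy(function(path, cb) { cb(); }) };

// In the real test these are handed to the cleaner via proxyquire; here they
// are called directly to show what the later assertions can see.
rimrafMock('/var/feedhenry/data/old-export', function() {});
fsMock.unlink('/var/feedhenry/data/old-export.tar', function() {});

assert.ok(rimrafMock.calledOnce);
assert.equal(rimrafMock.firstCall.args[0], '/var/feedhenry/data/old-export');
assert.ok(fsMock.unlink.calledOnce);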
fileStoreFixture.size; + filestore.save(cb); +} + +function initdb(cb) { + + var exportData = require('test/fixtures/appdata/export/exportjobs.json'); + var fileStore = require('test/fixtures/appdata/export/filestore.json'); + + async.parallel([ + async.apply(async.each, exportData, createExportJob), + async.apply(async.each, fileStore, createFileStore) + ], function (err) { + cb(err); + }); +} + +function fileStoreStatus(id, exists, cb) { + FileStore.find({id: id}, function (err, filestore) { + if (err) { + return cb(err); + } + + if (filestore && filestore.length > 0 && !exists) { + return cb('Filestore ' + id + ' exists'); + } + + if ((!filestore || filestore.length == 0) && exists) { + return cb('Filestore ' + id + ' does not exists'); + } + return cb(); + }) +} +function dropCollections(cb) { + if (mongoose.connection.collections.appdatajobs) { + mongoose.connection.collections['appdatajobs'].drop(function (err) { + console.log('Collection dropped'); + cb(err); + }); + } +} + +function closeConnection(cb) { + mongoose.connection.close(cb); +} + +module.exports.test_export_cleaner = function(done) { + mockgoose.reset(); + async.series([ + initdb + ], function(err) { + if (err) { + return done(err); + } + + // Mockgoose seems to have problem with '$and' at the root of the query. + // In the test lets override the query by removing that and filter. + var olderThanDate = new Date(); + olderThanDate.setDate(olderThanDate.getDate() - 10); + query = { created: {$lte: olderThanDate }, $or: [ {status: 'complete'}, {status: 'failed'}] }; + + var context = { + logger: logger, + query: query + }; + + var cleaner = new Cleaner(context); + + cleaner.on(FINISH_EVENT, function() { + + assert.ok(rimrafMock.called); + assert.ok(rimrafMock.callCount >= 3 , 'Should be called at least 3 times'); + + async.series([ + initdb, + closeConnection + ], function(err) { + done(err); + }); + }); + + cleaner.on(FAIL_EVENT, function(message) { + done(message); + }); + + var publisherFunction = function(message) { + logger.info ('EVENT:', message); + }; + + // We do not want to make 'batch' update, so we persist each received message: queue size = 1 + var progressPublisher = new ProgressPublisher(1, publisherFunction); + progressPublisher.listen(cleaner); + + cleaner.run(); + }); +}; diff --git a/test/unit/export/testAppDataExportPreparationSteps.js b/test/unit/export/testAppDataExportPreparationSteps.js new file mode 100644 index 0000000..9e36538 --- /dev/null +++ b/test/unit/export/testAppDataExportPreparationSteps.js @@ -0,0 +1,187 @@ +var fhConfig = require('fh-config'); +var proxyquire = require('proxyquire'); +var sinon = require('sinon'); +var _ = require('underscore'); +var assert = require('assert'); +const contextBuilder = require('lib/jobs/context').contextBuilder; + +fhConfig.setRawConfig({ + fhditch: { + protocol: 'http', + host: 'testing.feedhenry.me', + port: '8802', + service_key: '1a2b3c4d5e6f1a2b3c4d5e6f1a2b3c4d5e6f' + }, + fhdfc: { + cache_timeout: 300 + } +}); + +var logger = fhConfig.getLogger(); + +var mongo = require('../../stubs/mongo/mongoMocks'); +////////////// DITCH MOCK + +const MOCK_DB_URL = 'mongodb://user:pass@host:port:dbName'; + +//var mongo = mongoMocks([{name:'a', size:1024}, {name:'b', size:2048}]); + +var ditchHelperStub = { + getAppInfo: function(appName, cb) { + + mongo.MongoClient.connect(MOCK_DB_URL, function(err, db) { + db.collectionNames(function (err, collectionNames) { + cb(null, {uri: MOCK_DB_URL, collections: collectionNames}); + }); + }); + } +} + +var modelsMock = { 
+ exportJobs:{}, + AppdataJob: { + id: 1234, + _id: { + toString: function() { + return '1234'; + } + }, + progress: undefined, + status: 'created', + domain: 'testing', + environment: 'dev', + appid: 'ad7eykyyaqpcei52a5owfi2a', + metadata: { + fileSize: 0, + fileDeleted: null, + filePath: null, + fileId: null, + stopApp: true + }, + updateMetadata: function (field, value) { + this.metadata[field] = value; + }, + aggregate: function(params, cb) { + var total = 0; + + for (var i in this.exportJobs) { + var job = this.exportJobs[i]; + if (job.status === 'created' || jobs.status === 'progress') { + total += job.metadata.fileSize; + } + } + + cb(null, [{total: total}]); + }, + save: function (cb) { + var self = this; + modelsMock.exportJobs[this.id] = { + status: self.status, + progress: self.progress + }; + cb(); + }, + set: function(field, value) { + switch (field) { + case 'progress': + this.progress = value; + break; + case 'status': + this.status = value; + break; + } + } + } +}; + +var diskSpaceMock = { + check: sinon.spy(function(outDir, cb) { + cb(null, 1000000, 1000000, undefined); + }) +}; + +var mkdirpMock = sinon.spy(function(path, cb) { + cb(null); +}); + +var stopAppStub = sinon.stub(); +stopAppStub.withArgs(sinon.match.array, sinon.match.func).callsArg(1); + +mongo['@global'] = true; +modelsMock['@global'] = true; +diskSpaceMock['@global'] = true; +mkdirpMock['@global'] = true; +fhConfig['@global'] = true; +var preparationSteps = proxyquire('lib/export/preparationSteps', { + '../util/ditchhelper': ditchHelperStub, + 'mongodb': mongo, + '../models' : modelsMock, + 'diskspace': diskSpaceMock, + 'mkdirp': mkdirpMock, + 'fh-config': fhConfig, + '../util/dfutils': proxyquire('../../../lib/util/dfutils', { + 'fh-config': fhConfig, + 'fh-dfc': function () { + return { + "stop-app": stopAppStub, + dynos : function(args, cb){ + return cb(); + }, + + 'read-app': function(args, cb){ + return cb(); + } + } + } + }) +}); + +var mockAppInfo = { + name: 'app1-123456678-dev', + guid: '123456678', + environment: 'dev' +}; + +var exportJob = modelsMock.AppdataJob; +var TaggedLogger = require('lib/jobs/taggedLogger').TaggedLogger; + +module.exports.test_prepare_export = function(done) { + var context = contextBuilder() + .withApplicationInfo(mockAppInfo) + .withJobModel(exportJob) + .withCustomAtt('outputDir', '/tmp') + .withLogger(new TaggedLogger(logger, '[APPDATAEXPORT]')) + .build(); + + preparationSteps.prepare(context, function(err, ctx) { + assert.ok(mkdirpMock.calledOnce); + assert.equal(mkdirpMock.args[0][0], '/tmp/123456678/dev/1234'); + assert.ok(diskSpaceMock.check.calledOnce); + assert.ok(stopAppStub.calledOnce); + done(err); + }); +}; + +module.exports.test_prepare_export_no_stop = function(done) { + + var context = contextBuilder() + .withApplicationInfo(mockAppInfo) + .withJobModel(exportJob) + .withCustomAtt('outputDir', '/tmp') + .withLogger(new TaggedLogger(logger, '[APPDATAEXPORT]')) + .build(); + + context.jobModel.metadata.stopApp = false; + mkdirpMock.reset(); + diskSpaceMock.check.reset(); + stopAppStub.reset(); + + preparationSteps.prepare(context, function(err, ctx) { + assert.ok(mkdirpMock.calledOnce); + assert.equal(mkdirpMock.args[0][0], '/tmp/123456678/dev/1234'); + assert.ok(diskSpaceMock.check.calledOnce); + assert.equal(false, stopAppStub.calledOnce); + done(err); + }); +}; + diff --git a/test/unit/export/testAppDataExportRunner.js b/test/unit/export/testAppDataExportRunner.js new file mode 100644 index 0000000..78fbb13 --- /dev/null +++ 
b/test/unit/export/testAppDataExportRunner.js @@ -0,0 +1,148 @@ +var fhConfig = require('fh-config'); +var proxyquire = require('proxyquire'); +var sinon = require('sinon'); +var _ = require('underscore'); +const contextBuilder = require('lib/jobs/context').contextBuilder; + +const FINISH_EVENT = require('lib/jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('lib/jobs/progressPublisher').FAIL_EVENT; + +fhConfig.setRawConfig({ + fhditch: { + protocol: 'http', + host: 'testing.feedhenry.me', + port: '8802', + service_key: '1a2b3c4d5e6f1a2b3c4d5e6f1a2b3c4d5e6f' + }, + openshift3: { + }, + "fhdfc": { + "dynofarm": "http://localhost:9000", + "username":"DYNOFARM_USERNAME", + "_password": "DYNOFARM_PASSWORD", + "loglevel": "warn", + "cache_timeout": 300000 + }, +}); + +var logger = fhConfig.getLogger(); +fhConfig["@global"] = true; +fhConfig.getLogger = sinon.stub().returns(logger); + +var assert = require('assert'); +var sinon = require('sinon'); + +var modelsMock = { + exportJobs:{}, + AppdataJob: { + id: 1234, + _id: { + toString: function() { + return '1234'; + } + }, + progress: undefined, + status: 'created', + metadata: { + fileSize: 0, + fileDeleted: null, + filePath: null, + fileId: null + }, + updateMetadata: function (field, value) { + this.metadata[field] = value; + }, + aggregate: function(params, cb) { + var total = 0; + + for (var i in this.exportJobs) { + var job = this.exportJobs[i]; + if (job.status === 'created' || jobs.status === 'progress') { + total += job.metadata.fileSize; + } + } + + cb(null, [{total: total}]); + }, + save: function (cb) { + var self = this; + modelsMock.exportJobs[this.id] = { + status: self.status, + progress: self.progress + }; + cb(); + }, + set: function(field, value) { + switch (field) { + case 'progress': + this.progress = value; + break; + case 'status': + this.status = value; + break; + } + } + } +}; + +var prepStepsMock = { + prepare: function(context, cb) { + context.collections=['collection1','collection2','collection3']; + context.size= 4096; + cb(null, context); + } +}; + +var appDataExportMock = { + exportData: function(context, cb) { + context.archive = {path: '/tmp/data.tar'}; + cb(null, context); + } +}; + +var fileModelMock = { + id: sinon.spy() +} + +var storageMock = { + '@global': true, + registerFile: function(path, cb) { + cb(null, fileModelMock); + } +} + +var AppExportRunner = proxyquire('lib/export/AppDataExportRunner', + { + './preparationSteps': prepStepsMock, + './appDataExport': appDataExportMock, + '../../storage': storageMock, + 'fh-config': fhConfig + }).AppExportRunner; + +module.exports.test_export_shared_app = function(done) { + var mockAppInfo = { + name: 'app1-123456678-dev', + guid: '123456678', + environment: 'dev' + }; + + var exportJob = modelsMock.AppdataJob; + + var context = contextBuilder() + .withApplicationInfo(mockAppInfo) + .withJobModel(exportJob) + .withCustomAtt('outputDir', '/tmp') + .withLogger(logger) + .build(); + + var appExportRunner = new AppExportRunner(context) + .on(FINISH_EVENT, function() { + done(); + }).on(FAIL_EVENT, function(message) { + done('failed ' + JSON.stringify(message)); + }); + + + appExportRunner.run(); +}; + diff --git a/test/unit/export/testFormatCollectionName.js b/test/unit/export/testFormatCollectionName.js new file mode 100644 index 0000000..8ac0379 --- /dev/null +++ b/test/unit/export/testFormatCollectionName.js @@ -0,0 +1,22 @@ +"use strict"; + +var format = require('../../../lib/export/appDataExport').formatCollectionName; +var assert = 
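Several of the stubs in the two export tests above carry '@global': true. That is proxyquire's flag for applying a stub to the whole require chain of the module under test, not only to the file named in the proxyquire call, which matters here because the preparation and export steps are required indirectly. A small sketch of the flag on its own; it is lifted out of the fuller proxyquire call above, so by itself it may still need the other stubs that call supplies:

var proxyquire = require('proxyquire');
var sinon = require('sinon');

// A mkdirp fake that records calls and always succeeds.
var mkdirpMock = sinon.spy(function(path, cb) { cb(null); });

// Without '@global', only lib/export/preparationSteps would see the stub; with
// it, every module that file requires also gets this object for 'mkdirp'.
mkdirpMock['@global'] = true;

var preparationSteps = proxyquire('lib/export/preparationSteps', {
  'mkdirp': mkdirpMock
});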
require('assert'); + +module.exports.test_no_underscores = function (done) { + var collectionName = "fh_testing-ezcuviwmulsalshv6nnj27vk-dev_colname"; + assert.equal(format(collectionName), "colname"); + done(); +}; + +module.exports.test_underscores = function (done) { + var collectionName = "fh_testing-ezcuviwmulsalshv6nnj27vk-dev_col_name"; + assert.equal(format(collectionName), "col_name"); + done(); +}; + +module.exports.test_improper = function (done) { + var collectionName = "improper-collection_name"; + assert.equal(format(collectionName), "improper-collection_name"); + done(); +}; diff --git a/test/unit/export/testGetSecondaryReplSetHost.js b/test/unit/export/testGetSecondaryReplSetHost.js new file mode 100644 index 0000000..6e3e5c7 --- /dev/null +++ b/test/unit/export/testGetSecondaryReplSetHost.js @@ -0,0 +1,150 @@ +"use strict"; + +var proxyquire = require('proxyquire'); +var assert = require('assert'); + +var ISODate = function (t) { + return new Date(t); +}; +var Timestamp = function (t) { + return new Date(t); +}; + +var mockNoReplicaSet = {"ok": 0, "errmsg": "not running with --replSet"}; + +var mockReplicaSet = { + "set": "eng1-mbaas1_rs1", + "date": ISODate("2016-05-26T07:08:47Z"), + "myState": 2, + "syncingTo": "eng1-mbaas1-ship2:27017", + "members": [ + { + "_id": 0, + "name": "eng1-mbaas1-mgt1:27017", + "health": 0, + "state": 8, + "stateStr": "(not reachable/healthy)", + "uptime": 0, + "optime": Timestamp(1464095884, 1), + "optimeDate": ISODate("2016-05-24T13:18:04Z"), + "lastHeartbeat": ISODate("2016-05-26T07:08:47Z"), + "lastHeartbeatRecv": ISODate("2016-05-24T13:18:06Z"), + "pingMs": 0 + }, + { + "_id": 1, + "name": "eng1-mbaas1-ship1:27017", + "health": 1, + "state": 2, + "stateStr": "SECONDARY", + "uptime": 596220, + "optime": Timestamp(1464246524, 1), + "optimeDate": ISODate("2016-05-26T07:08:44Z"), + "self": true + }, + { + "_id": 2, + "name": "eng1-mbaas1-ship2:27017", + "health": 1, + "state": 1, + "stateStr": "PRIMARY", + "uptime": 92637, + "optime": Timestamp(1464246524, 1), + "optimeDate": ISODate("2016-05-26T07:08:44Z"), + "lastHeartbeat": ISODate("2016-05-26T07:08:45Z"), + "lastHeartbeatRecv": ISODate("2016-05-26T07:08:46Z"), + "pingMs": 9, + "syncingTo": "eng1-mbaas1-mgt1:27017" + } + ], + "ok": 1 +}; + +var mockMongo = null; + +var mockChildProcess = { + exec: function (command, cb) { + var parts = command.split('--eval'); + + assert.ok(parts.length > 0); + + var command = parts[1]; + + // Need to remove the quotes from the command + command = command.substring(0, command.length - 1); + command = command.substring(2); + + // Simulates the command that is executed in mongodb + function evalInContext() { + try { + return eval(command); + } catch(err) { + return null; + } + } + + var host = evalInContext.call(mockMongo); + cb(null, host); + } +}; + +var getHost = proxyquire('../../../lib/export/mongoExportFunctions/getSecondaryReplSetHost', { + "child_process": mockChildProcess +}); + +// Make sure that it finds the secondary replica set in a setup that +// has replica sets +module.exports.test_get_host_happy = function (done) { + mockMongo = { + rs: { + status: function () { + return mockReplicaSet; + } + } + }; + + getHost(function (err, result) { + assert.equal(err, null); + assert.equal(result.host, "eng1-mbaas1-ship1"); + assert.equal(result.port, "27017"); + done(); + }); +}; + +// Make sure that it can also deal with non existing ports +module.exports.test_get_host_no_port = function (done) { + mockReplicaSet.members[1].name = "eng1-mbaas1-ship1"; 
+ mockMongo = { + rs: { + status: function () { + return mockReplicaSet; + } + } + }; + + getHost(function (err, result) { + assert.equal(err, null); + assert.equal(result.host, "eng1-mbaas1-ship1"); + assert.equal(result.port, "27017"); + done(); + }); +}; + + +// Make sure that it returns null in a setup that does not have +// replica sets +module.exports.test_get_host_no_repl_set = function (done) { + mockMongo = { + rs: { + status: function () { + return mockNoReplicaSet; + } + } + }; + + getHost(function (err, result) { + assert.equal(err, null); + assert.equal(result, null); + done(); + }); +}; diff --git a/test/unit/handlers/analytics/test_messaging.js b/test/unit/handlers/analytics/test_messaging.js new file mode 100644 index 0000000..f04bd06 --- /dev/null +++ b/test/unit/handlers/analytics/test_messaging.js @@ -0,0 +1,51 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire'); +var util = require('util'); +var sinon = require('sinon'); + +var undertest = '../../../../lib/handlers/analytics/messaging'; + +var mockMessageClient = { + createAppMessage:function(topic,messages,cb){ + assert.ok(Array.isArray(messages)); + var lastId; + messages.forEach(function (m){ + if(lastId) { + assert.ok(m.guid === lastId,"expected all id to be the same"); + } + else{ + lastId = m.guid; + } + }); + + return cb(); + } +}; + +exports.test_create_app_message_fail = function(finish){ + var handler = proxyquire(undertest,{}); + var req = {"body":{},"params":{}}; + var res = {}; + res.end = sinon.stub(); + + handler(mockMessageClient).createAppMessage(req,res, function (err){ + assert.ok(err, "expected an error " + util.inspect(err)); + return finish(); + }); + +}; + + +exports.test_create_app_message_ok = function(finish){ + var handler = proxyquire(undertest,{}); + var req = {"body":[{"guid":"testid"},{"guid":"notmeid"},{"guid":"testid"}],"params":{"topic":"fhact","appid":"testid"}}; + var res = {}; + res.end = sinon.stub(); + + handler(mockMessageClient).createAppMessage(req,res, function (err){ + assert.ok(! 
err, "did not expect an error " + util.inspect(err)); + + }); + assert.ok(res.end.called,"expected end to be called"); + finish(); +}; \ No newline at end of file diff --git a/test/unit/handlers/analytics/test_metrics.js b/test/unit/handlers/analytics/test_metrics.js new file mode 100644 index 0000000..f0496de --- /dev/null +++ b/test/unit/handlers/analytics/test_metrics.js @@ -0,0 +1,83 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire'); +var util = require('util'); + + +var undertest = '../../../../lib/handlers/analytics/metrics'; + +var domRes = {"domaininstallsgeo":[],"domainrequestsdest":[{"_id":{"domain":"testing","ts":1444694400000},"value":{"other":25,"total":25}}],"domainrequestsgeo":[],"domainstartupsdest":[],"domainstartupsgeo":[],"domaintransactionsdest":[{"_id":{"domain":"testing","ts":1444694400000},"value":{"other":2,"total":2}}],"domaintransactionsgeo":[],"domaininstallsdest":[]} +var appRes = {"appinstallsgeo":[],"apprequestsdest":[{"_id":{"appid":"pe4g7d26q6wet742ak7wbgp5","domain":"testing","ts":1444694400000},"value":{"other":8,"total":8}},{"_id":{"appid":"gyiaeopx26i3pl6ls5652ipc","domain":"testing","ts":1444694400000},"value":{"other":17,"total":17}}],"apprequestsgeo":[],"appstartupsdest":[],"appstartupsgeo":[],"apptransactionsdest":[{"_id":{"appid":"pe4g7d26q6wet742ak7wbgp5","domain":"testing","ts":1444694400000},"value":{"other":1}},{"_id":{"appid":"gyiaeopx26i3pl6ls5652ipc","domain":"testing","ts":1444694400000},"value":{"other":1}}],"appinstallsdest":[],"apptransactionsgeo":[]}; + +var fhmetricsMock = { + getAllDomainMetrics : function (params,cb){ + return cb(undefined,domRes); + }, + getAllAppMetrics : function (params,cb){ + return cb(undefined,appRes); + } +}; + +var fhmetricsMockError = { + getAllDomainMetrics : function (params,cb){ + return cb({"error":"test error"}); + }, + getAllAppMetrics : function (params,cb){ + return cb({"error":"test error"}); + } +}; + + + +exports.test_get_mbaas_metrics_ok = function (finish){ + var mock = { + 'fh-metrics-client' : function (conf){ + return fhmetricsMock; + } + }; + + var test = proxyquire(undertest,mock); + var req = {"query":{"from": new Date().getTime(),"to":new Date().getTime()}}; + var res = {}; + test.getMetrics(req,res, function (err, ok){ + assert.ok(! err, "did not expect an err" + util.inspect(err)); + assert.ok(ok,"expected a response "); + assert.ok(! Array.isArray(ok)); + assert.ok(res.metrics,"expected metrics on res"); + finish(); + }); + +}; + +exports.test_get_mbaas_metrics_error = function (finish){ + var mock = { + 'fh-metrics-client' : function (conf){ + return fhmetricsMockError; + } + }; + + var test = proxyquire(undertest,mock); + var req = {"query":{"from": new Date().getTime(),"to":new Date().getTime()}}; + var res = {}; + test.getMetrics(req,res, function (err, ok){ + assert.ok(err, " expected an err" + util.inspect(err)); + assert.ok(! 
ok, " did not expect a response ");
+    finish();
+  });
+};
+
+exports.test_get_mbaas_metrics_400_error = function (finish){
+  var mock = {
+    'fh-metrics-client': function (conf){
+      return fhmetricsMockError;
+    }
+  };
+
+  var test = proxyquire(undertest, mock);
+  var req = {"query": {}};
+  var res = {};
+  test.getMetrics(req, res, function (err, ok){
+    assert.ok(err, " expected an err" + util.inspect(err));
+    assert.ok(err.code === 400, " expected a 400 err code " + util.inspect(err));
+    finish();
+  });
+};
diff --git a/test/unit/handlers/app/test_db.js b/test/unit/handlers/app/test_db.js
new file mode 100644
index 0000000..4d485f6
--- /dev/null
+++ b/test/unit/handlers/app/test_db.js
@@ -0,0 +1,89 @@
+var assert = require('assert');
+var proxyquire = require('proxyquire');
+var util = require('util');
+var sinon = require('sinon');
+
+var undertest = '../../../../lib/handlers/app/db.js';
+
+
+module.exports.test_get_connection_string_ok = function (finish){
+  var req = {"appMbaasModel": {
+    "dbConf": {
+      "host": "10.2.3.4,10.3.4.5,10.4.5.6",
+      "pass": "test",
+      "user": "test",
+      "name": "test",
+      "port": 27017
+    }
+  }}, res = {};
+  var next = sinon.spy(function (err){
+
+  });
+
+  var reload = sinon.spy(function (workers, callback){
+    return callback();
+  });
+
+  var value = sinon.spy(function (val){
+    return "10.5.4.3,10.5.4.3,10.5.4.3";
+  });
+
+  var db = proxyquire(undertest, {
+    "fh-config": {
+      "reload": reload,
+      "value": value
+    }
+  });
+
+  db.getConnectionString(req, res, next);
+  assert(next.calledOnce, "expected next to be called once");
+  assert(req.resultData, "expected result data");
+  assert.ok(req.resultData.url, "expected a url value");
+  assert.ok(req.resultData.url.indexOf("mongodb") === 0, "expected a mongodb connection string");
+  finish();
+
+};
+
+module.exports.test_get_connection_string_not_ok_without_dbconf = function (finish){
+  var db = proxyquire(undertest, {});
+  var req = {}, res = {};
+  var next = sinon.spy(function (err){
+    assert.ok(err, "expected an error to be returned");
+    assert.ok(err.code === 404, "error code should be 404");
+  });
+
+  db.getConnectionString(req, res, next);
+  assert(next.calledOnce, "expected next to be called once");
+  finish();
+};
+
+
+module.exports.test_get_connection_string_not_ok_when_fhconfig_reload_fails = function (finish){
+
+  var req = {"appMbaasModel": {
+    "dbConf": {
+      "host": "10.2.3.4,10.3.4.5,10.4.5.6",
+      "pass": "test",
+      "user": "test",
+      "name": "test"
+    }
+  }}, res = {};
+  var next = sinon.spy(function (err){
+    assert.ok(err, "expected an error to be returned");
+  });
+
+  var reload = sinon.spy(function (workers, callback){
+    return callback({"message": "failed to reload"});
+  });
+
+  var db = proxyquire(undertest, {
+    "fh-config": {
+      "reload": reload
+    }
+  });
+
+  db.getConnectionString(req, res, next);
+  assert(next.calledOnce, "expected next to be called once");
+  finish();
+};
+
diff --git a/test/unit/handlers/app/test_forms.js b/test/unit/handlers/app/test_forms.js
new file mode 100644
index 0000000..73fd935
--- /dev/null
+++ b/test/unit/handlers/app/test_forms.js
@@ -0,0 +1,172 @@
+var supertest = require('supertest');
+var proxyquire = require('proxyquire');
+var fixtures = require('../../../fixtures');
+var stubs = require('../../../stubs');
+var express = require('express');
+var assert = require('assert');
+var sinon = require('sinon');
+var util = require('util');
+var fhConfig = require('fh-config');
+fhConfig.setRawConfig(fixtures.config);
+var logger = fhConfig.getLogger();
+var fs = require('fs');
+var _ = require('underscore');
+var
bodyParser = require('body-parser'); +var baseRoutePath = '/:domain/:environment/:projectid/:appid/appforms'; +var baseUrl = '/mockdomain/mockenv/mockproject/mockapp/appforms'; +var archiver = require('archiver'); + +describe("Forms App Submissions Router", function() { + + function setRequestParams(coreHost, fileUriPath, cloudAppUrl, cloudAppGuid) { + return function setRequestParameters(req, res, next) { + req.appMbaasModel = { + 'coreHost': coreHost, + 'url': cloudAppUrl, + 'guid': cloudAppGuid + }; + req.fileUriPath = fileUriPath; + + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }; + } + + function assertInternalServerError(app, url, done) { + supertest(app) + .get(url) + .expect(500) + .end(function (err) { + assert.ok(!err, "Expected Invalid Arguments" + util.inspect(err)); + done(); + }); + } + + describe("submissions/:id/exportpdf tests", function() { + var mockSubmission = fixtures.forms.submissions.get(); + var exportpdfUrl = baseUrl + '/submissions/' + mockSubmission._id + '/exportpdf'; + var mockPDFFileLocation = "/some/path/to/generated/file.pdf"; + var formsRouter; + + var getValueStub = sinon.stub(); + getValueStub.withArgs(sinon.match('fhmbaas.pdfExportDir')).returns(mockPDFFileLocation); + + beforeEach(function createDownloadFile() { + fs.closeSync(fs.openSync(mockSubmission.downloadFile, 'w')); + }); + + before(function createRouter() { + var generatePDFStub = stubs.forms.core.generateSubmissionPdf(); + var deps = { + 'fh-forms': { + '@global': true, + core: { + generateSubmissionPdf: generatePDFStub + } + }, + 'fh-mbaas-middleware': _.clone(stubs.mbaasMiddleware), + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger), + value: getValueStub + } + }; + formsRouter = proxyquire('../../../../lib/handlers/app/forms.js', deps); + }); + + it("should export submission when all parameters are provided", function(done) { + var app = express(); + var expectedUrl = "https://some.path.to.cloud.app/mbaas/forms/somecloudappguid/submission/:id/file/:fileId"; + app.use(setRequestParams(mockSubmission.location, mockSubmission.fileUrlPath, expectedUrl)); + app.use(baseRoutePath, formsRouter); + + supertest(app) + .get(exportpdfUrl) + .expect(200) + .expect('Content-Type', /pdf/) + .expect(function verifyResponse(response) { + assert.ok(response); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + done(); + }); + }); + + it("should return 500 if coreHost is missing", function(done) { + var app = express(); + app.use(setRequestParams(undefined, mockSubmission.fileUrlPath)); + app.use(baseRoutePath, formsRouter); + + assertInternalServerError(app, exportpdfUrl, done); + }); + + it("should return 500 if fileUriPath is missing", function(done) { + var app = express(); + app.use(setRequestParams(mockSubmission.location, undefined)); + app.use(baseRoutePath, formsRouter); + + assertInternalServerError(app, exportpdfUrl, done); + }); + + }); + + describe("submissions/export tests", function() { + var mockSubmission = fixtures.forms.submissions.get(); + var exportCSVUrl = baseUrl + '/submissions/export'; + var mockAppGuid = "somecloudappguid"; + + var mockAppUrl = "https://some.path.to.cloud.app"; + + var expectedUrl = "https://some.path.to.cloud.app/mbaas/forms/somecloudappguid/submission/:id/file/:fileId"; + var formsRouter; + + //Proxyquire was causing errors with archiver. + //Proxyquiring archiver with the module seems to have solved the proble. 
+ archiver['@global'] = true; + + before(function createRouter() { + var exportCSVStub = stubs.forms.core.exportSubmissions(expectedUrl); + var deps = { + 'fh-forms': { + '@global': true, + core: { + exportSubmissions: exportCSVStub + } + }, + 'fh-mbaas-middleware': _.clone(stubs.mbaasMiddleware), + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger), + value: fhConfig.value + }, + 'archiver': archiver + }; + formsRouter = proxyquire('../../../../lib/handlers/app/forms.js', deps); + }); + + it("should export submissions when all parameters are provided", function(done) { + var app = express(); + + app.use(bodyParser.json()); + app.use(setRequestParams(mockSubmission.location, mockSubmission.fileUrlPath, mockAppUrl, mockAppGuid)); + app.use(baseRoutePath, formsRouter); + + supertest(app) + .post(exportCSVUrl) + .send({}) + .expect(200) + .expect('Content-Type', "application/zip") + .expect('Content-disposition', 'attachment; filename=submissions.zip') + .expect(function verifyResponse(response) { + assert.ok(response); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + done(); + }); + }); + }); +}); + + diff --git a/test/unit/handlers/stats/test_stats.js b/test/unit/handlers/stats/test_stats.js new file mode 100644 index 0000000..0a0495b --- /dev/null +++ b/test/unit/handlers/stats/test_stats.js @@ -0,0 +1,82 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire'); +var util = require('util'); +var sinon = require('sinon'); + +var fixtures = require('../../../fixtures'); +var fhConfig = require('fh-config'); +var logger = fhConfig.getLogger(); + +var mockLogger = function() { + return { + debug: function() {}, + trace: function() {} + } +}; + +var mockRes = { + "interval": 10000, + "results": [{ + "numStats": 6, + "ts": 1459426396298, + "counters": [], + "timers": [], + "gauges": [] + }, { + "numStats": 6, + "ts": 1459426406298, + "counters": [], + "timers": [], + "gauges": [] + }] +}; + +exports.setUp = function(finish) { + fhConfig.setRawConfig(fixtures.config); + finish(); +} + +exports.test_call_stats = function(finish) { + var statsClient = proxyquire('../../../../lib/handlers/stats/stats_client', { + 'fh-config': { + getLogger: mockLogger + }, + 'request': { + 'post': function(params, cb) { + assert.ok(params.headers['x-feedhenry-statsapikey'], 'Should send stats API key'); + return cb(null, { + statusCode: 200 + }, mockRes); + } + } + }); + + statsClient({}, function(err, res) { + assert.ok(!err, 'Error - no error should be received'); + assert.ok(res); + return finish(); + }); +} + +exports.test_call_stats_error = function(finish) { + var statsClient = proxyquire('../../../../lib/handlers/stats/stats_client', { + 'fh-config': { + getLogger: mockLogger + }, + 'request': { + 'post': function(params, cb) { + assert.ok(params.headers['x-feedhenry-statsapikey'], 'Should send stats API key'); + return cb(null, { + statusCode: 500 + }, mockRes); + } + } + }); + + statsClient({}, function(err, res) { + assert.ok(err, 'Error should be received'); + assert.ok(err.message, 'Failed to call stats: 500'); + assert.ok(!res, 'Res should be null'); + return finish(); + }); +} \ No newline at end of file diff --git a/test/unit/handlers/test_healthmonitor.js b/test/unit/handlers/test_healthmonitor.js new file mode 100644 index 0000000..dda0a30 --- /dev/null +++ b/test/unit/handlers/test_healthmonitor.js @@ -0,0 +1,51 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire').noCallThru(); +var request 
= require('supertest'); +var express = require('express'); + +exports.test_health_checks_ok_should_return_http_ok = function(finish){ + var testResults = {status: 'ok', summary: '', details: []}; + sendAndAssertHealthCheckRequest(testResults, 200, finish); +}; + +exports.test_health_checks_crit_should_return_http_internal_server_error = function(finish){ + var testResults = {status: 'crit', summary: '', details: []}; + sendAndAssertHealthCheckRequest(testResults, 500, finish); +}; + +exports.test_health_checks_warn_should_return_http_internal_server_error = function(finish){ + var testResults = {status: 'warn', summary: '', details: []}; + sendAndAssertHealthCheckRequest(testResults, 500, finish); +}; + +function sendAndAssertHealthCheckRequest(testResults, statusCode, finish) { + request(healthMonitorApp(testResults)) + .get('/health') + .end(function(err, res) { + assert.ok(res.statusCode === statusCode, "Response status should have been " + statusCode); + finish(); + }); +} + +function healthMonitorApp(testResults) { + var healthMonitor = proxyquire('../../../lib/handlers/healthmonitor.js', {"fh-config": fhconfig(), + "../util/ditchhelper.js": {}, + 'fh-health': { + init: function() {}, + addCriticalTest: function() {}, + runTests: function(cb) { + cb(null, JSON.stringify(testResults)); + }, + setMaxRuntime: function() {} + } + }); + var app = express(); + app.use('/health', healthMonitor()); + return app; +} + +function fhconfig() { + var fhconfig = require('fh-config'); + fhconfig.setRawConfig(__dirname + '/../../../config/dev.json'); + return fhconfig; +} diff --git a/test/unit/jobs/testProgressPersistor.js b/test/unit/jobs/testProgressPersistor.js new file mode 100644 index 0000000..c7548a1 --- /dev/null +++ b/test/unit/jobs/testProgressPersistor.js @@ -0,0 +1,67 @@ +var fhConfig = require('fh-config'); + +fhConfig.setRawConfig({ + agenda:{ + notification_interval: 1 + } +}); + +var assert = require('assert'); +var sinon = require('sinon'); +var EventEmitter = require('events').EventEmitter; + +var ProgressPersistor = require('lib/jobs/progressPersistor').ProgressPersistor; +const STATUS_EVENT = require('lib/jobs/progressPublisher').STATUS_EVENT; +const PROGRESS_EVENT = require('lib/jobs/progressPublisher').PROGRESS_EVENT; +const FINISH_EVENT = require('lib/jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('lib/jobs/progressPublisher').FAIL_EVENT; + + +module.exports.test_persisting = function(done) { + var emitter = new EventEmitter(); + + var mockTask = { + set: sinon.stub(), + save: function(cb) { + cb(); + }, + toJSON: function () { + return { + status: status + } + }, + reset: function() { + this.set.reset(); + this.save.reset(); + } + }; + + sinon.stub(mockTask, 'save', mockTask.save); + + var progressPersistor = new ProgressPersistor().listen(emitter, mockTask, function(err) { + assert.ok(false, 'An error has occurred: ' + err); + }); + + emitter.emit(STATUS_EVENT, 'started'); + assert.ok(mockTask.set.called, 'Set method not called'); + + var args = mockTask.set.args[0]; + assert.equal(args[0], STATUS_EVENT, 'Bad event persisted'); + assert.equal(args[1], 'started', 'Bad event message persisted'); + assert.ok(mockTask.save.called, 'Save method not called'); + + emitter.emit(FINISH_EVENT, 'finished'); + assert.ok(mockTask.set.called, 'Set method not called'); + + mockTask.reset(); + + emitter.emit(PROGRESS_EVENT, '20%'); + assert.ok(mockTask.set.called, 'Set method not called'); + + var args = mockTask.set.args[0]; + assert.equal(args[0], PROGRESS_EVENT, 'Bad 
event persisted'); + assert.equal(args[1], '20%', 'Bad event message persisted'); + assert.ok(mockTask.save.called, 'Save method not called'); + + done(); +} \ No newline at end of file diff --git a/test/unit/jobs/testProgressPublisher.js b/test/unit/jobs/testProgressPublisher.js new file mode 100644 index 0000000..2793c02 --- /dev/null +++ b/test/unit/jobs/testProgressPublisher.js @@ -0,0 +1,141 @@ +var fhConfig = require('fh-config'); + +fhConfig.setRawConfig({ + agenda:{ + notification_interval: 1 + } +}); + +var assert = require('assert'); +var sinon = require('sinon'); +var EventEmitter = require('events').EventEmitter; +var ProgressPublisher = require('lib/jobs/progressPublisher').ProgressPublisher; +var _ = require('underscore'); + +const STATUS_EVENT = require('lib/jobs/progressPublisher').STATUS_EVENT; +const PROGRESS_EVENT = require('lib/jobs/progressPublisher').PROGRESS_EVENT; +const FINISH_EVENT = require('lib/jobs/progressPublisher').FINISH_EVENT; +const FAIL_EVENT = require('lib/jobs/progressPublisher').FAIL_EVENT; + +module.exports.test_publishing_batching = function(done) { + + var publishSpy = sinon.spy(); + var emitter = new EventEmitter(); + + // Lets create a publisher with a batch size of '3' + var progressPublisher = new ProgressPublisher(3, publishSpy); + progressPublisher.listen(emitter); + + emitter.emit(STATUS_EVENT, 'started'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step1'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step2'); + assert.ok(publishSpy.called, 'Publisher not called'); + publishSpy.reset(); + + // Now the message basket is empty again. Lets fill it again + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step3'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step4'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step5'); + assert.ok(publishSpy.called, 'Publisher not called'); + publishSpy.reset(); + + done(); +} + +module.exports.test_publishing = function(done) { + + var publishSpy = sinon.spy(); + var emitter = new EventEmitter(); + var ProgressPublisher = require('lib/jobs/progressPublisher').ProgressPublisher; + + // Lets create a publisher with a batch size of '3' + var progressPublisher = new ProgressPublisher(3, publishSpy); + progressPublisher.listen(emitter); + + emitter.emit(STATUS_EVENT, 'started'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step1'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step2'); + assert.ok(publishSpy.called, 'Publisher not called'); + + var args = publishSpy.args[0][0]; + + assert.ok(args, 'Publisher called with empty args'); + assert.ok(_.isArray(args), 'Publisher called with bad arguments : ' + JSON.stringify(args)); + assert.ok(args.length === 3, 'Bad number of parameters' + JSON.stringify(args)); + + assert.equal(args[0].type,STATUS_EVENT, 'Bad event found'); + assert.equal(args[0].data,'started', 'Bad event message found'); + assert.equal(args[1].type,PROGRESS_EVENT, 'Bad event found'); + assert.equal(args[1].data,'step1', 'Bad event message found'); + assert.equal(args[2].type,PROGRESS_EVENT, 'Bad event found'); + assert.equal(args[2].data,'step2', 'Bad event message found'); + + 
publishSpy.reset(); + + done(); +} + +module.exports.test_publishing_fail = function(done) { + var publishSpy = sinon.spy(); + var emitter = new EventEmitter(); + + // Lets create a publisher with a batch size of '10' + var progressPublisher = new ProgressPublisher(10, publishSpy); + progressPublisher.listen(emitter); + + emitter.emit(STATUS_EVENT, 'started'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step1'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step2'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(FAIL_EVENT, 'Failure message'); + assert.ok(publishSpy.called, 'Publisher called even if fail message has been sent'); + + var args = publishSpy.args[0][0]; + + assert.ok(args, 'Publisher called with empty args'); + assert.ok(_.isArray(args), 'Publisher called with bad arguments : ' + JSON.stringify(args)); + assert.ok(args.length === 4, 'Bad number of parameters' + JSON.stringify(args)); + + assert.equal(args[3].type, FAIL_EVENT, 'Bad event found'); + assert.equal(args[3].data,'Failure message', 'Bad event message found'); + + done(); +} + +module.exports.test_publishing_finish = function(done) { + var publishSpy = sinon.spy(); + var emitter = new EventEmitter(); + + // Lets create a publisher with a batch size of '10' + var progressPublisher = new ProgressPublisher(10, publishSpy); + progressPublisher.listen(emitter); + + emitter.emit(STATUS_EVENT, 'started'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step1'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(PROGRESS_EVENT, 'step2'); + assert.ok(!publishSpy.called, 'Publisher called with non full basket'); + emitter.emit(FINISH_EVENT, 'Job ended'); + assert.ok(publishSpy.called, 'Publisher called even if finish message has been sent'); + + var args = publishSpy.args[0][0]; + + assert.ok(args, 'Publisher called with empty args'); + assert.ok(_.isArray(args), 'Publisher called with bad arguments : ' + JSON.stringify(args)); + assert.ok(args.length === 4, 'Bad number of parameters' + JSON.stringify(args)); + + assert.equal(args[3].type, FINISH_EVENT, 'Bad event found: ' + args[3].type); + assert.equal(args[3].data,'Job ended', 'Bad event message found: ' + args[3].data); + + done(); +} \ No newline at end of file diff --git a/test/unit/jobs/test_appDataJob.js b/test/unit/jobs/test_appDataJob.js new file mode 100644 index 0000000..20489c7 --- /dev/null +++ b/test/unit/jobs/test_appDataJob.js @@ -0,0 +1,123 @@ +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var sinon = require('sinon'); + +var MODULE_PATH = '../../../lib/jobs/appDataJob'; + +var mockAppMbaasModel = null; +var sandbox = null; +var contextToVerify = null; +var mockExportRunner = function(context) { + contextToVerify = context; +}; +mockExportRunner.prototype.run = function() {}; +var mockImportRunner = function(context) { + contextToVerify = context; +}; +mockImportRunner.prototype.run = function() {}; + +var mockProgressPersistor = function() { + this.listen = function() {}; +}; + +var appDataJob = null; + +module.exports = { + 'before': function() { + sandbox = sinon.sandbox.create(); + mockAppMbaasModel = { + findOne: sandbox.stub() + }; + appDataJob = proxyquire(MODULE_PATH, { + 'fh-mbaas-middleware': { + 'models': { + getModels: function() { + return { + AppMbaas: 
mockAppMbaasModel + }; + } + } + }, + '../export/AppDataExportRunner': { + AppExportRunner: mockExportRunner + }, + '../appdata/import/appDataImportRunner': { + AppDataImportRunner: mockImportRunner + }, + './progressPersistor': { + ProgressPersistor: mockProgressPersistor + } + }); + }, + + 'after': function() { + sandbox.restore(); + }, + + 'test_export_start': function(done) { + var appModel = { + _id: 'testExportJobModel', + appid: 'testApp', + environment: 'test', + jobType: 'export', + toJSON: function() { + return {}; + } + }; + + var appData = { + id: 'testAppData' + }; + + mockAppMbaasModel.findOne.yields(null, appData); + appDataJob.start(appModel); + assert.ok(contextToVerify.appInfo); + assert.ok(contextToVerify.jobModel); + assert.equal(contextToVerify.jobID, appModel._id); + assert.ok(contextToVerify.logger); + done(); + }, + + 'test_import_start': function(done) { + var appModel = { + _id: 'testImportJobModel', + appid: 'testApp', + environment: 'test', + jobType: 'import', + toJSON: function() { + return {}; + }, + metadata: { + filePath: 'testfilepath' + } + }; + + var appData = { + id: 'testAppData' + }; + + mockAppMbaasModel.findOne.yields(null, appData); + appDataJob.start(appModel); + assert.ok(contextToVerify.appInfo); + assert.equal(contextToVerify.input.path, appModel.metadata.filePath); + assert.ok(contextToVerify.jobModel); + assert.equal(contextToVerify.jobID, appModel._id); + assert.ok(contextToVerify.logger); + done(); + }, + + 'test_error': function(done) { + var appModel = { + fail: sandbox.stub(), + toJSON: function() {} + }; + + var err = new Error('failed'); + + mockAppMbaasModel.findOne.yields(err); + appDataJob.start(appModel); + assert.ok(appModel.fail.called); + assert.equal(appModel.fail.args[0][0], err.message); + done(); + } +}; diff --git a/test/unit/jobs/test_appDataRunnerJob.js b/test/unit/jobs/test_appDataRunnerJob.js new file mode 100644 index 0000000..399630c --- /dev/null +++ b/test/unit/jobs/test_appDataRunnerJob.js @@ -0,0 +1,83 @@ +var setup = require('../../setup'); +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var sinon = require('sinon'); + +const MODULE_PATH = '../../../lib/jobs/appDataRunnerJob'; +var sandbox; +var mockAppdataJobModel; +var mockAppData; +var mockAgenda; +var jobToRun; +var appDataRunnerJob; +module.exports = { + 'before': function() { + setup.setUp(); + sandbox = sinon.sandbox.create(); + mockAppdataJobModel = { + runningJobs: sandbox.stub(), + findNextJob: sandbox.stub() + }; + mockAppData = { + start: sandbox.stub() + }; + mockAgenda = { + define: sandbox.spy(function(name, opts, jobFunc) { + jobToRun = jobFunc; + }), + every: sandbox.stub() + }; + appDataRunnerJob = proxyquire(MODULE_PATH, { + '../models': { + AppdataJob: mockAppdataJobModel + }, + './appDataJob': mockAppData + }); + }, + + 'after': function() { + sandbox.restore(); + }, + + 'test_appdata_scheduler_job_definition': function(done) { + var opts = { + frequency: 'every 10 seconds', + concurrency: 2 + }; + appDataRunnerJob(mockAgenda, opts); + assert.ok(mockAgenda.define.calledOnce); + assert.ok(mockAgenda.every.calledOnce); + assert.equal(opts.frequency, mockAgenda.every.args[0][0]); + + //if there are current running jobs and reaches the concurrency limit, + //no new jobs should be running + var jobs = [{id: 'job1'}, {id:'job2'}]; + mockAppdataJobModel.runningJobs.yields(null, jobs); + + var callback = sandbox.spy(); + jobToRun({}, callback); + assert.ok(callback.called); + assert.equal(0, 
mockAppdataJobModel.findNextJob.callCount); + + callback.reset(); + jobs = [{id:'job3'}]; + mockAppdataJobModel.runningJobs.yields(null, jobs); + var nextJob = { + id: 'job4', + checkCurrentState: sandbox.stub(), + readyToProceed: sandbox.stub() + }; + nextJob.checkCurrentState.yields(); + nextJob.readyToProceed.returns(true); + mockAppdataJobModel.findNextJob.yields(null, [nextJob]); + jobToRun({}, callback); + assert.ok(callback.calledOnce); + assert.ok(mockAppdataJobModel.findNextJob.calledOnce); + + setTimeout(function() { + assert.ok(mockAppData.start.calledOnce); + assert.ok(mockAppData.start.calledWith(nextJob)); + done(); + }, 1); + } +}; \ No newline at end of file diff --git a/test/unit/jobs/test_appDataStalledJobsFinder.js b/test/unit/jobs/test_appDataStalledJobsFinder.js new file mode 100644 index 0000000..c6ec049 --- /dev/null +++ b/test/unit/jobs/test_appDataStalledJobsFinder.js @@ -0,0 +1,59 @@ +var setup = require('../../setup'); +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var sinon = require('sinon'); + +const MODULE_PATH = '../../../lib/jobs/appDataStalledJobsFinder'; +var sandbox; +var mockAppdataJobModel; +var mockAgenda; +var jobToRun; +var stalledJobsFinder; + +module.exports = { + 'before': function() { + setup.setUp(); + sandbox = sinon.sandbox.create(); + mockAppdataJobModel = { + stalledJobs: sandbox.stub() + }; + mockAgenda = { + define: sandbox.spy(function(name, opts, jobFunc) { + jobToRun = jobFunc; + }), + every: sandbox.stub() + }; + stalledJobsFinder = proxyquire(MODULE_PATH, { + '../models': { + AppdataJob: mockAppdataJobModel + } + }); + }, + + 'after': function() { + sandbox.restore(); + }, + + 'test_stalled_jobs_finder_definition': function(done) { + var opts = { + frequency: 'every 1 minute' + }; + stalledJobsFinder(mockAgenda, opts); + assert.ok(mockAgenda.define.calledOnce); + assert.ok(mockAgenda.every.calledOnce); + assert.equal(opts.frequency, mockAgenda.every.args[0][0]); + + var mockJobs = [{ + fail: sandbox.stub() + }]; + mockAppdataJobModel.stalledJobs.yields(null, mockJobs); + mockJobs[0].fail.yields(); + + var callback = sandbox.spy(); + jobToRun({}, callback); + assert.ok(callback.calledOnce); + assert.ok(mockAppdataJobModel.stalledJobs.calledOnce); + assert.ok(mockJobs[0].fail.calledOnce); + done(); + } +}; \ No newline at end of file diff --git a/test/unit/messageHandlers/test-deployStatusHandler.js b/test/unit/messageHandlers/test-deployStatusHandler.js new file mode 100644 index 0000000..90e4ecb --- /dev/null +++ b/test/unit/messageHandlers/test-deployStatusHandler.js @@ -0,0 +1,58 @@ +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var fhconfig = require('fh-config'); +var sinon = require('sinon'); + +var mbaas={}; +mbaas.guid=""; +mbaas.domain=""; +mbaas.environment=""; +mbaas.coreHost=""; + +var prefix = "deployTest"; + +var middlewareMocks = { + models: { + getModels: function(){ + return { + AppMbaas: { + findOne: function(args, cb){ + cb(null,mbaas); + } + } + }; + } + } +}; + +var amqpStub = { + getExchangePrefix: function(){ + return prefix; + } +}; + +var supercoreApiClientMock = {}; +var amqMock = sinon.mock(); +var deployStatusHandler = proxyquire('../../../lib/messageHandlers/deployStatusHandler', { + "../util/supercoreApiClient": supercoreApiClientMock, + "fh-mbaas-middleware": middlewareMocks, + '../util/amqp.js': amqpStub +}); + +exports.it_should_send_message_to_supercore = function(finish){ + var json={ + "appname": "testing-gkntbu4dnqtnl5hxzawmujmg-dev", + 
"status": "finished", + "messages": ["Deployment in progress", "Deploymentfinished"], + "deployid": "577ab1b5a524dd2428334fc50e7a0a2e", + }; + + supercoreApiClientMock.sendMessageToSupercore = function(appMbaas, supercoreMessageType, json, callback){ + assert.ok(json); + finish(); + }; + var conf = {fhamqp: {}}; + var connectionStub = {subscribeToTopic: sinon.stub()}; + deployStatusHandler.listenToDeployStatus(connectionStub,conf); + deployStatusHandler.deployStatusUpdate(json); +}; diff --git a/test/unit/middleware/test_appdata.js b/test/unit/middleware/test_appdata.js new file mode 100644 index 0000000..43c8508 --- /dev/null +++ b/test/unit/middleware/test_appdata.js @@ -0,0 +1,207 @@ +var assert = require('assert'); +var deepequal=require('deep-equal'); +var proxyquire = require('proxyquire'); +var fixtures = require('../../fixtures'); +var fhConfig = require('fh-config'); +var mockgoose = require('mockgoose'); +var mongoose = require('mongoose'); +mockgoose(mongoose); +var sinon = require('sinon'); +var _ = require('underscore'); +var AppdataJobSchema = require('../../../lib/models/AppdataJobSchema'); +fhConfig.setRawConfig(fixtures.config); + +var modulePath = '../../../lib/middleware/appdata'; +var middleware; +var jobFixture; +var models; + +var appExportControllerMock = { + startExport: sinon.stub() + .callsArgWith(1, null, jobFixture) +}; + +var fakeUrl = { url: 'http://files.skunkhenry.com/storage/some-file.gz' }; +var storageMock = { + generateURL: sinon.stub() + .callsArgWith(3, null, fakeUrl) +}; + + +exports['middleware/appdata'] = { + before: function(done) { + mongoose.connect('test', function() { + models = require('../../../lib/models'); + models.init(mongoose.connection, done); + }); + }, + after: function(done) { + mongoose.connection.close(done); + }, + beforeEach: function(done) { + // reset mocks + middleware = proxyquire(modulePath, { + '../models': models, + '../export/appDataExportController': appExportControllerMock, + './buildJobMiddleware': proxyquire('../../../lib/middleware/buildJobMiddleware', { + '../../lib/storage': storageMock + }) + }); + mockgoose.reset(); + + // repopulate fixtures + new models.AppdataJob(fixtures.appdata.createJob(1)) + .save(function(err, job) { + jobFixture = job; + done(err); + }); + }, + '#find': { + 'should populate req.job': function(done) { + var req = {}; + + var next = function(err) { + assert.equal(req.job.appid, jobFixture.appid); + assert.ok(!err); + done(); + }; + middleware.find(req, undefined, next, jobFixture._id); + }, + 'should return a 404 error when job not found': function(done) { + var req = {}; + var next = function(err) { + assert.equal(err.code, 404, err.message); + done(); + }; + middleware.find(req, undefined, next, 'zzzzzzz'); + } + }, + '#filteredJobs': { + 'should populate req.jobs': function(done) { + var req = {}; + var params = {}; + params.appid = jobFixture.appid; + params.environment = jobFixture.environment; + req.params = params; + var next = function(err) { + assert.ok(!err); + assert.equal(req.jobs[0].appid, jobFixture.appid); + done(); + }; + middleware.filteredJobs(req, undefined, next); + }, + 'should return a 500 error on find() errors': function(done) { + var req = {}; + var params = {}; + params.appid = jobFixture.appid; + params.environment = jobFixture.environment; + req.params = params; + middleware = proxyquire(modulePath, { + '../models': { + AppdataJob: { + find: sinon.stub().yields(new Error()) + } + } + }); + var next = function(err) { + assert.equal(500, err.code); + done(); + }; 
+ middleware.filteredJobs(req, undefined, next); + } + }, + '#create': { + beforeEach: function(done) { + this.req = { + params: { + domain: 'domain', + environment: 'environment', + appid: 'appid' + } + }; + done(); + }, + 'should delegate to appExportController': function(done) { + var self = this; + var next = function(err) { + assert.ok(!err); + assert.ok(appExportControllerMock.startExport.called, 'Export controller not invoked'); + assert.ok(appExportControllerMock.startExport.calledOnce); + deepequal(self.req.job, jobFixture); + done(); + }; + this.req.body = { + stopApp: false + }; + middleware.create(this.req, undefined, next); + } + }, + '#ensureFinishedAndRegistered': { + beforeEach: function(done) { + this.job = _.clone(jobFixture); + this.job.status = AppdataJobSchema.statuses.FINISHED; + this.job.metadata.fileId = 'some-id'; + this.req = { job: this.job }; + done(); + }, + 'should error on incorrect status': function(done) { + this.job.status = AppdataJobSchema.statuses.FAILED; + var next = function(err) { + assert.ok(err); + assert.ok(/not finished/.test(err.message)); + done(); + }; + middleware.ensureFinishedAndRegistered(this.req, undefined, next); + }, + 'should error on fileId missing': function(done) { + delete this.job.metadata.fileId; + var next = function(err) { + assert.ok(err); + assert.ok(/no registered file/.test(err.message), err.message); + done(); + }; + middleware.ensureFinishedAndRegistered(this.req, undefined, next); + }, + 'should error on file deleted': function(done) { + this.job.metadata.fileDeleted = true; + var next = function(err) { + assert.ok(err); + assert.ok(/deleted/.test(err.message), err.message); + done(); + }; + middleware.ensureFinishedAndRegistered(this.req, undefined, next); + } + }, + '#generateURL': { + before: function(done) { + this.req = { + job: { + metadata: { + fileId: 'some-id' + } + } + }; + done(); + }, + 'should delegate to storage': function(done) { + var self = this; + var next = function(err) { + assert.ok(!err); + assert.ok(storageMock.generateURL.calledOnce); + assert.equal(self.req.fileUrl.url, fakeUrl.url); + done(); + }; + middleware.generateURL(this.req, undefined, next); + }, + 'should 500 on error': function(done) { + storageMock.generateURL = sinon.stub() + .callsArgWith(3, new Error('test error')); + var next = function(err) { + assert.ok(err); + assert.equal(err.code, 500); + done(); + }; + middleware.generateURL(this.req, undefined, next); + } + } +}; diff --git a/test/unit/middleware/test_mbaas_app.js b/test/unit/middleware/test_mbaas_app.js new file mode 100644 index 0000000..9b992e2 --- /dev/null +++ b/test/unit/middleware/test_mbaas_app.js @@ -0,0 +1,215 @@ +var assert = require('assert'); +var proxyquire = require('proxyquire'); +var util = require('util'); +var sinon = require('sinon'); + +var undertest = '../../../lib/middleware/mbaasApp'; + + + +module.exports.test_create_app_db_not_for_all_app_types = function (done){ + var createCalled = false; + var mocks = { + '../util/mongo.js':{ + 'createDb':function (config, user, pass, name,callback){ + createCalled = true; + assert.fail("should not get here") + } + } + }; + var mbaasApp = proxyquire(undertest,mocks); + var req = { + "appMbaasModel":{ + "type":"nonfeedhenry", + "save": function (cb){ + assert.fail("should not get here"); + } + } + }; + var res = {}; + mbaasApp.createDbForAppTypes([])(req,res,function next(err,ok){ + assert.ok(!err, "did not expect an error to be returned"); + assert.ok(!ok, "did not expect a db config to be returned"); + 
assert.ok(createCalled == false,"db create should not be called"); + done(); + }); +}; + + +module.exports.test_create_app_db_allowed_app_types = function (done){ + var mocks = { + '../util/mongo.js':{ + 'createDb':function (config, user, pass, name,callback){ + callback(); + } + }, + 'fh-mbaas-middleware':{ + "config":function (){ + return{ + "mongo":{ + "host":"test", + "port":"port" + } + } + } + } + }; + var mbaasApp = proxyquire(undertest,mocks); + var req = { + "appMbaasModel":{ + "name":"testapp", + "type":"openshift3", + "save": function (cb){ + return cb(); + }, + "markModified": function (){} + } + }; + var res = {}; + mbaasApp.createDbForAppTypes(["openshift3"])(req,res,function next(err,ok){ + assert.ok(!err, "did not expect an error to be returned"); + assert.ok(ok,"expected a dbconfig to be returned"); + assert.ok(ok.host === "test","expected a host"); + assert.ok(ok.port === "port","expected a port"); + assert.ok(ok.hasOwnProperty("name"),"expected a name property"); + assert.ok(ok.hasOwnProperty("user"),"expected a user property"); + assert.ok(ok.hasOwnProperty("pass"),"expected a pass property"); + done(); + }); +}; + +module.exports.test_create_does_not_create_when_dbConf_present = function (done){ + var createCalled = false; + var mocks = { + '../util/mongo.js':{ + 'createDb':function (config, user, pass, name,callback){ + createCalled = true; + assert.fail("should not get here") + } + }, + 'fh-mbaas-middleware':{ + "config":function (){ + return{ + "mongo":{ + "host":"test", + "port":"port" + } + } + } + } + }; + var mbaasApp = proxyquire(undertest,mocks); + var req = { + "appMbaasModel":{ + "name":"testapp", + "type":"openshift3", + "dbConf":{"host":""}, + "save": function (cb){ + return cb(); + }, + "markModified": function (){} + } + }; + var res = {}; + mbaasApp.createDbForAppTypes(["openshift3"])(req,res,function next(err,ok){ + assert.ok(!err, "did not expect an error to be returned"); + assert.ok(! 
ok,"did not expect a dbconfig to be returned"); + assert.ok(createCalled == false,"db create should not have been called"); + done(); + }); +}; + +module.exports.test_removeDbMiddlewareForMigrated = function(done){ + var dropDbStub = sinon.stub().callsArg(3); + var mocks = { + '../util/mongo.js': { + 'dropDb': dropDbStub + } + }; + var mbaasApp = proxyquire(undertest, mocks); + var req = { + appMbaasModel: { + migrated: true, + "type": "nonfeedhenry", + domain: "test", + dbConf: {user: "test1", name: "testdb1"}, + environment: "dev", + name: "test-fsdfsdgdsfgdsf-dev", + remove: function(cb){ + return cb(null, req.appMbaasModel); + } + } + }; + var res = {}; + mbaasApp.removeDbMiddleware(req, res, function next(err, ok){ + assert.ok(!err, "did not expect an error to be returned"); + assert.ok(req.resultData); + sinon.assert.calledWith(dropDbStub, sinon.match.any, 'test1', 'testdb1', sinon.match.func); + done(); + }); +}; + +module.exports.test_removeDbMiddlewareForOpenShift3 = function(done){ + var dropDbStub = sinon.stub().callsArg(3); + var mocks = { + '../util/mongo.js': { + 'dropDb': dropDbStub + } + }; + var mbaasApp = proxyquire(undertest, mocks); + var req = { + appMbaasModel: { + migrated: false, + "type": "openshift3", + domain: "test", + dbConf: {user: "test2", name: "testdb2"}, + environment: "dev", + name: "test-fsdfsdgdsfgdsf-dev", + remove: function(cb){ + return cb(null, req.appMbaasModel); + } + } + }; + var res = {}; + mbaasApp.removeDbMiddleware(req, res, function next(err, ok){ + assert.ok(!err, "did not expect an error to be returned"); + assert.ok(req.resultData); + sinon.assert.calledWith(dropDbStub, sinon.match.any, 'test2', 'testdb2', sinon.match.func); + done(); + }); +}; + + +module.exports.test_removeDbMiddlewareForDitch = function(done){ + var dropDbStub = sinon.stub().callsArg(3); + var mocks = { + '../util/mongo.js': { + 'dropDb': dropDbStub + }, + '../services/appmbaas/removeAppDb.js': function(mongo, domain, appModel, environment, callback){ + callback(null, {collections: ["test", "test2"]}); + } + }; + var mbaasApp = proxyquire(undertest, mocks); + var req = { + appMbaasModel: { + migrated: false, + "type": "nonfeedhenry", + domain: "test", + dbConf: {user: "test", name: "testdb"}, + environment: "dev", + name: "test-fsdfsdgdsfgdsf-dev", + remove: function(cb){ + return cb(null, req.appMbaasModel); + } + } + }; + var res = {}; + mbaasApp.removeDbMiddleware(req, res, function next(err, ok){ + assert.ok(!err, "did not expect an error to be returned " + util.inspect(err) ); + assert.ok(req.resultData); + sinon.assert.notCalled(dropDbStub); + done(); + }); +}; + diff --git a/test/unit/models/test-appEnv.js b/test/unit/models/test-appEnv.js new file mode 100644 index 0000000..736c09b --- /dev/null +++ b/test/unit/models/test-appEnv.js @@ -0,0 +1,129 @@ +var fixtures = require('../../fixtures'); + +var dbConf = { + host: 'localhost', + port: 27017, + name: 'test', + user: 'testuser', + pass: 'testpass' +}; + +var fhconfig = require('fh-config'); +fhconfig.setRawConfig(fixtures.config); + + +var appEnv = require('../../../lib/models/appEnv'); +var assert = require('assert'); + +exports.test_app_envs = function(finish){ + fhconfig.setRawConfig(fixtures.config); + var params = { + mbaas: {dbConf: dbConf}, + appMbaas: { + dbConf: dbConf, + migrated: true, + accessKey: "somembaasaccesskey", + type: 'feedhenry', + mbaasUrl: "https://mbaas.somembaas.com", + isServiceApp: true, + serviceAccessKey: '1234' + }, + fhconfig: fhconfig + }; + + var envs = 
appEnv[params.appMbaas.type](params); + assert.equal(envs.FH_AMQP_APP_ENABLED, false); + assert.equal(envs.FH_AMQP_CONN_MAX, 10); + assert.equal(envs.FH_AMQP_NODES, 'localhost:5672'); + assert.equal(envs.FH_AMQP_VHOST, 'fhevents'); + assert.equal(envs.FH_AMQP_USER, 'fheventuser'); + assert.equal(envs.FH_AMQP_PASS, 'fheventpassword'); + + assert.equal(envs.FH_DITCH_HOST, 'localhost'); + assert.equal(envs.FH_DITCH_PORT, 8802); + assert.equal(envs.FH_DITCH_PROTOCOL, 'http'); + + assert.equal(envs.FH_MESSAGING_BACKUP_FILE, '../messages/backup.log'); + assert.equal(envs.FH_MESSAGING_CLUSTER, 'development'); + assert.equal(envs.FH_MESSAGING_ENABLED, true); + assert.equal(envs.FH_MESSAGING_HOST, 'localhost'); + assert.equal(envs.FH_MESSAGING_PROTOCOL, 'http'); + assert.equal(envs.FH_MESSAGING_REALTIME_ENABLED, true); + assert.equal(envs.FH_MESSAGING_RECOVERY_FILE, '../messages/recovery.log'); + assert.equal(envs.FH_MESSAGING_SERVER, 'http://localhost:8803/msg/TOPIC'); + + assert.equal(envs.FH_MONGODB_CONN_URL, 'mongodb://testuser:testpass@localhost:27017/test'); + + assert.equal(envs.FH_STATS_ENABLED, false); + assert.equal(envs.FH_STATS_HOST, 'localhost'); + assert.equal(envs.FH_STATS_PORT, 8804); + assert.equal(envs.FH_STATS_PROTOCOL, 'http'); + + + //Checking mbaas data checked + assert.equal(envs.FH_MBAAS_HOST, "mbaas.somembaas.com"); + assert.equal(envs.FH_MBAAS_PROTOCOL, "https"); + assert.equal(envs.FH_MBAAS_ENV_ACCESS_KEY, "somembaasaccesskey"); + assert.equal(envs.FH_MBAAS_ID, "development"); + + assert.equal(params.appMbaas.serviceAccessKey, envs.FH_SERVICE_ACCESS_KEY); + + finish(); +}; + + +exports.test_app_env_os3 = function(finish){ + var params = { + mbaas: {dbConf: dbConf}, + appMbaas: { + dbConf: dbConf, + migrated: true, + accessKey: "somembaasaccesskey", + type: 'openshift3', + mbaasUrl: "https://mbaas.somembaas.com" + }, + fhconfig: fhconfig + }; + + var envs = appEnv[params.appMbaas.type](params); + assert.equal(envs.FH_MESSAGING_CLUSTER, 'development'); + assert.equal(envs.FH_MESSAGING_ENABLED, true); + assert.equal(envs.FH_MESSAGING_HOST, 'localhost'); + assert.ok(envs.hasOwnProperty("FH_MESSAGING_REALTIME_ENABLED")); + //Checking mbaas data checked + assert.equal(envs.FH_MBAAS_HOST, "mbaas.somembaas.com"); + assert.equal(envs.FH_MBAAS_PROTOCOL, "https"); + assert.equal(envs.FH_MBAAS_ENV_ACCESS_KEY, "somembaasaccesskey"); + assert.equal(envs.FH_MBAAS_ID, "development"); + finish(); +}; + + +exports.test_service_env_os3 = function(finish){ + var params = { + mbaas: {dbConf: dbConf}, + appMbaas: { + dbConf: dbConf, + migrated: true, + accessKey: "somembaasaccesskey", + type: 'openshift3', + mbaasUrl: "https://mbaas.somembaas.com", + isServiceApp: true, + serviceAccessKey: "someserviceaccesskey" + }, + fhconfig: fhconfig + }; + + var envs = appEnv[params.appMbaas.type](params); + assert.equal(envs.FH_MESSAGING_CLUSTER, 'development'); + assert.equal(envs.FH_MESSAGING_ENABLED, true); + assert.equal(envs.FH_MESSAGING_HOST, 'localhost'); + assert.ok(envs.hasOwnProperty("FH_MESSAGING_REALTIME_ENABLED")); + //Checking mbaas data checked + assert.equal(envs.FH_MBAAS_HOST, "mbaas.somembaas.com"); + assert.equal(envs.FH_MBAAS_PROTOCOL, "https"); + assert.equal(envs.FH_MBAAS_ENV_ACCESS_KEY, "somembaasaccesskey"); + assert.equal(envs.FH_MBAAS_ID, "development"); + assert.equal(envs.FH_SERVICE_ACCESS_KEY, "someserviceaccesskey"); + finish(); +}; diff --git a/test/unit/models/test-appmbaas.js b/test/unit/models/test-appmbaas.js new file mode 100644 index 0000000..751790e --- /dev/null +++ 
b/test/unit/models/test-appmbaas.js @@ -0,0 +1,416 @@ +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var util = require('util'); +var sinon = require('sinon'); +var _ = require('underscore'); +var fhmbaasMiddleware = require('fh-mbaas-middleware'); + +var cfg = { + mongoUrl: 'mongodb://localhost:27017/test-fhmbaas-accept', + mongo: { + host: 'localhost', + port: 8888, + name: 'fh-mbaas-test', + admin_auth: { + user: 'admin', + pass: 'admin' + } + }, + fhdfc: { + "dynofarm": "http://localhost:9000", + "username": "DYNOFARM_USERNAME", + "_password": "DYNOFRAM_PASSWORD", + "loglevel": "warn", + "cache_timeout": 1234123 + } +}; + +var fhconfig = require('fh-config'); +fhconfig.setRawConfig(cfg); + + +var dfutils = require('../../../lib/util/dfutils'); + +function done(finish){ + dfutils.clearInterval(); + finish(); +} + +exports.tearDown = function(finish){ + done(finish); +}; + + +exports.test_middleware_config = function(finish){ + fhmbaasMiddleware.setConfig(cfg, function(err){ + assert.ok(!err, 'Error in middleware config'); + finish(); + }); +}; + +exports.test_create_db = function(finish){ + var next = sinon.spy(); + var mockSave = sinon.stub(); + var createDb = sinon.stub(); + var mockMod = sinon.spy(); + + var createDatabase = proxyquire('../../../lib/middleware/mbaasApp.js', { + '../util/mongo.js': {createDb: createDb}, + 'fh-mbaas-middleware':{ + "config":function() { + return{ + "mongo":{ + "host":"test", + "port":"port" + } + }; + } + } + }).createDbMiddleware; + + var req = { + params: { + appid: "someappguid", + domain: "somedomain", + environment: "someenvironment", + id: "somethemeid", + type:"feedhenry" + }, + cacheKey: "2321312321", + body: {'cacheKey': '2321312321', securityToken: 'testToken'}, + appMbaasModel: { + save: mockSave, + markModified: mockMod, + name: "unit-testing", + migrated: 'true', + type:"feedhenry" + } + }; + + mockSave.callsArg(0); + createDb.callsArg(4); + createDatabase(req, req, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.ok(mockMod.calledWith('dbConf')); + assert.ok(createDb.calledOnce, "Expected createDb To Be Called Once"); + assert.ok(createDb.calledBefore(next)); + assert.ok(mockSave.calledBefore(next)); + finish(); +}; + +exports.test_create_db_error = function(finish){ + var next = sinon.spy(); + var mockSave = sinon.stub(); + var createDb = sinon.stub(); + var mockMod = sinon.spy(); + + var createDatabase = proxyquire('../../../lib/middleware/mbaasApp.js', { + '../util/mongo.js': {createDb: createDb}, + 'fh-mbaas-middleware':{ + "config":function() { + return{ + "mongo":{ + "host":"test", + "port":"port" + } + }; + } + } + }).createDbMiddleware; + var req = { + params: { + appid: "someappguid", + domain: "somedomain", + environment: "someenvironment", + id: "somethemeid", + cacheKey: "2321312321" + }, + body: {'cacheKey':'2321312321', securityToken: 'testToken'}, + appMbaasModel: {save: mockSave, markModified: mockMod, name: "unit-testing","type":"feedhenry"} + }; + + mockSave.callsArgWith(0, new Error('mock error')); + createDb.callsArg(4); + createDatabase(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.ok(mockMod.calledWith('dbConf')); + assert.ok(createDb.calledBefore(next)); + assert.equal(next.args[0][0], 'Error: mock error'); + finish(); +}; + + +exports.test_stop_app = function(finish){ + var next = sinon.spy(); + var stopApp = sinon.stub(); + var createDb = sinon.stub(); + + var stopAppMiddle = 
proxyquire('../../../lib/middleware/mbaasApp.js', { + '../util/mongo.js': {createDb: createDb}, + '../util/dfutils.js': {stopApp: stopApp} + }).stopAppMiddleware; + + var req = { + params: { + appid: "someappguid", + domain: "somedomain", + environment: "someenvironment", + id: "somethemeid" + }, + appMbaasModel: {name: "unit-testing"} + }; + + stopApp.callsArg(3); + stopAppMiddle(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.ok(stopApp.calledBefore(next)); + finish(); +}; + +exports.test_stop_app_error = function(finish){ + var next = sinon.spy(); + var stopApp = sinon.stub(); + var createDb = sinon.stub(); + + var stopAppMiddle = proxyquire('../../../lib/middleware/mbaasApp.js', { + '../util/mongo.js': {createDb: createDb}, + '../util/dfutils.js': {stopApp: stopApp} + }).stopAppMiddleware; + + var req = { + params: { + id: "somethemeid" + }, + appMbaasModel: { + name: "unit-testing", + appid: "someappguid", + domain: "somedomain", + environment: "someenvironment", + } + }; + + stopApp.callsArgWith(3, new Error('mock error')); + stopAppMiddle(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.ok(stopApp.calledBefore(next)); + assert.equal(next.args[0][0], 'Error: Failed to stop app unit-testing'); + finish(); +}; + +exports.test_migrate_db = function(finish){ + var next = sinon.spy(); + var doMigrate = sinon.stub(); + var createDb = sinon.stub(); + + var migrateDbMiddle = proxyquire('../../../lib/middleware/mbaasApp.js', { + '../util/mongo.js': {createDb: createDb}, + '../util/ditchhelper.js': {doMigrate: doMigrate} + }).migrateDbMiddleware; + + var req = { + params: { + appid: "someappguid", + securityToken: "securityToken", + id: "somethemeid" + }, + appMbaasModel: { + name: "unit-testing", + domain: "somedomain", + environment: "someenvironment", + guid: "4562651426" + }, + body: { + securityToken: "securityToken", + coreHost: "http://corehost.feedhenry.com" + }, + }; + + doMigrate.callsArg(6); + migrateDbMiddle(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.ok(doMigrate.calledBefore(next)); + finish(); +}; + +exports.test_drop_db = function(finish){ + var next = sinon.spy(); + var mockRemove = sinon.stub(); + var dropDb = sinon.stub(); + + var removeDatabase = proxyquire('../../../lib/middleware/mbaasApp.js', {'../util/mongo.js': {dropDb: dropDb}}).removeDbMiddleware; + + var req = { + params: { + appid: "someappguid", + domain: "somedomain", + environment: "someenvironment", + id: "somethemeid" + }, + appMbaasModel: { + name: "unit-testing", + migrated: true, + dbConf: { + user: 'user', + name: 'name' + }, + remove: mockRemove, + appMbaasModel: { + name: "unit-testing", + migrated: true, + dbConf: { + user: 'user', + name: 'name' + }, + remove: mockRemove + } + } + }; + + + mockRemove.callsArg(0); + dropDb.callsArg(3); + removeDatabase(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.ok(dropDb.calledOnce, "Expected dropDb To Be Called Once"); + assert.ok(mockRemove.calledOnce, "Expected remove To Be Called Once"); + assert.ok(dropDb.calledBefore(next)); + finish(); +}; + +exports.test_drop_db_error = function(finish){ + var next = sinon.spy(); + var mockRemove = sinon.stub(); + var dropDb = sinon.stub(); + + var removeDatabase = proxyquire('../../../lib/middleware/mbaasApp.js', {'../util/mongo.js': {dropDb: dropDb}}).removeDbMiddleware; + + var req = { + params: { + appid: "someappguid", + domain: "somedomain", + 
environment: "someenvironment", + id: "somethemeid" + }, + appMbaasModel: { + name: "unit-testing", + migrated: true, + dbConf: { + user: 'user', + name: 'name' + }, + remove: mockRemove, + appMbaasModel: { + name: "unit-testing", + migrated: true, + dbConf: { + user: 'user', + name: 'name' + }, + remove: mockRemove + } + } + }; + + + mockRemove.callsArg(0); + dropDb.callsArgWith(3, new Error('mock error')); + removeDatabase(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.equal(next.args[0][0], 'Error: Request to remove db for app unit-testing'); + finish(); +}; + + +exports.test_get_models_info = function(finish){ + var next = sinon.spy(); + var createDb = sinon.stub(); + var mockMbaas = sinon.stub(); + var mockEnv = sinon.stub(); + + var mockFind = function(){ + return { + findOne: function(args, cb){ + return cb(); + } + } + } + + var modelsinfo = proxyquire('../../../lib/middleware/mbaasApp.js', { + '../util/mongo.js': {createDb: createDb}, + 'fh-mbaas-middleware': {mbaas: mockFind}, + '../models/appEnv.js': {appEnv: mockEnv} + }).modelsInfo; + + var req = { + params: { + appid: "someappguid", + domain: "somedomain", + environment: "someenvironment", + id: "somethemeid" + }, + appMbaasModel: { + name: "unit-testing", + migrated: true, + dbConf: { + user: 'user', + name: 'name' + }, + type: 'feedhenry', + mbaasUrl: 'test-url' + }, + originalUrl: 'testoriginalurl' + }; + + modelsinfo(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.ok(req.resultData.env, "Should expect request object resultData to be populated"); + finish(); +}; + +exports.test_get_models_info_error = function(finish){ + var next = sinon.spy(); + var createDb = sinon.stub(); + var mockMbaas = sinon.stub(); + var mockEnv = sinon.stub(); + + var mockFind = function(){ + return { + findOne: function(args, cb){ + return cb(new Error('Mock Error')); + } + } + } + + var modelsinfo = proxyquire('../../../lib/middleware/mbaasApp.js', { + '../util/mongo.js': {createDb: createDb}, + 'fh-mbaas-middleware': {mbaas: mockFind}, + '../models/appEnv.js': {appEnv: mockEnv} + }).modelsInfo; + + var req = { + params: { + appid: "someappguid", + domain: "somedomain", + environment: "someenvironment", + id: "somethemeid" + }, + appMbaasModel: { + name: "unit-testing", + migrated: true, + dbConf: { + user: 'user', + name: 'name' + }, + type: 'feedhenry', + mbaasUrl: 'test-url' + }, + originalUrl: 'testoriginalurl' + }; + + modelsinfo(req, {}, next); + assert.ok(next.calledOnce, "Expected Next To Be Called Once"); + assert.equal(next.args[0][0], "Error: Failed to look up Mbaas/AppMbaas instance"); + finish(); +}; + + diff --git a/test/unit/models/test-submission-export-job.js b/test/unit/models/test-submission-export-job.js new file mode 100644 index 0000000..cb031f4 --- /dev/null +++ b/test/unit/models/test-submission-export-job.js @@ -0,0 +1,78 @@ +var mongoose = require('mongoose'); +var assert = require('assert'); +var mockgoose = require('mockgoose'); +var async = require('async'); + +var fixtures = require('../../fixtures'); +mockgoose(mongoose); + +var models = require('lib/models'); + + +describe('Submission Export Job Model', function(){ + + before(function(done){ + this.connection = mongoose.createConnection("mongodb://some.mongo.host.com:27017"); + done(); + }); + + beforeEach(function(done){ + mockgoose.reset(); + + models.init(this.connection, done); + }); + + it("Initialise Submission Export Model", function(done){ + var mockSubmissionExportJobData 
= { + domain: fixtures.mockDomain, + environment: fixtures.mockEnv, + jobType: "export", + totalSteps: 22, + metaData: { + submissions: 22, + files: 222, + size: 213432 + }, + logs: ["This is a log"], + junk: "THIS SHOULD NOT BE HERE." + }; + + async.waterfall([ + function createAndSaveModel(cb){ + assert.ok(models.SubmissionExportJob, "Expected a submission export job to be defined"); + //Create a mock submission export Job + + var newSubmissionExportJob = new models.SubmissionExportJob(mockSubmissionExportJobData); + + + newSubmissionExportJob.save(function(err, savedJob){ + assert.ok(!err, "Expected no error " + err); + + assert.ok(savedJob, "Expected A Saved Job"); + assert.ok(savedJob._id, "Expected the saved job to have an _id parameter"); + + cb(undefined, savedJob); + }); + }, + function checkModelSaved(savedJob, cb){ + models.SubmissionExportJob.findOne({_id: savedJob._id}, function(err, foundJob){ + assert.ok(!err, "Expected no error " + err); + + assert.ok(foundJob, "Expected A Job"); + assert.equal(mockSubmissionExportJobData.domain, foundJob.domain); + assert.equal(mockSubmissionExportJobData.environment, foundJob.environment); + assert.equal(mockSubmissionExportJobData.jobType, foundJob.jobType); + assert.equal(mockSubmissionExportJobData.totalSteps, foundJob.totalSteps); + assert.equal(mockSubmissionExportJobData.logs[0], foundJob.logs[0]); + assert.equal(mockSubmissionExportJobData.metaData.submissions, foundJob.metaData.submissions); + assert.equal(undefined, foundJob.junk); + cb(); + }); + } + ], function(err){ + assert.ok(!err, "Expected No Error " + err); + done(); + }); + }); + +}); \ No newline at end of file diff --git a/test/unit/routes/forms/test-datasource-router.js b/test/unit/routes/forms/test-datasource-router.js new file mode 100644 index 0000000..ce8bd2b --- /dev/null +++ b/test/unit/routes/forms/test-datasource-router.js @@ -0,0 +1,646 @@ +var supertest = require('supertest'); +var proxyquire = require('proxyquire'); +var fixtures = require('../../../fixtures'); +var stubs = require('../../../stubs'); +var express = require('express'); +var assert = require('assert'); +var util = require('util'); +var fhConfig = require('fh-config'); +fhConfig.setRawConfig(fixtures.config); +var logger = fhConfig.getLogger(); +var sinon = require('sinon'); +var bodyParser = require('body-parser'); +var _ = require('underscore'); + +var baseRoutePath = '/:domain/:environment/appforms/data_sources'; +var baseUrl = '/' + fixtures.mockDomain + '/' + fixtures.mockEnv + '/appforms/data_sources'; + +module.exports = { + "It Should List Data Sources": function (done) { + var mockDs = fixtures.forms.dataSources.get(); + var mockServiceDetails = fixtures.services.get(); + var dsListStub = stubs.forms.core.dataSources.list(); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + list: dsListStub + } + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + } + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .get(baseUrl + '/') + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body[0]._id, mockDs._id); + assert.equal(res.body[0].serviceGuid, mockServiceDetails.guid); + }) + .end(function (err) { + if (err) { + console.error(err); + } + assert.ok(!err, 
"Expected No Error " + util.inspect(err)); + + assert.equal(1, dsListStub.callCount); + + done(); + }); + }, + "It Should Get A Single Data Source": function (done) { + var mockDs = fixtures.forms.dataSources.get(); + var mockServiceDetails = fixtures.services.get(); + var dsGetStub = stubs.forms.core.dataSources.get(); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + get: dsGetStub + } + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + } + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .get(baseUrl + '/' + mockDs._id) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockDs._id); + assert.equal(res.body.serviceGuid, mockServiceDetails.guid); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + assert.equal(1, dsGetStub.callCount); + + done(); + }); + }, + "It Should Get A Single Data Source With Audit Logs": function (done) { + var mockDSWithAuditLogs = fixtures.forms.dataSources.withAuditLogs(); + var mockServiceDetails = fixtures.services.get(); + var dsGetStub = stubs.forms.core.dataSources.get(); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + get: dsGetStub + } + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + } + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .get(baseUrl + '/' + mockDSWithAuditLogs._id + "/audit_logs") + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockDSWithAuditLogs._id); + assert.equal(res.body.serviceGuid, mockServiceDetails.guid); + assert.ok(res.body.auditLogs, "Expected Audit Logs"); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + sinon.assert.calledOnce(dsGetStub); + + done(); + }); + }, + "It Should Get A Single Audit Log Entry": function (done) { + var mockAuditLog = fixtures.forms.dataSources.auditLog(); + var dsGetAuditLogEntryStub = stubs.forms.core.dataSources.getAuditLogEntry(); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + getAuditLogEntry: dsGetAuditLogEntryStub + } + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + } + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .get(baseUrl + '/audit_logs/' + mockAuditLog._id) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockAuditLog._id); + assert.ok(res.body.data, "Expected a Data Response"); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + sinon.assert.calledOnce(dsGetAuditLogEntryStub); + + done(); + }); + }, + "It Should Deploy A Single Data Source": function (done) { + var mockDs = fixtures.forms.dataSources.get(); + var 
mockServiceDetails = fixtures.services.get(); + var deploy = stubs.forms.core.dataSources.deploy(); + var mockUpdateDataSources = stubs.fhServiceAuth.model.updateDataSources(); + + var mockGetDeployedService = stubs.services.appmbaas.getDeployedService(); + mockGetDeployedService['@global'] = true; + var mockUpdateSingleDataSource = stubs.dataSourceUpdater.handlers.updateSingleDataSource(); + + var dataSourceUpdaterModule = function () { + return { + handlers: { + updateSingleDataSource: mockUpdateSingleDataSource + } + }; + }; + dataSourceUpdaterModule['@global'] = true; + + var mockServiceModel = stubs.fhServiceAuth.model.get({ + updateDataSources: mockUpdateDataSources + }); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + deploy: deploy + } + } + }, + '../../../services/appmbaas/getDeployedService': mockGetDeployedService, + 'fh-service-auth': { + '@global': true, + model: { + get: mockServiceModel + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + }, + '../../../dataSourceUpdater': dataSourceUpdaterModule + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .post(baseUrl + '/' + mockDs._id + "/deploy") + .send(mockDs) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockDs._id); + assert.equal(res.body.serviceGuid, mockServiceDetails.guid); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + err); + + sinon.assert.calledOnce(deploy); + sinon.assert.calledOnce(mockUpdateDataSources); + sinon.assert.calledOnce(mockServiceModel); + + //These functions should be called after the deploy has responded. 
+ setTimeout(function(){ + sinon.assert.calledOnce(mockUpdateSingleDataSource); + sinon.assert.calledOnce(mockGetDeployedService); + done(); + }, 100); + }); + }, + "It Should Deploy A Single Data Source No Service Deployed": function (done) { + var mockDs = fixtures.forms.dataSources.get(); + var mockServiceDetails = fixtures.services.get(); + var deploy = stubs.forms.core.dataSources.deploy(); + var mockUpdateDataSources = stubs.fhServiceAuth.model.updateDataSources(); + + var mockGetDeployedService = stubs.services.appmbaas.getDeployedService(true); + mockGetDeployedService['@global'] = true; + //Expecting an error update to the data source update function + var mockUpdateSingleDataSource = stubs.dataSourceUpdater.handlers.updateSingleDataSource(true); + + var dataSourceUpdaterModule = function () { + return { + handlers: { + updateSingleDataSource: mockUpdateSingleDataSource + } + }; + }; + dataSourceUpdaterModule['@global'] = true; + + var mockServiceModel = stubs.fhServiceAuth.model.get({ + updateDataSources: mockUpdateDataSources + }); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + deploy: deploy + } + } + }, + '../../../services/appmbaas/getDeployedService': mockGetDeployedService, + 'fh-service-auth': { + '@global': true, + model: { + get: mockServiceModel + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + }, + '../../../dataSourceUpdater': dataSourceUpdaterModule + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .post(baseUrl + '/' + mockDs._id + "/deploy") + .send(mockDs) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockDs._id); + assert.equal(res.body.serviceGuid, mockServiceDetails.guid); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + err); + + sinon.assert.calledOnce(deploy); + sinon.assert.calledOnce(mockUpdateDataSources); + sinon.assert.calledOnce(mockServiceModel); + + //These functions should be called after the deploy has responded. 
+ setTimeout(function(){ + sinon.assert.calledOnce(mockUpdateSingleDataSource); + sinon.assert.calledOnce(mockGetDeployedService); + done(); + }, 100); + }); + }, + "It Should Remove A Single Data Source": function (done) { + var mockDs = fixtures.forms.dataSources.get(); + var dsRemoveStub = stubs.forms.core.dataSources.remove(); + var dsGetStub = stubs.forms.core.dataSources.get(); + var mockRemoveDSStub = stubs.fhServiceAuth.model.removeDataSource(); + var mockSericeModel = stubs.fhServiceAuth.model.get({ + removeDataSource: mockRemoveDSStub + }); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + remove: dsRemoveStub, + get: dsGetStub + } + } + }, + 'fh-service-auth': { + '@global': true, + model: { + get: mockSericeModel + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + } + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .delete(baseUrl + '/' + mockDs._id) + .expect(204) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + assert.equal(1, dsRemoveStub.callCount); + assert.equal(1, mockSericeModel.callCount); + assert.equal(1, mockRemoveDSStub.callCount); + assert.equal(1, dsGetStub.callCount); + + done(); + }); + }, + "It Should Force A Refresh Of A Single Data Source": function (done) { + var mockDs = fixtures.forms.dataSources.withData(); + var mockServiceDetails = fixtures.services.get(); + var mockDSGet = stubs.forms.core.dataSources.get(); + var mockGetDeployedService = stubs.services.appmbaas.getDeployedService(); + mockGetDeployedService['@global'] = true; + var mockUpdateSingleDataSource = stubs.dataSourceUpdater.handlers.updateSingleDataSource(); + + var dataSourceUpdaterModule = function () { + return { + handlers: { + updateSingleDataSource: mockUpdateSingleDataSource + } + }; + }; + dataSourceUpdaterModule['@global'] = true; + + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + get: mockDSGet + } + } + }, + '../../../services/appmbaas/getDeployedService': mockGetDeployedService, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + }, + '../../../dataSourceUpdater': dataSourceUpdaterModule + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .post(baseUrl + '/' + mockDs._id + "/refresh") + .send(mockDs) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockDs._id); + assert.equal(res.body.serviceGuid, mockServiceDetails.guid); + assert.equal(res.body.currentStatus.status, 'ok'); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + err); + + sinon.assert.calledTwice(mockDSGet); + sinon.assert.calledOnce(mockUpdateSingleDataSource); + sinon.assert.calledOnce(mockGetDeployedService); + + done(); + }); + }, + "It Should Validate A Single Data Source": function (done) { + + var mockDs = fixtures.forms.dataSources.get(); + var mockServiceDetails = fixtures.services.get(); + var mockDSValidate = stubs.forms.core.dataSources.validate(); + var mockGetDeployedService = 
stubs.services.appmbaas.getDeployedService(); + mockGetDeployedService['@global'] = true; + var mockRequestEndpointData = stubs.dataSourceUpdater.handlers.requestEndpointData(); + + var dataSourceUpdaterModule = function () { + return { + handlers: { + requestEndpointData: mockRequestEndpointData + } + }; + }; + dataSourceUpdaterModule['@global'] = true; + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + validate: mockDSValidate + } + } + }, + '../../../services/appmbaas/getDeployedService': mockGetDeployedService, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + }, + '../../../dataSourceUpdater': dataSourceUpdaterModule + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .post(baseUrl + "/validate") + .send(mockDs) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockDs._id); + assert.equal(res.body.serviceGuid, mockServiceDetails.guid); + assert.equal(res.body.validationResult.valid, true); + assert.ok(res.body.data[0], "Expected A Data Set"); + }) + .end(function (err) { + if (err) { + logger.error(err); + } + assert.ok(!err, "Expected No Error " + err); + + sinon.assert.calledOnce(mockDSValidate); + sinon.assert.calledOnce(mockRequestEndpointData); + sinon.assert.calledOnce(mockGetDeployedService); + + done(); + }); + }, + "It Should Validate A Single Data Source No Deployed Service": function (done) { + + var mockDs = fixtures.forms.dataSources.get(); + var mockServiceDetails = fixtures.services.get(); + var mockDSValidate = stubs.forms.core.dataSources.validate(); + var mockGetDeployedService = stubs.services.appmbaas.getDeployedService(true); + mockGetDeployedService['@global'] = true; + var mockRequestEndpointData = stubs.dataSourceUpdater.handlers.requestEndpointData(); + + var dataSourceUpdaterModule = function () { + return { + handlers: { + requestEndpointData: mockRequestEndpointData + } + }; + }; + dataSourceUpdaterModule['@global'] = true; + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + dataSources: { + validate: mockDSValidate + } + } + }, + '../../../services/appmbaas/getDeployedService': mockGetDeployedService, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + }, + '../../../dataSourceUpdater': dataSourceUpdaterModule + }; + + var dsRouter = proxyquire('../../../../lib/routes/forms/dataSources/router.js', mocks); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, dsRouter); + + supertest(app) + .post(baseUrl + "/validate") + .send(mockDs) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body._id, mockDs._id); + assert.equal(res.body.serviceGuid, mockServiceDetails.guid); + assert.equal(res.body.validationResult.valid, false); + assert.ok(res.body.validationResult.message.indexOf("deployed") > -1); + assert.ok(!res.body.data, "Expected NO Data Set"); + }) + .end(function (err) { + if (err) { + logger.error(err); + } + assert.ok(!err, "Expected No Error " + err); + + sinon.assert.notCalled(mockDSValidate); + sinon.assert.notCalled(mockRequestEndpointData); + 
sinon.assert.calledOnce(mockGetDeployedService); + + done(); + }); + } +}; diff --git a/test/unit/routes/forms/test-export-router.js b/test/unit/routes/forms/test-export-router.js new file mode 100644 index 0000000..3da45ea --- /dev/null +++ b/test/unit/routes/forms/test-export-router.js @@ -0,0 +1,94 @@ +var supertest = require('supertest'); +var proxyquire = require('proxyquire'); +var fixtures = require('../../../fixtures'); +var fhConfig = require('fh-config'); +var sinon = require('sinon'); +var assert = require('assert'); +var express = require('express'); +fhConfig.setRawConfig(fixtures.config); +var jobMocks = [{ + id: 'id', + type: 'export' +}, { + id: 'id2', + type: 'export' +}]; + +var fakeUrl = 'http://example.com/file.tar'; + +var mockMiddleware = { + find: sinon.spy(function(req, res, next, jobId) { + req.job = jobMocks[0]; + next(); + }), + filteredJobs: function(req, res, next) { + req.jobs = jobMocks; + next(); + }, + create: function(req, res, next) { + req.job = jobMocks[1]; + next(); + }, + ensureFinishedAndRegistered: sinon.spy(function(req, res, next) { + next(); + }), + generateURL: sinon.spy(function(req, res, next) { + req.fileUrl = fakeUrl; + next(); + }) +}; + +exports['handlers/export.js'] = { + before: function(done) { + var router = proxyquire('../../../../lib/routes/forms/submissions/handlers/export', { + '../../../../middleware/buildJobMiddleware': function() { + return mockMiddleware; + } + }); + var app = express(); + + this.app = app; + app.use(router); + done(); + }, + 'GET export/': { + 'should return all jobs': function(done) { + supertest(this.app) + .get('/export') + .expect('Content-Type', /json/) + .expect(200, jobMocks, done); + } + }, + 'GET export/:id': { + 'should return a single job by id': function(done) { + supertest(this.app) + .get('/export/123') + .expect(function() { + // called with correct Id + mockMiddleware.find.calledWith(sinon.match.object, + sinon.match.object, + sinon.match.function, + '123'); + }) + .expect(200, jobMocks[0], done); + } + }, + 'POST export/': { + 'should create a job by invoking the runner function': function(done) { + supertest(this.app) + .post('/export') + .expect(200, jobMocks[1], done); + } + }, + 'POST export/:id': { + 'should return a file download uri': function(done) { + supertest(this.app) + .post('/export/123') + .expect(function() { + assert.ok(mockMiddleware.ensureFinishedAndRegistered.called); + assert.ok(mockMiddleware.generateURL.called); + }) + .expect(200, fakeUrl, done); + } + } +}; \ No newline at end of file diff --git a/test/unit/routes/forms/test-submissions-router.js b/test/unit/routes/forms/test-submissions-router.js new file mode 100644 index 0000000..1459114 --- /dev/null +++ b/test/unit/routes/forms/test-submissions-router.js @@ -0,0 +1,297 @@ +var supertest = require('supertest'); +var proxyquire = require('proxyquire'); +var fixtures = require('../../../fixtures'); +var stubs = require('../../../stubs'); +var express = require('express'); +var assert = require('assert'); +var util = require('util'); +var fhConfig = require('fh-config'); +fhConfig.setRawConfig(fixtures.config); +var logger = fhConfig.getLogger(); +var sinon = require('sinon'); +var bodyParser = require('body-parser'); +var _ = require('underscore'); +var CONSTANTS = require('../../../../lib/constants'); + +describe("Admin Submissions Router", function(){ + var baseRoutePath = '/:domain/:environment/appforms/submissions'; + var baseUrl = '/' + fixtures.mockDomain + '/' + fixtures.mockEnv + '/appforms/submissions'; + 
var expectedFilter = 'testfiltervalue'; + + var getMaxLimitValueStub = sinon.stub(); + getMaxLimitValueStub.withArgs(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_MAX_LIMIT_KEY).returns(fixtures.config.fhmbaas.pagination.maxLimit); + getMaxLimitValueStub.withArgs(CONSTANTS.CONFIG_PROPERTIES.PAGINATION_DEFAULT_LIMIT_KEY).returns(fixtures.config.fhmbaas.pagination.defaultLimit); + getMaxLimitValueStub.throws("Invalid Arguments"); + + beforeEach(function(done){ + getMaxLimitValueStub.reset(); + done(); + }); + + it("List Submissions", function(done){ + var expectedPage = 3; + var expectedLimit = 20; + var getSubmissionsStub = stubs.forms.core.getSubmissions({ + expectedPage: expectedPage, + expectedLimit: expectedLimit, + expectedFilter: expectedFilter + }); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + getSubmissions: getSubmissionsStub + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger), + value: getMaxLimitValueStub + } + }; + + var submissionsRouter = proxyquire('../../../../lib/routes/forms/submissions/router.js', mocks); + + fhConfig.setRawConfig(fixtures.config); + + var app = express(); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, submissionsRouter()); + + supertest(app) + .get(baseUrl + '/?page='+expectedPage + '&limit=' + expectedLimit + "&filter=" + expectedFilter) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (response) { + assert.ok(_.isArray(response.body.submissions), "Expected an array of submissions"); + assert.ok(_.isNumber(response.body.total), "Expected a number for total submissions"); + assert.ok(_.isNumber(response.body.pages), "Expected a number for total pages"); + }) + .end(function (err, res) { + if (err) { + logger.error(err, res); + } + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + sinon.assert.calledOnce(getSubmissionsStub); + sinon.assert.calledWith(getMaxLimitValueStub, CONSTANTS.CONFIG_PROPERTIES.PAGINATION_MAX_LIMIT_KEY); + sinon.assert.calledWith(getMaxLimitValueStub, CONSTANTS.CONFIG_PROPERTIES.PAGINATION_DEFAULT_LIMIT_KEY); + + done(); + }); + }); + + it("Search Submissions", function(done){ + var expectedPage = 3; + var expectedLimit = 20; + var searchSubmissionsStub = stubs.forms.core.submissionSearch({ + expectedPage: expectedPage, + expectedLimit: expectedLimit + }); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + submissionSearch: searchSubmissionsStub + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger), + value: getMaxLimitValueStub + } + }; + + var submissionsRouter = proxyquire('../../../../lib/routes/forms/submissions/router.js', mocks); + + fhConfig.setRawConfig(fixtures.config); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, submissionsRouter()); + + supertest(app) + .post(baseUrl + '/search?page='+expectedPage + '&limit=' + expectedLimit) + .send({ + queryFields: { + + }, + clauseOperator: "and" + }) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (response) { + assert.ok(_.isArray(response.body.submissions), "Expected an array of submissions"); + assert.ok(_.isNumber(response.body.total), "Expected a number for total submissions"); + assert.ok(_.isNumber(response.body.pages), "Expected a number for total pages"); + }) + .end(function (err, res) { + if (err) { + 
logger.error(err, res); + } + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + sinon.assert.calledOnce(searchSubmissionsStub); + sinon.assert.calledWith(getMaxLimitValueStub, CONSTANTS.CONFIG_PROPERTIES.PAGINATION_MAX_LIMIT_KEY); + sinon.assert.calledWith(getMaxLimitValueStub, CONSTANTS.CONFIG_PROPERTIES.PAGINATION_DEFAULT_LIMIT_KEY); + + done(); + }); + }); + + it("Filter Submissions", function(done){ + var expectedPage = 3; + var expectedLimit = 20; + var expectedFormId = "someformid"; + var expectedProjectId = "someprojectid"; + var getSubmissionsStub = stubs.forms.core.getSubmissions({ + expectedPage: expectedPage, + expectedLimit: expectedLimit, + expectedFormId: expectedFormId, + expectedProjectId: expectedProjectId, + expectedFilter: expectedFilter + }); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + getSubmissions: getSubmissionsStub + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger), + value: getMaxLimitValueStub + } + }; + + var submissionsRouter = proxyquire('../../../../lib/routes/forms/submissions/router.js', mocks); + + fhConfig.setRawConfig(fixtures.config); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function (req, res, next) { + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, submissionsRouter()); + + supertest(app) + .post(baseUrl + '/filter?page='+expectedPage + '&limit=' + expectedLimit + "&filter=" + expectedFilter) + .send({ + formId: expectedFormId, + appId: expectedProjectId + }) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (response) { + assert.ok(_.isArray(response.body.submissions), "Expected an array of submissions"); + assert.ok(_.isNumber(response.body.total), "Expected a number for total submissions"); + assert.ok(_.isNumber(response.body.pages), "Expected a number for total pages"); + }) + .end(function (err, res) { + if (err) { + logger.error(err, res); + } + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + sinon.assert.calledOnce(getSubmissionsStub); + sinon.assert.calledWith(getMaxLimitValueStub, CONSTANTS.CONFIG_PROPERTIES.PAGINATION_MAX_LIMIT_KEY); + sinon.assert.calledWith(getMaxLimitValueStub, CONSTANTS.CONFIG_PROPERTIES.PAGINATION_DEFAULT_LIMIT_KEY); + + done(); + }); + }); + + it("Exporting A Submission PDF", function(done){ + var mockSubmissionId = "somesubmissionid"; + var mockPDFFileLocation = "/some/path/to/generated/file.pdf"; + var mockCoreLocation = "http://testing.feedhenry.me"; + + var generateSubmissionPdfStub = sinon.stub(); + generateSubmissionPdfStub.callsArgWith(1, undefined, mockPDFFileLocation); + + var getValueStub = sinon.stub(); + getValueStub.withArgs(sinon.match('fhmbaas.pdfExportDir')).returns(mockPDFFileLocation); + + var createReadStreamStub = sinon.stub().withArgs(sinon.match(mockPDFFileLocation)).returns(new fixtures.MockReadStream()); + + var mocks = { + 'fh-forms': { + '@global': true, + core: { + generateSubmissionPdf: generateSubmissionPdfStub + } + }, + 'fh-config': { + '@global': true, + value: getValueStub, + getLogger: sinon.stub().returns(logger) + }, + 'fs': { + '@global': true, + createReadStream: createReadStreamStub + } + }; + + var submissionsRouter = proxyquire('../../../../lib/routes/forms/submissions/router.js', mocks); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function(req, res, next){ + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, submissionsRouter()); + + supertest(app) + 
.post(baseUrl + '/' + mockSubmissionId + "/exportpdf") + .send({ + coreLocation: mockCoreLocation + }) + .expect(200) + .expect('Content-Type', 'application/pdf') + .expect('Content-Disposition', 'attachment; filename="somesubmissionid.pdf"') + .end(function (err) { + if(err){ + logger.error(err); + } + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + sinon.assert.calledOnce(createReadStreamStub); + sinon.assert.calledOnce(generateSubmissionPdfStub); + sinon.assert.calledWith(generateSubmissionPdfStub, sinon.match({ + uri: fixtures.mockMongoUrl, + _id: mockSubmissionId, + pdfExportDir: mockPDFFileLocation, + filesAreRemote: false, + location: sinon.match(mockCoreLocation) + })); + + done(); + }); + }); + +}); diff --git a/test/unit/routes/services/test-services-routes.js b/test/unit/routes/services/test-services-routes.js new file mode 100644 index 0000000..dcc28e7 --- /dev/null +++ b/test/unit/routes/services/test-services-routes.js @@ -0,0 +1,190 @@ +var supertest = require('supertest'); +var proxyquire = require('proxyquire'); +var fixtures = require('../../../fixtures'); +var stubs = require('../../../stubs'); +var express = require('express'); +var assert = require('assert'); +var util = require('util'); +var fhConfig = require('fh-config'); +fhConfig.setRawConfig(fixtures.config); +var logger = fhConfig.getLogger(); +var sinon = require('sinon'); +var bodyParser = require('body-parser'); +var _ = require('underscore'); + +var baseRoutePath = '/:domain/:environment/services'; +var baseUrl = '/' + fixtures.mockDomain + '/' + fixtures.mockEnv + '/services'; + +function createMocks(mockServiceModel){ + return { + 'fh-service-auth': { + '@global': true, + model: { + get: mockServiceModel + } + }, + 'fh-config': { + '@global': true, + getLogger: sinon.stub().returns(logger) + }, + 'fh-mbaas-middleware': { + '@global': true, + envMongoDb: { + getOrCreateEnvironmentDatabase: sinon.stub().callsArg(2) + } + } + }; +} + +module.exports = { + "It Should List Services": function(done){ + var mockService = fixtures.services.get(); + var find = stubs.fhServiceAuth.model.find(); + var mockServiceModel = stubs.fhServiceAuth.model.get({ + find: find + }); + + var servicesRouter = proxyquire('../../../../lib/routes/services/router.js', createMocks(mockServiceModel)); + + var app = express(); + + app.use(function(req, res, next){ + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, servicesRouter); + + supertest(app) + .get(baseUrl + '/') + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body[0].guid, mockService.guid); + }) + .end(function (err) { + if(err){ + console.error(err); + } + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + assert.equal(1, mockServiceModel.callCount); + assert.equal(1, find.callCount); + + done(); + }); + }, + "It Should Remove Services": function(done){ + var mockService = fixtures.services.get(); + var remove = stubs.fhServiceAuth.model.remove(); + var findOne = stubs.fhServiceAuth.model.findOne({ + remove: remove + }); + var mockServiceModel = stubs.fhServiceAuth.model.get({ + findOne: findOne + }); + + var servicesRouter = proxyquire('../../../../lib/routes/services/router.js', createMocks(mockServiceModel)); + + var app = express(); + + app.use(function(req, res, next){ + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, servicesRouter); + + supertest(app) + .delete(baseUrl + '/' + mockService.guid) + .expect(204) + .end(function (err) { + 
assert.ok(!err, "Expected No Error " + util.inspect(err)); + + assert.equal(1, mockServiceModel.callCount); + assert.equal(1, findOne.callCount); + assert.equal(1, remove.callCount); + + done(); + }); + }, + "It Should Get A Single Service": function(done){ + var mockService = fixtures.services.get(); + var findOne = stubs.fhServiceAuth.model.findOne(); + var mockSericeModel = stubs.fhServiceAuth.model.get({ + findOne: findOne + }); + + + var servicesRouter = proxyquire('../../../../lib/routes/services/router.js', createMocks(mockSericeModel)); + + var app = express(); + + app.use(function(req, res, next){ + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, servicesRouter); + + supertest(app) + .get(baseUrl + '/' + mockService.guid) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body.guid, mockService.guid); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + assert.equal(1, mockSericeModel.callCount); + assert.equal(1, findOne.callCount); + + done(); + }); + }, + "It Should Deploy A Service": function(done){ + var mockDs = fixtures.forms.dataSources.get(); + var mockService = fixtures.services.get(); + var save = stubs.fhServiceAuth.model.save(); + var findOneOrCreate = stubs.fhServiceAuth.model.findOneOrCreate({ + save: save + }); + var mockSericeModel = stubs.fhServiceAuth.model.get({ + findOneOrCreate: findOneOrCreate + }); + + var servicesRouter = proxyquire('../../../../lib/routes/services/router.js', createMocks(mockSericeModel)); + + var app = express(); + + app.use(bodyParser.json()); + + app.use(function(req, res, next){ + req.mongoUrl = fixtures.mockMongoUrl; + next(); + }); + + app.use(baseRoutePath, servicesRouter); + + supertest(app) + .post(baseUrl + '/' + mockService.guid + "/deploy") + .send(_.extend(mockService, {dataSources: [mockDs._id]})) + .expect(200) + .expect('Content-Type', /json/) + .expect(function (res) { + assert.equal(res.body.guid, mockService.guid); + assert.ok(res.body.dataSources); + }) + .end(function (err) { + assert.ok(!err, "Expected No Error " + util.inspect(err)); + + assert.equal(1, mockSericeModel.callCount); + assert.equal(1, findOneOrCreate.callCount); + assert.equal(1, save.callCount); + + done(); + }); + } +}; + diff --git a/test/unit/services/test-deleteEnvironmentData.js b/test/unit/services/test-deleteEnvironmentData.js new file mode 100644 index 0000000..b4d4cd5 --- /dev/null +++ b/test/unit/services/test-deleteEnvironmentData.js @@ -0,0 +1,92 @@ +'use strict'; + +const UNDER_TEST = "../../../lib/services/environment/deleteEnvironmentData.js"; +const proxyquire = require('proxyquire'); +const assert = require('assert'); + +module.exports = { + "test_delete_environment_data_no_apps": function test_delete_environment_data_no_apps(done) { + let dropEnvironmentDatabaseCalled = 0; + let mocks = { + '../../services/appmbaas/listDeployedApps': function(domain, environment, callback){ + return callback(undefined,[]); + }, + 'fh-mbaas-middleware': { + "envMongoDb":{ + "dropEnvironmentDatabase": function(domain,environment,callback){ + dropEnvironmentDatabaseCalled++ ; + assert.equal(domain,"testing","should have been called with testing domain"); + assert.equal(environment,"development","should have been called with development environment"); + return callback(); + } + } + } + }; + let delEnv = proxyquire(UNDER_TEST,mocks); + delEnv("testing","development", function complete(err) { + assert.ok(! 
err, " did not expect an error deleting environment data"); + assert.ok(dropEnvironmentDatabaseCalled === 1,"expected dropEnvironmentDatabase to be called once"); + done(); + }); + }, + "test_delete_environment_data_apps": function test_delete_environment_data_apps(done) { + let removeAppDbCalled = 0; + let dropEnvironmentDatabaseCalled = 0; + let mocks = { + '../../services/appmbaas/listDeployedApps': function(domain, environment, callback){ + return callback(undefined,[{ + "name":"test" + }]); + }, + 'fh-mbaas-middleware': { + "envMongoDb":{ + "dropEnvironmentDatabase": function(domain,environment,callback){ + dropEnvironmentDatabaseCalled++; + return callback(); + } + } + }, + '../../services/appmbaas/removeAppDb.js':function(mongo, domain, app, environment, callback) { + assert.ok(app.name,"test","expected an app with name test"); + removeAppDbCalled++; + return callback(); + } + }; + let delEnv = proxyquire(UNDER_TEST,mocks); + delEnv("testing","development", function complete(err) { + assert.ok(! err, " did not expect an error deleting environment data"); + assert.ok(removeAppDbCalled === 1, "expected removeAppDb to be called once"); + assert.ok(dropEnvironmentDatabaseCalled === 1, "expected dropEnvironmentDatabase to be called once"); + done(); + }); + }, + "test_delete_environment_error_dropping_db": function test_delete_environment_error_dropping_db(done){ + let mocks = { + '../../services/appmbaas/listDeployedApps': function(domain, environment, callback){ + return callback(undefined,[{ + "name":"test" + },{ + "name":"test2" + }]); + }, + 'fh-mbaas-middleware': { + "envMongoDb":{ + "dropEnvironmentDatabase": function(domain,environment,callback){ + return callback(); + } + } + }, + '../../services/appmbaas/removeAppDb.js':function(mongo, domain, app, environment, callback) { + assert.ok(app.name,"test","expected an app with name test"); + return callback(new Error("failed to remove db")); + } + }; + let delEnv = proxyquire(UNDER_TEST,mocks); + delEnv("testing","development", function complete(err) { + assert.ok(err, " expected an error removing the apps db"); + assert.ok(Array.isArray(err),"expected the err to be an array"); + assert.ok(err.length === 2, "expected 2 errors in the array"); + done(); + }); + } +}; diff --git a/test/unit/services/test-removeAppDb.js b/test/unit/services/test-removeAppDb.js new file mode 100644 index 0000000..6284514 --- /dev/null +++ b/test/unit/services/test-removeAppDb.js @@ -0,0 +1,83 @@ +"use strict"; + +const proxyquire = require('proxyquire'); +const UNDER_TEST = "../../../lib/services/appmbaas/removeAppDb.js"; +const assert = require('assert'); +const util = require('util'); + +module.exports = { + "test_remove_app_db_ok": function test_remove_app_db_ok(done) { + let appModelRemoved = false; + let mongoDropCalled = false; + let remove = proxyquire(UNDER_TEST, {}); + let appModel = { + "type": "openshift3", + "dbConf": { + "user": "test" + }, + "remove": function (cb) { + appModelRemoved = true; + return cb(); + } + }; + let mongo = { + "dropDb": function (config, user, name, callback) { + mongoDropCalled = true; + return callback(); + } + }; + + remove(mongo, "testing", appModel, "dev", function complete(err) { + assert.ok(!err, " did not expect an error removing app db " + util.inspect(err)); + assert.ok(appModelRemoved, "expected the appmodel to have been removed"); + assert.ok(mongoDropCalled, "expected the mongoDrop to have been called"); + done(); + }); + }, + "test_remove_app_db_error": function test_remove_app_db_error(done) { + let 
remove = proxyquire(UNDER_TEST, {}); + let appModel = { + "type": "openshift3", + "dbConf": { + "user": "test" + }, + "remove": function (cb) { + return cb(new Error("failed to remove db")); + } + }; + let mongo = { + "dropDb": function (config, user, name, callback) { + return callback(); + } + }; + remove(mongo,"testing",appModel,"dev",function complete(err){ + assert.ok(err, "expected an error removing the app db"); + done(); + }); + }, + "test_remove_app_db_not_os3": function test_remove_app_db_not_os3(done) { + let ditchCalled = false; + let remove = proxyquire(UNDER_TEST, { + '../../util/ditchhelper.js':{ + "removeAppCollection":function (appname, cb){ + ditchCalled = true; + return cb(); + } + } + }); + let appModel = { + "type": "dynoman", + "dbConf": { + "user": "test" + }, + "remove": function (cb) { + return cb(); + } + }; + remove({},"testing",appModel,"dev",function complete(err){ + assert.ok(! err, "did not expect an error removing the app db"); + assert.ok(ditchCalled, "expected ditch to be called"); + done(); + }); + } +}; diff --git a/test/unit/storage/test_router.js b/test/unit/storage/test_router.js new file mode 100644 index 0000000..b54e8f9 --- /dev/null +++ b/test/unit/storage/test_router.js @@ -0,0 +1,121 @@ +const mockgoose = require('mockgoose'); +const mongoose = require('mongoose'); +mockgoose(mongoose); +const proxyquire = require('proxyquire'); +const express = require('express'); +const fhConfig = require('fh-config'); +const fixtures = require('../../fixtures'); +const path = require('path'); +const url = require('url'); +const supertest = require('supertest'); +fhConfig.setRawConfig(fixtures.config); + +var router; +var storage; +var models; +var app; + +var FAKE_JOB_ID; + +// must be an absolute path +var testPath = path.resolve(__dirname, '../../../README.md'); + +// Create a dummy import job to allow file uploads +// (import jobs are created before the upload has finished) +function createFakeJob(Model, cb) { + var job = new Model(); + job.jobType = "import"; + job.domain = "testing"; + job.environment = 'dev'; + job.appid = "ad7eykyyaqpcei52a5owfi2a"; + job.metadata = { + fileSize: 1024, + filePath: "/tmp", + uploadFinished: false + }; + job.save(cb); +} + +// All call-through +var mockRouter = {}; + +exports['storage#router'] = { + before: function(done) { + mongoose.connect('test', function() { + models = require('../../../lib/models'); + models.init(mongoose.connection, function () { + storage = proxyquire('../../../lib/storage', { + './models/FileSchema': models, + + // This seems to be required for the router to pick up the mocked fhConfig + './impl/router.js': proxyquire('../../../lib/storage/impl/router', mockRouter) + }); + + router = storage.router; + app = express(); + app.use('/api/storage/', router); + + createFakeJob(models.AppdataJob, function (err, job) { + FAKE_JOB_ID = job._id; + done(); + }); + }); + }); + }, + after: function(done) { + mongoose.connection.close(done); + }, + 'GET /api/storage/:resourceId': { + before: function(done) { + // register test file + var self = this; + + storage.registerFile(testPath, function(err, model) { + self.file = model; + storage.generateURL(model._id, null, 600, function(err, urlObj) { + self.url = url.parse(urlObj.url).path; + storage.generateURL(model._id, FAKE_JOB_ID, 600, function (err, uploadUrlObj) { + self.uploadUrl = url.parse(uploadUrlObj.url).path; + done(); + }); + }); + }); + }, + 'should supply a file download': function(done) { + supertest(app) + .get(this.url) + .expect(200) + 
.expect('Content-Type', /octet-stream/) + .expect('Content-Disposition', new RegExp(path.basename(testPath))) + .end(done); + }, + 'file uploads should work': function(done) { + var location = 'test/fixtures/appdata/import/export.tar'; + supertest(app) + .post(this.uploadUrl) + .attach('file', location) + .expect(200) + .end(done); + }, + 'should 404 on invalid file resource': function(done) { + var parsed = url.parse(this.url); + parsed.pathname = path.resolve(parsed.pathname, "../non-existant-file"); + var fakeUrl = url.format(parsed); + + supertest(app) + .get(fakeUrl) + .expect(404) + .end(done); + }, + 'should 401 on invalid token': function(done) { + var parsed = url.parse(this.url); + parsed.search = '?token=invalid'; + var fakeUrl = url.format(parsed); + + supertest(app) + .get(fakeUrl) + .expect(401) + .end(done); + } + } +}; diff --git a/test/unit/storage/test_storage.js b/test/unit/storage/test_storage.js new file mode 100644 index 0000000..645e60b --- /dev/null +++ b/test/unit/storage/test_storage.js @@ -0,0 +1,96 @@ +const assert = require('assert'); +const path = require('path'); +const proxyquire = require('proxyquire'); +const fhConfig = require('fh-config'); +const mockgoose = require('mockgoose'); +const mongoose = require('mongoose'); +mockgoose(mongoose); +const fixtures = require('../../fixtures'); +fhConfig.setRawConfig(fixtures.config); + +// must be an absolute path +var testPath = path.resolve(__dirname, '../../../README.md'); + +var models; +var storage; + +function createModel(done) { + var self = this; + storage.registerFile(testPath, function(err, model) { + self.model = model; + done(err); + }); +} + +exports['storage'] = { + before: function(done) { + mongoose.connect('test', function() { + models = require('../../../lib/storage/models/FileSchema'); + models.createModel(mongoose.connection); + storage = proxyquire('../../../lib/storage', { + './models/FileSchema': models + }); + done(); + }); + }, + after: function(done) { + mongoose.connection.close(done); + }, + '#registerFile': { + 'should register new file': function(done) { + storage.registerFile(testPath, function(err, model) { + assert.ok(!err); + assert.equal(model.fileName, path.basename(testPath)); + done(); + }); + }, + 'should validate file existance': function(done) { + var path = '/doesnt/exist.txt'; + storage.registerFile(path, function(err) { + assert.ok(err); + done(); + }); + }, + 'should not accept folders': function(done) { + var p = path.resolve(__dirname, '../'); + storage.registerFile(p, function(err) { + assert.ok(err && /file/.test(err.message)); + done(); + }); + }, + 'should not accept relative paths': function(done) { + var p = '../'; + storage.registerFile(p, function(err) { + assert.ok(err && /absolute/.test(err.message)); + done(); + }); + } + }, + '#getFileDetails': { + before: createModel, + 'should return existing model': function(done) { + var self = this; + storage.getFileDetails(self.model._id, function(err, model) { + assert.equal(self.model.filename, model.filename); + done(); + }); + }, + 'should error on non-existing id': function(done) { + storage.getFileDetails('123', function(err) { + assert.ok(err && /id/.test(err.message)); + done(); + }); + } + }, + '#generateURL': { + before: createModel, + 'should generate a url for download': function(done) { + var self = this; + storage.generateURL(this.model._id, null, 600, function(err, urlObj) { + assert.ok(!err); + assert.ok((new RegExp(self.model._id)).test(urlObj.url)); + done(); + }); + } + } +}; diff --git 
a/test/unit/util/test-common.js b/test/unit/util/test-common.js new file mode 100644 index 0000000..2abf20d --- /dev/null +++ b/test/unit/util/test-common.js @@ -0,0 +1,132 @@ +var util = require('util'); +var assert = require('assert'); + +var cfg = { + mongo: { + host: 'localhost', + port: 8888, + admin_auth : { + user: 'admin', + pass: 'admin' + } + }, + fhdfc: { + "dynofarm": "http://localhost:9000", + "username":"DYNOFARM_USERNAME", + "_password": "DYNOFARM_PASSWORD", + "loglevel": "warn", + "cache_timeout": 1234123 + } +}; + +var fhconfig = require('fh-config'); +fhconfig.setRawConfig(cfg); + +var common = require('lib/util/common.js'); + +exports.it_should_handle_error = function(finish) { + var req = { + id: 123 + }; + var res = { + statusCode: 0, + end: function(msg) { + assert.equal(res.statusCode, 1, 'Expected statusCode of 1, got: ' + res.statusCode); + assert.notEqual(msg.indexOf('ignore'), -1); + finish(); + } + }; + common.handleError('dummy error', 'ignore the dummy error', 1, req, res); +}; + +exports.it_should_inspect_error = function(finish) { + common.logError({}, "", "", {}); + finish(); +} + +exports.it_should_get_ip_address_header = function(finish){ + var req = { + "headers": { + "x-forwarded-for" : "192.168.1.1" + } + } + + var testIPAddress = common.getIPAddress(req); + assert.equal("192.168.1.1", testIPAddress); + return finish(); +}; + +exports.it_should_get_ip_address_connection = function(finish){ + var req = { + "connection" : { + "remoteAddress" : "192.168.1.2" + } + } + + var testIPAddress = common.getIPAddress(req); + assert.strictEqual("192.168.1.2", testIPAddress); + return finish(); +}; + + +exports.it_should_get_version = function(finish) { + common.getVersion(function(err, version) { + assert.ok(!err, 'Error: ' + util.inspect(err)); + assert.ok(version); + + // call it twice to coverage pkg caching in common.js + common.getVersion(function(err, version2) { + assert.equal(version, version2); + return finish(); + }); + }); +}; + +exports.it_should_sort_objects = function(finish) { + var accessBos = { + 'a.b.z': '789', + 'a': '123', + 'a.b': '456' + }; + var sorted = common.sortObject(accessBos); + assert.equal(sorted[0]['a'], accessBos['a']); + assert.equal(sorted[2]['a.b.z'], accessBos['a.b.z']); + + // re-passing the sorted array should fail + var gotException = false; + try { + sorted = common.sortObject(sorted); + }catch(x) { + gotException = true; + } + + assert.ok(gotException, 'Expected assertion error when passing array'); + + finish(); +}; + +exports.it_should_get_random_password = function(finish){ + var pass = common.randomPassword(); + assert.ok(pass.length > 0); + finish(); +}; + +exports.it_should_get_random_user = function(finish) { + var user = common.randomUser(); + assert.ok(user.length > 0); + finish(); +} + +exports.it_should_create_password_of_given_length = function(finish) { + var password_length = 10; + var password = common.make_passwd(password_length, "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890"); + assert.ok(password.length, password_length); + finish(); +} + +exports.it_should_create_empty_password_using_non_positive_length = function(finish) { + var password_length = -1; + var password = common.make_passwd(password_length, "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890"); + assert.equal(password, ''); + finish(); +} diff --git a/test/unit/util/test-dfutils.js b/test/unit/util/test-dfutils.js new file mode 100644 index 0000000..42678cd --- /dev/null +++ b/test/unit/util/test-dfutils.js @@ 
-0,0 +1,52 @@ +var proxyquire = require('proxyquire'); +var assert = require('assert'); + +var DOMAIN = "test"; +var ENVIRONMENT = "test"; +var APPNAME = "testappname"; +var mockdfc = function(){ + return { + dynos : function(args, cb){ + if(args[0] === DOMAIN){ + return cb(); + } else { + return cb(new Error('not found')); + } + }, + + 'read-app': function(args, cb){ + return cb(); + }, + + 'stop-app': function(args, cb){ + assert.equal(args[0], DOMAIN, 'domain does not match'); + assert.equal(args[1], APPNAME, 'appname does not match'); + return cb(); + }, + + 'appmigrate': function(args, cb){ + assert.ok(args[0] === 'start' || args[0] === 'stop'); + assert.equal(args[1], DOMAIN, 'domain does not match'); + assert.equal(args[2], APPNAME, 'appname does not match'); + return cb(); + } + } +}; + +var dfutils = proxyquire('../../../lib/util/dfutils', {'fh-dfc': mockdfc}); + +exports.it_should_stop_app = function(finish){ + dfutils.stopApp(DOMAIN, ENVIRONMENT, APPNAME, function(err){ + assert.ok(!err, 'failed to stop app'); + dfutils.clearInterval(); //need to call this otherwise the test runner will not finish + finish(); + }); +}; + +exports.it_should_migrate_app = function(finish){ + dfutils.migrateAppDb('start', DOMAIN, ENVIRONMENT, APPNAME, function(err){ + assert.ok(!err, 'failed to start app migration'); + dfutils.clearInterval(); //need to call this otherwise the test runner will not finish + finish(); + }); +} diff --git a/test/unit/util/test-ditchhelper.js b/test/unit/util/test-ditchhelper.js new file mode 100644 index 0000000..6ce6c1e --- /dev/null +++ b/test/unit/util/test-ditchhelper.js @@ -0,0 +1,63 @@ +var proxyquire = require('proxyquire'); +var assert = require('assert'); +var sinon = require('sinon'); +var request = require('request'); +var fhconfig = require('fh-config'); + + +var DOMAIN = 'test'; +var ENVIRONMENT = 'test'; +var APPNAME = 'testappname'; + +exports.it_should_call_fh_ditch = function(finish){ + var mock = sinon.mock(request); + var post = mock.expects('post'); + var ditchhelper = proxyquire('../../../lib/util/ditchhelper', {request: mock}); + + var cb1 = sinon.spy(); + var cb2 = sinon.spy(); + + post.callsArg(1); + ditchhelper.doMigrate(DOMAIN, ENVIRONMENT, APPNAME, 'testcachekey', 'testappguid',"http:/test.feedhenry.com", cb1); + post.once(); + post.calledWith({url: 'http://localhost:9999/sys/admin/migratedb', json:{ + cacheKey: 'testcachekey', + domain: DOMAIN, + env: ENVIRONMENT, + appName: APPNAME, + appGuid: 'testappguid' + }}, cb1); + post.verify(); + assert.ok(cb1.calledOnce); + finish(); +}; + +exports.it_should_check_status = function(finish){ + var mock = sinon.mock(request); + var get = mock.expects('get'); + var ditchhelper = proxyquire('../../../lib/util/ditchhelper', {request: mock}); + + var cb = sinon.spy(); + get.callsArgWith(1, null, {}, {}); + ditchhelper.checkStatus(cb); + get.calledWith({url: 'http://localhost:9999/sys/info/status', json: true}, cb); + get.verify(); + assert.ok(get.calledOnce); + finish(); +}; + + +exports.it_should_removeAppCollection = function(finish){ + var mock = sinon.mock(request); + var del = mock.expects('del'); + var ditchhelper = proxyquire('../../../lib/util/ditchhelper', {request: mock}); + + var cb = sinon.spy(); + del.callsArgWith(1, null, {}, {}); + ditchhelper.removeAppCollection("test-app-dev", cb); + del.calledWith({url: 'http://localhost:9999/admin/dropCollection', json: true}, cb); + del.verify(); + assert.ok(del.calledOnce); + finish(); +}; +
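The test files in this patch share one wiring pattern: collaborators are replaced with sinon stubs injected through proxyquire (the '@global': true flag applies a stub to requires made by nested modules as well, not only to the module passed to proxyquire), the router under test is mounted on a throwaway express app, and supertest drives HTTP requests against it. A minimal sketch of that pattern, using a hypothetical './router' module and a hypothetical 'some-forms-lib' dependency (neither is part of this patch), would look like:

var proxyquire = require('proxyquire');
var sinon = require('sinon');
var express = require('express');
var supertest = require('supertest');
var assert = require('assert');

// Hypothetical stub for a dependency of './router'; callsArgWith(1, ...)
// invokes the callback passed as the second argument with (err, result).
var listStub = sinon.stub().callsArgWith(1, null, [{_id: 'ds1'}]);

// '@global': true tells proxyquire to substitute the stub wherever
// 'some-forms-lib' is required, including inside modules that the
// hypothetical './router' itself pulls in.
var router = proxyquire('./router', {
  'some-forms-lib': {'@global': true, list: listStub}
});

var app = express();
app.use('/data_sources', router);

supertest(app)
  .get('/data_sources/')
  .expect(200)
  .end(function(err) {
    assert.ok(!err, 'Expected no error ' + err);
    sinon.assert.calledOnce(listStub);
  });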