From 2eb736474a98b86edc552d00efa1fb4dcdf1bbb8 Mon Sep 17 00:00:00 2001 From: Todd Anderson <127344469+tanderson-ld@users.noreply.github.com> Date: Tue, 21 May 2024 16:37:48 -0500 Subject: [PATCH] chore: updating CICD to support java server sdk package (#19) This PR copies existing java-server-sdk code from the public repo and takes the same approach as other migrated packages. I made a separate commit for the code copy to make that easier to verify. All code in lib/sdk/server should be identical to the [existing repo](https://github.com/launchdarkly/java-server-sdk) with no changes intended except for release-please annotations put on [Version.java](https://github.com/launchdarkly/java-core/pull/19/files#diff-880b7c7410f8d25590e54584048633f4068b855561e85ca77ea767b7514bb82f). Please check them carefully. --- .../package-server-sdk--bug_report.md | 36 + .../package-server-sdk--feature_request.md | 20 + .github/actions/contract-tests/action.yml | 17 + .github/workflows/java-server-sdk.yml | 70 ++ .github/workflows/manual-publish-docs.yml | 1 + .github/workflows/manual-publish.yml | 1 + .github/workflows/release-please.yml | 32 + .release-please-manifest.json | 3 +- .sdk_metadata.json | 13 + lib/sdk/server/.gitignore | 21 + lib/sdk/server/CHANGELOG.md | 854 ++++++++++++++++ lib/sdk/server/CODEOWNERS | 2 + lib/sdk/server/CONTRIBUTING.md | 77 ++ lib/sdk/server/LICENSE | 13 + lib/sdk/server/Makefile | 33 + lib/sdk/server/README.md | 79 ++ lib/sdk/server/SECURITY.md | 5 + lib/sdk/server/benchmarks/Makefile | 36 + lib/sdk/server/benchmarks/build.gradle | 67 ++ lib/sdk/server/benchmarks/settings.gradle | 1 + .../server/LDClientEvaluationBenchmarks.java | 159 +++ .../launchdarkly/sdk/server/TestValues.java | 117 +++ lib/sdk/server/build.gradle | 644 ++++++++++++ .../server/config/checkstyle/checkstyle.xml | 19 + .../server/config/checkstyle/suppressions.xml | 12 + lib/sdk/server/contract-tests/README.md | 7 + .../server/contract-tests/gradle.properties | 1 + 
.../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 59536 bytes .../gradle/wrapper/gradle-wrapper.properties | 5 + lib/sdk/server/contract-tests/gradlew | 234 +++++ lib/sdk/server/contract-tests/gradlew.bat | 89 ++ .../contract-tests/service/build.gradle | 48 + .../contract-tests/service/settings.gradle | 0 .../BigSegmentCallbackRepresentation.java | 17 + .../sdktest/BigSegmentCallbackService.java | 57 ++ .../java/sdktest/BigSegmentStoreFixture.java | 52 + .../java/sdktest/HookCallbackService.java | 42 + .../sdktest/MigrationCallbackService.java | 33 + .../main/java/sdktest/Representations.java | 223 +++++ .../main/java/sdktest/SdkClientEntity.java | 469 +++++++++ .../src/main/java/sdktest/TestHook.java | 89 ++ .../src/main/java/sdktest/TestService.java | 158 +++ .../service/src/main/resources/logback.xml | 20 + lib/sdk/server/contract-tests/settings.gradle | 3 + lib/sdk/server/gradle.properties | 11 + lib/sdk/server/gradle.properties.example | 8 + .../server/gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 59536 bytes .../gradle/wrapper/gradle-wrapper.properties | 5 + lib/sdk/server/gradlew | 234 +++++ lib/sdk/server/gradlew.bat | 89 ++ lib/sdk/server/packaging-test/Makefile | 162 +++ .../packaging-test/run-non-osgi-test.sh | 50 + .../server/packaging-test/run-osgi-test.sh | 57 ++ .../packaging-test/test-app/build.gradle | 70 ++ .../packaging-test/test-app/settings.gradle | 1 + .../testapp/JsonSerializationTestData.java | 44 + .../src/main/java/testapp/TestApp.java | 82 ++ .../main/java/testapp/TestAppGsonTests.java | 42 + .../java/testapp/TestAppJacksonTests.java | 43 + .../java/testapp/TestAppOsgiEntryPoint.java | 17 + lib/sdk/server/scripts/release.sh | 18 + lib/sdk/server/scripts/update-version.sh | 12 + lib/sdk/server/settings.gradle | 1 + .../sdk/json/SdkSerializationExtensions.java | 16 + .../BigSegmentStoreStatusProviderImpl.java | 30 + .../sdk/server/BigSegmentStoreWrapper.java | 167 ++++ .../sdk/server/ClientContextImpl.java | 132 +++ 
.../launchdarkly/sdk/server/Components.java | 457 +++++++++ .../sdk/server/ComponentsImpl.java | 497 ++++++++++ .../launchdarkly/sdk/server/DataModel.java | 733 ++++++++++++++ .../sdk/server/DataModelDependencies.java | 259 +++++ .../sdk/server/DataModelPreprocessing.java | 313 ++++++ .../sdk/server/DataModelSerialization.java | 367 +++++++ .../server/DataSourceStatusProviderImpl.java | 38 + .../sdk/server/DataSourceUpdatesImpl.java | 360 +++++++ .../server/DataStoreStatusProviderImpl.java | 42 + .../sdk/server/DataStoreUpdatesImpl.java | 29 + .../server/DefaultEventProcessorWrapper.java | 82 ++ .../sdk/server/DefaultFeatureRequestor.java | 116 +++ .../launchdarkly/sdk/server/EvalResult.java | 260 +++++ .../sdk/server/EvalResultAndFlag.java | 22 + .../sdk/server/EvaluationOptions.java | 35 + .../sdk/server/EvaluationRecorder.java | 25 + .../launchdarkly/sdk/server/Evaluator.java | 575 +++++++++++ .../sdk/server/EvaluatorBucketing.java | 76 ++ .../sdk/server/EvaluatorHelpers.java | 146 +++ .../sdk/server/EvaluatorInterface.java | 38 + .../sdk/server/EvaluatorOperators.java | 124 +++ .../sdk/server/EvaluatorTypeConversion.java | 48 + .../sdk/server/EvaluatorWithHooks.java | 74 ++ .../sdk/server/EventBroadcasterImpl.java | 117 +++ .../sdk/server/FeatureFlagsState.java | 388 ++++++++ .../sdk/server/FeatureRequestor.java | 26 + .../sdk/server/FlagTrackerImpl.java | 67 ++ .../sdk/server/FlagsStateOption.java | 49 + .../sdk/server/InMemoryDataStore.java | 121 +++ .../sdk/server/InputValidatingEvaluator.java | 302 ++++++ .../launchdarkly/sdk/server/JsonHelpers.java | 127 +++ .../com/launchdarkly/sdk/server/LDClient.java | 526 ++++++++++ .../com/launchdarkly/sdk/server/LDConfig.java | 396 ++++++++ .../com/launchdarkly/sdk/server/Loggers.java | 27 + .../launchdarkly/sdk/server/MigrationOp.java | 20 + .../sdk/server/MigrationOpTracker.java | 307 ++++++ .../sdk/server/MigrationOrigin.java | 16 + .../sdk/server/MigrationStage.java | 78 ++ 
.../MigrationStageEnforcingEvaluator.java | 42 + .../sdk/server/MigrationVariation.java | 36 + .../sdk/server/NoOpEventProcessor.java | 47 + .../PersistentDataStoreStatusManager.java | 110 +++ .../server/PersistentDataStoreWrapper.java | 463 +++++++++ .../sdk/server/PollingProcessor.java | 135 +++ .../sdk/server/SemanticVersion.java | 179 ++++ .../server/ServerSideDiagnosticEvents.java | 97 ++ .../ServerSideEventContextDeduplicator.java | 39 + .../sdk/server/SimpleLRUCache.java | 24 + .../sdk/server/StandardEndpoints.java | 52 + .../sdk/server/StreamProcessor.java | 430 ++++++++ .../sdk/server/StreamProcessorEvents.java | 270 +++++ .../com/launchdarkly/sdk/server/Util.java | 112 +++ .../com/launchdarkly/sdk/server/Version.java | 10 + .../integrations/ApplicationInfoBuilder.java | 84 ++ .../BigSegmentsConfigurationBuilder.java | 165 ++++ .../integrations/EvaluationSeriesContext.java | 47 + .../integrations/EventProcessorBuilder.java | 214 ++++ .../sdk/server/integrations/FileData.java | 142 +++ .../integrations/FileDataSourceBuilder.java | 182 ++++ .../integrations/FileDataSourceImpl.java | 302 ++++++ .../integrations/FileDataSourceParsing.java | 217 ++++ .../sdk/server/integrations/Hook.java | 94 ++ .../sdk/server/integrations/HookMetadata.java | 20 + .../HooksConfigurationBuilder.java | 52 + .../HttpConfigurationBuilder.java | 172 ++++ .../LoggingConfigurationBuilder.java | 164 +++ .../PersistentDataStoreBuilder.java | 198 ++++ .../PollingDataSourceBuilder.java | 69 ++ .../integrations/ServiceEndpointsBuilder.java | 203 ++++ .../StreamingDataSourceBuilder.java | 65 ++ .../sdk/server/integrations/TestData.java | 931 ++++++++++++++++++ .../integrations/WrapperInfoBuilder.java | 42 + .../sdk/server/integrations/package-info.java | 11 + .../integrations/reactor/LDReactorClient.java | 187 ++++ .../reactor/LDReactorClientInterface.java | 243 +++++ .../integrations/reactor/package-info.java | 4 + .../server/interfaces/ApplicationInfo.java | 46 + 
.../BigSegmentStoreStatusProvider.java | 135 +++ .../interfaces/BigSegmentsConfiguration.java | 94 ++ .../server/interfaces/ConsistencyCheck.java | 21 + .../interfaces/DataSourceStatusProvider.java | 386 ++++++++ .../interfaces/DataStoreStatusProvider.java | 261 +++++ .../server/interfaces/FlagChangeEvent.java | 37 + .../server/interfaces/FlagChangeListener.java | 36 + .../sdk/server/interfaces/FlagTracker.java | 85 ++ .../interfaces/FlagValueChangeEvent.java | 62 ++ .../interfaces/FlagValueChangeListener.java | 41 + .../server/interfaces/HttpAuthentication.java | 52 + .../server/interfaces/LDClientInterface.java | 428 ++++++++ .../server/interfaces/ServiceEndpoints.java | 51 + .../sdk/server/interfaces/WrapperInfo.java | 40 + .../sdk/server/interfaces/package-info.java | 16 + .../sdk/server/migrations/Migration.java | 570 +++++++++++ .../server/migrations/MigrationBuilder.java | 183 ++++ .../server/migrations/MigrationExecution.java | 81 ++ .../migrations/MigrationExecutionMode.java | 17 + .../migrations/MigrationMethodResult.java | 107 ++ .../migrations/MigrationSerialOrder.java | 15 + .../sdk/server/migrations/package-info.java | 7 + .../launchdarkly/sdk/server/package-info.java | 10 + .../server/subsystems/BigSegmentStore.java | 45 + .../subsystems/BigSegmentStoreTypes.java | 225 +++++ .../sdk/server/subsystems/ClientContext.java | 217 ++++ .../subsystems/ComponentConfigurer.java | 20 + .../sdk/server/subsystems/DataSource.java | 43 + .../subsystems/DataSourceUpdateSink.java | 87 ++ .../sdk/server/subsystems/DataStore.java | 107 ++ .../sdk/server/subsystems/DataStoreTypes.java | 331 +++++++ .../subsystems/DataStoreUpdateSink.java | 20 + .../subsystems/DiagnosticDescription.java | 27 + .../sdk/server/subsystems/EventProcessor.java | 99 ++ .../sdk/server/subsystems/EventSender.java | 60 ++ .../server/subsystems/HookConfiguration.java | 31 + .../server/subsystems/HttpConfiguration.java | 141 +++ .../subsystems/LoggingConfiguration.java | 65 ++ 
.../subsystems/PersistentDataStore.java | 144 +++ .../subsystems/SerializationException.java | 23 + .../sdk/server/subsystems/package-info.java | 13 + .../com/launchdarkly/sdk/server/Version.java | 8 + .../com/launchdarkly/sdk/server/BaseTest.java | 51 + ...BigSegmentStoreStatusProviderImplTest.java | 61 ++ .../server/BigSegmentStoreWrapperTest.java | 277 ++++++ .../sdk/server/ClientContextImplTest.java | 110 +++ .../sdk/server/DataModelDependenciesTest.java | 381 +++++++ .../server/DataModelPreprocessingTest.java | 380 +++++++ .../server/DataModelSerializationTest.java | 767 +++++++++++++++ .../sdk/server/DataModelTest.java | 111 +++ .../DataSourceStatusProviderImplTest.java | 114 +++ .../sdk/server/DataSourceUpdatesImplTest.java | 444 +++++++++ .../DataStoreStatusProviderImplTest.java | 124 +++ .../sdk/server/DataStoreTestBase.java | 168 ++++ .../sdk/server/DataStoreTestTypes.java | 211 ++++ .../sdk/server/DataStoreUpdatesImplTest.java | 56 ++ .../server/DefaultFeatureRequestorTest.java | 247 +++++ .../sdk/server/EvalResultTest.java | 158 +++ .../sdk/server/EvaluatorBigSegmentTest.java | 195 ++++ .../sdk/server/EvaluatorBucketingTest.java | 166 ++++ .../sdk/server/EvaluatorClauseTest.java | 304 ++++++ .../EvaluatorOperatorsParameterizedTest.java | 187 ++++ .../sdk/server/EvaluatorPrerequisiteTest.java | 226 +++++ .../sdk/server/EvaluatorRuleTest.java | 198 ++++ .../sdk/server/EvaluatorSegmentMatchTest.java | 279 ++++++ .../sdk/server/EvaluatorTargetTest.java | 104 ++ .../sdk/server/EvaluatorTest.java | 483 +++++++++ .../sdk/server/EvaluatorTestBase.java | 10 + .../sdk/server/EvaluatorTestUtil.java | 164 +++ .../sdk/server/EvaluatorWithHookTest.java | 196 ++++ .../sdk/server/EventBroadcasterImplTest.java | 124 +++ .../sdk/server/FeatureFlagsStateTest.java | 225 +++++ .../server/FlagModelDeserializationTest.java | 55 ++ .../sdk/server/FlagTrackerImplTest.java | 107 ++ .../sdk/server/InMemoryDataStoreTest.java | 21 + .../sdk/server/JsonHelpersTest.java | 73 ++ 
.../sdk/server/LDClientBigSegmentsTest.java | 113 +++ .../sdk/server/LDClientEndToEndTest.java | 292 ++++++ .../sdk/server/LDClientEvaluationTest.java | 656 ++++++++++++ .../sdk/server/LDClientEventTest.java | 620 ++++++++++++ .../LDClientExternalUpdatesOnlyTest.java | 67 ++ .../sdk/server/LDClientListenersTest.java | 329 +++++++ .../sdk/server/LDClientOfflineTest.java | 80 ++ .../launchdarkly/sdk/server/LDClientTest.java | 497 ++++++++++ .../launchdarkly/sdk/server/LDConfigTest.java | 268 +++++ .../sdk/server/MigrationBuilderTests.java | 46 + .../server/MigrationConsistencyCheckTest.java | 117 +++ .../sdk/server/MigrationExecutionFixture.java | 82 ++ .../sdk/server/MigrationOpTrackerTests.java | 472 +++++++++ .../sdk/server/MigrationStageTests.java | 51 + ...MigrationStagesExpectedExecutionTests.java | 393 ++++++++ .../sdk/server/MigrationTests.java | 326 ++++++ .../sdk/server/MigrationVariationTests.java | 63 ++ .../sdk/server/ModelBuilders.java | 534 ++++++++++ .../PersistentDataStoreWrapperOtherTest.java | 115 +++ .../PersistentDataStoreWrapperTest.java | 731 ++++++++++++++ .../sdk/server/PollingProcessorTest.java | 405 ++++++++ .../RolloutRandomizationConsistencyTest.java | 115 +++ .../sdk/server/SemanticVersionTest.java | 248 +++++ .../ServerSideDiagnosticEventsTest.java | 514 ++++++++++ ...erverSideEventContextDeduplicatorTest.java | 66 ++ .../sdk/server/SimpleLRUCacheTest.java | 60 ++ .../sdk/server/StreamProcessorEventsTest.java | 130 +++ .../sdk/server/StreamProcessorTest.java | 877 +++++++++++++++++ .../sdk/server/TestComponents.java | 424 ++++++++ .../com/launchdarkly/sdk/server/TestUtil.java | 257 +++++ .../com/launchdarkly/sdk/server/UtilTest.java | 79 ++ .../ApplicationInfoBuilderTest.java | 27 + .../integrations/BigSegmentStoreTestBase.java | 181 ++++ .../BigSegmentStoreTestBaseTest.java | 89 ++ .../BigSegmentsConfigurationBuilderTest.java | 82 ++ .../ClientWithFileDataSourceTest.java | 48 + .../server/integrations/DataLoaderTest.java | 195 ++++ 
.../EventProcessorBuilderTest.java | 115 +++ .../FileDataSourceAutoUpdateTest.java | 138 +++ .../integrations/FileDataSourceTest.java | 171 ++++ .../integrations/FileDataSourceTestData.java | 50 + .../integrations/FlagFileParserJsonTest.java | 10 + .../integrations/FlagFileParserTestBase.java | 88 ++ .../integrations/FlagFileParserYamlTest.java | 10 + .../HookConfigurationBuilderTest.java | 30 + .../HttpConfigurationBuilderTest.java | 234 +++++ .../LoggingConfigurationBuilderTest.java | 89 ++ .../integrations/MockPersistentDataStore.java | 167 ++++ .../PersistentDataStoreBuilderTest.java | 64 ++ .../PersistentDataStoreGenericTest.java | 74 ++ .../PersistentDataStoreTestBase.java | 378 +++++++ .../PollingDataSourceBuilderTest.java | 37 + .../ServiceEndpointsBuilderTest.java | 84 ++ .../StreamingDataSourceBuilderTest.java | 34 + .../sdk/server/integrations/TestDataTest.java | 465 +++++++++ .../integrations/TestDataWithClientTest.java | 126 +++ .../integrations/WrapperInfoBuilderTest.java | 31 + .../sdk/server/integrations/package-info.java | 4 + .../BigSegmentMembershipBuilderTest.java | 125 +++ .../DataSourceStatusProviderTypesTest.java | 110 +++ .../DataStoreStatusProviderTypesTest.java | 79 ++ .../server/interfaces/DataStoreTypesTest.java | 164 +++ .../HttpAuthenticationTypesTest.java | 18 + .../sdk/server/interfaces/package-info.java | 4 + .../launchdarkly/sdk/server/package-info.java | 4 + .../resources/filesource/all-properties.json | 21 + .../resources/filesource/all-properties.yml | 17 + .../test/resources/filesource/flag-only.json | 12 + .../test/resources/filesource/flag-only.yml | 11 + .../filesource/flag-with-duplicate-key.json | 12 + .../test/resources/filesource/malformed.json | 1 + .../test/resources/filesource/malformed.yml | 2 + .../test/resources/filesource/no-data.json | 0 .../resources/filesource/segment-only.json | 8 + .../resources/filesource/segment-only.yml | 5 + .../segment-with-duplicate-key.json | 12 + 
.../test/resources/filesource/value-only.json | 6 + .../test/resources/filesource/value-only.yml | 3 + .../filesource/value-with-duplicate-key.json | 6 + lib/sdk/server/src/test/resources/logback.xml | 16 + lib/shared/common/gradle.properties | 3 + lib/shared/internal/gradle.properties | 3 + release-please-config.json | 9 + 303 files changed, 42269 insertions(+), 1 deletion(-) create mode 100644 .github/ISSUE_TEMPLATE/package-server-sdk--bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/package-server-sdk--feature_request.md create mode 100644 .github/actions/contract-tests/action.yml create mode 100644 .github/workflows/java-server-sdk.yml create mode 100644 .sdk_metadata.json create mode 100644 lib/sdk/server/.gitignore create mode 100644 lib/sdk/server/CHANGELOG.md create mode 100644 lib/sdk/server/CODEOWNERS create mode 100644 lib/sdk/server/CONTRIBUTING.md create mode 100644 lib/sdk/server/LICENSE create mode 100644 lib/sdk/server/Makefile create mode 100644 lib/sdk/server/README.md create mode 100644 lib/sdk/server/SECURITY.md create mode 100644 lib/sdk/server/benchmarks/Makefile create mode 100644 lib/sdk/server/benchmarks/build.gradle create mode 100644 lib/sdk/server/benchmarks/settings.gradle create mode 100644 lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/LDClientEvaluationBenchmarks.java create mode 100644 lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/TestValues.java create mode 100644 lib/sdk/server/build.gradle create mode 100644 lib/sdk/server/config/checkstyle/checkstyle.xml create mode 100644 lib/sdk/server/config/checkstyle/suppressions.xml create mode 100644 lib/sdk/server/contract-tests/README.md create mode 100644 lib/sdk/server/contract-tests/gradle.properties create mode 100644 lib/sdk/server/contract-tests/gradle/wrapper/gradle-wrapper.jar create mode 100644 lib/sdk/server/contract-tests/gradle/wrapper/gradle-wrapper.properties create mode 100755 lib/sdk/server/contract-tests/gradlew create 
mode 100644 lib/sdk/server/contract-tests/gradlew.bat create mode 100644 lib/sdk/server/contract-tests/service/build.gradle create mode 100644 lib/sdk/server/contract-tests/service/settings.gradle create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackRepresentation.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackService.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentStoreFixture.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/HookCallbackService.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/MigrationCallbackService.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/Representations.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/SdkClientEntity.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestHook.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestService.java create mode 100644 lib/sdk/server/contract-tests/service/src/main/resources/logback.xml create mode 100644 lib/sdk/server/contract-tests/settings.gradle create mode 100644 lib/sdk/server/gradle.properties create mode 100644 lib/sdk/server/gradle.properties.example create mode 100644 lib/sdk/server/gradle/wrapper/gradle-wrapper.jar create mode 100644 lib/sdk/server/gradle/wrapper/gradle-wrapper.properties create mode 100755 lib/sdk/server/gradlew create mode 100644 lib/sdk/server/gradlew.bat create mode 100644 lib/sdk/server/packaging-test/Makefile create mode 100755 lib/sdk/server/packaging-test/run-non-osgi-test.sh create mode 100755 lib/sdk/server/packaging-test/run-osgi-test.sh create mode 100644 lib/sdk/server/packaging-test/test-app/build.gradle create mode 100644 lib/sdk/server/packaging-test/test-app/settings.gradle create mode 100644 
lib/sdk/server/packaging-test/test-app/src/main/java/testapp/JsonSerializationTestData.java create mode 100644 lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestApp.java create mode 100644 lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppGsonTests.java create mode 100644 lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppJacksonTests.java create mode 100644 lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppOsgiEntryPoint.java create mode 100755 lib/sdk/server/scripts/release.sh create mode 100755 lib/sdk/server/scripts/update-version.sh create mode 100644 lib/sdk/server/settings.gradle create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/json/SdkSerializationExtensions.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapper.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ClientContextImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Components.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModel.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java 
create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessorWrapper.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResult.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResultAndFlag.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationOptions.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationRecorder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Evaluator.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorInterface.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorTypeConversion.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorWithHooks.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EventBroadcasterImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java 
create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InputValidatingEvaluator.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDClient.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDConfig.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Loggers.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOp.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOpTracker.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOrigin.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStage.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStageEnforcingEvaluator.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationVariation.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/NoOpEventProcessor.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreStatusManager.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/SemanticVersion.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEvents.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicator.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/SimpleLRUCache.java create mode 100644 
lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StandardEndpoints.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessor.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessorEvents.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Util.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Version.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EvaluationSeriesContext.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileData.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceImpl.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceParsing.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/Hook.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HookMetadata.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HooksConfigurationBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilder.java create mode 100644 
lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/TestData.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/package-info.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClient.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClientInterface.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/package-info.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ApplicationInfo.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentStoreStatusProvider.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentsConfiguration.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ConsistencyCheck.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProvider.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProvider.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeEvent.java create mode 100644 
lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeListener.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagTracker.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeEvent.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeListener.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/HttpAuthentication.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/LDClientInterface.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ServiceEndpoints.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/WrapperInfo.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/package-info.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/Migration.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationBuilder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecution.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecutionMode.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationMethodResult.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationSerialOrder.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/package-info.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/package-info.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStore.java create mode 100644 
lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStoreTypes.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ClientContext.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ComponentConfigurer.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSource.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSourceUpdateSink.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStore.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreTypes.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreUpdateSink.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DiagnosticDescription.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventProcessor.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventSender.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HookConfiguration.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HttpConfiguration.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/LoggingConfiguration.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/PersistentDataStore.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/SerializationException.java create mode 100644 lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/package-info.java create mode 100644 lib/sdk/server/src/templates/java/com/launchdarkly/sdk/server/Version.java create mode 100644 
lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BaseTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapperTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ClientContextImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelDependenciesTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelPreprocessingTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelSerializationTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceUpdatesImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestBase.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestTypes.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreUpdatesImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DefaultFeatureRequestorTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvalResultTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBigSegmentTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBucketingTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorClauseTest.java create mode 100644 
lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorOperatorsParameterizedTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorPrerequisiteTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorRuleTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorSegmentMatchTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTargetTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestBase.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestUtil.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorWithHookTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EventBroadcasterImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FeatureFlagsStateTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagModelDeserializationTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagTrackerImplTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/InMemoryDataStoreTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/JsonHelpersTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientBigSegmentsTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEndToEndTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEvaluationTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEventTest.java create mode 100644 
lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientExternalUpdatesOnlyTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientListenersTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientOfflineTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDConfigTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationBuilderTests.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationConsistencyCheckTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationExecutionFixture.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationOpTrackerTests.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStageTests.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStagesExpectedExecutionTests.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationTests.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationVariationTests.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ModelBuilders.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperOtherTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PollingProcessorTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/RolloutRandomizationConsistencyTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SemanticVersionTest.java create mode 100644 
lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEventsTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicatorTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SimpleLRUCacheTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorEventsTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestComponents.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestUtil.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/UtilTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBase.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBaseTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ClientWithFileDataSourceTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/DataLoaderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceAutoUpdateTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTestData.java create 
mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserJsonTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserTestBase.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserYamlTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HookConfigurationBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/MockPersistentDataStore.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreGenericTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreTestBase.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataWithClientTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilderTest.java create mode 100644 
lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/package-info.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/BigSegmentMembershipBuilderTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProviderTypesTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProviderTypesTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreTypesTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/HttpAuthenticationTypesTest.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/package-info.java create mode 100644 lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/package-info.java create mode 100644 lib/sdk/server/src/test/resources/filesource/all-properties.json create mode 100644 lib/sdk/server/src/test/resources/filesource/all-properties.yml create mode 100644 lib/sdk/server/src/test/resources/filesource/flag-only.json create mode 100644 lib/sdk/server/src/test/resources/filesource/flag-only.yml create mode 100644 lib/sdk/server/src/test/resources/filesource/flag-with-duplicate-key.json create mode 100644 lib/sdk/server/src/test/resources/filesource/malformed.json create mode 100644 lib/sdk/server/src/test/resources/filesource/malformed.yml create mode 100644 lib/sdk/server/src/test/resources/filesource/no-data.json create mode 100644 lib/sdk/server/src/test/resources/filesource/segment-only.json create mode 100644 lib/sdk/server/src/test/resources/filesource/segment-only.yml create mode 100644 lib/sdk/server/src/test/resources/filesource/segment-with-duplicate-key.json create mode 100644 lib/sdk/server/src/test/resources/filesource/value-only.json create mode 100644 lib/sdk/server/src/test/resources/filesource/value-only.yml create mode 100644 
lib/sdk/server/src/test/resources/filesource/value-with-duplicate-key.json create mode 100644 lib/sdk/server/src/test/resources/logback.xml diff --git a/.github/ISSUE_TEMPLATE/package-server-sdk--bug_report.md b/.github/ISSUE_TEMPLATE/package-server-sdk--bug_report.md new file mode 100644 index 0000000..297d8f2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/package-server-sdk--bug_report.md @@ -0,0 +1,36 @@ +--- +name: 'Bug report for the java-server-sdk package' +about: Create a report to help us improve +title: '' +labels: 'package: java-server-sdk, bug' +assignees: '' +--- + +**Is this a support request?** +This issue tracker is maintained by LaunchDarkly SDK developers and is intended for feedback on the code in this library. If you're not sure whether the problem you are having is specifically related to this library, or to the LaunchDarkly service overall, it may be more appropriate to contact the LaunchDarkly support team; they can help to investigate the problem and will consult the SDK team if necessary. You can submit a support request by going [here](https://support.launchdarkly.com/) and clicking "submit a request", or by emailing support@launchdarkly.com. + +Note that issues filed on this issue tracker are publicly accessible. Do not provide any private account information on your issues. If your problem is specific to your account, you should submit a support request as described above. + +**Describe the bug** +A clear and concise description of what the bug is. + +**To reproduce** +Steps to reproduce the behavior. + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Logs** +If applicable, add any log output related to your problem. + +**SDK version** +The version of this SDK that you are using. + +**Language version, developer tools** +For instance, Go 1.11 or Ruby 2.5.3. If you are using a language that requires a separate compiler, such as C, please include the name and version of the compiler too. 
+ +**OS/platform** +For instance, Ubuntu 16.04, Windows 10, or Android 4.0.3. If your code is running in a browser, please also include the browser type and version. + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/package-server-sdk--feature_request.md b/.github/ISSUE_TEMPLATE/package-server-sdk--feature_request.md new file mode 100644 index 0000000..24eaf0c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/package-server-sdk--feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request for the java-server-sdk package +about: Suggest an idea for this project +title: '' +labels: 'package: java-server-sdk, enhancement' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I would love to see the SDK [...does something new...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context about the feature request here. \ No newline at end of file diff --git a/.github/actions/contract-tests/action.yml b/.github/actions/contract-tests/action.yml new file mode 100644 index 0000000..fc3a982 --- /dev/null +++ b/.github/actions/contract-tests/action.yml @@ -0,0 +1,17 @@ +name: Contract Tests +description: Runs Contract Tests +inputs: + workspace_path: + description: 'Path to the package.' 
+ required: true + token: + description: 'Github token, used for contract tests' + required: false + default: '' + +runs: + using: composite + steps: + - name: Run contract tests + shell: bash + run: make contract-tests -C ${{ inputs.workspace_path }} diff --git a/.github/workflows/java-server-sdk.yml b/.github/workflows/java-server-sdk.yml new file mode 100644 index 0000000..035d1cb --- /dev/null +++ b/.github/workflows/java-server-sdk.yml @@ -0,0 +1,70 @@ +name: java-server-sdk + +on: + push: + branches: [main, 'feat/**'] + paths-ignore: + - '**.md' #Do not need to run CI for markdown changes. + pull_request: + branches: [main, 'feat/**'] + paths-ignore: + - '**.md' + +jobs: + test-platforms-and-versions: + strategy: + matrix: + os: [windows-2019, ubuntu-20.04] + javaversion: [8, 11, 17, 19] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + + - name: Shared CI Steps + uses: ./.github/actions/ci + with: + workspace_path: 'lib/sdk/server' + java_version: ${{ matrix.javaversion }} + + - name: Contract Tests + uses: ./.github/actions/contract-tests + with: + workspace_path: 'lib/sdk/server' + token: ${{ secrets.GITHUB_TOKEN }} + + test-contract-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Shared CI Steps + uses: ./.github/actions/ci + with: + workspace_path: 'lib/sdk/server' + java_version: 8 + + - name: Contract Tests + uses: ./.github/actions/contract-tests + with: + workspace_path: 'lib/sdk/server' + token: ${{ secrets.GITHUB_TOKEN }} + + test-packaging: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Shared CI Steps + uses: ./.github/actions/ci + with: + workspace_path: 'lib/sdk/server' + java_version: 8 + + - name: Publish Maven Local + shell: bash + run: lib/sdk/server/gradlew publishToMavenLocal -p lib/sdk/server -P LD_SKIP_SIGNING=1 + + - name:
Run Packaging Test + shell: bash + run: make all -C lib/sdk/server/packaging-test diff --git a/.github/workflows/manual-publish-docs.yml b/.github/workflows/manual-publish-docs.yml index 4894a74..9cc5052 100644 --- a/.github/workflows/manual-publish-docs.yml +++ b/.github/workflows/manual-publish-docs.yml @@ -9,6 +9,7 @@ on: - lib/java-server-sdk-otel - lib/shared/common - lib/shared/internal + - lib/sdk/server dry_run: description: 'Is this a dry run. If so no docs will be published.' type: boolean diff --git a/.github/workflows/manual-publish.yml b/.github/workflows/manual-publish.yml index f0fb671..2e88854 100644 --- a/.github/workflows/manual-publish.yml +++ b/.github/workflows/manual-publish.yml @@ -10,6 +10,7 @@ on: - lib/java-server-sdk-otel - lib/shared/common - lib/shared/internal + - lib/sdk/server prerelease: description: 'Is this a prerelease.' type: boolean diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml index 419467f..796a726 100644 --- a/.github/workflows/release-please.yml +++ b/.github/workflows/release-please.yml @@ -20,6 +20,38 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} target-branch: ${{ github.ref_name }} + release-server-sdk: + runs-on: ubuntu-latest + needs: release-please + permissions: + id-token: write + contents: write + pull-requests: write + if: ${{ needs.release-please.outputs.package-server-sdk-released == 'true'}} + steps: + - uses: actions/checkout@v4 + + - uses: launchdarkly/gh-actions/actions/release-secrets@release-secrets-v1.1.0 + name: Get secrets + with: + aws_assume_role: ${{ vars.AWS_ROLE_ARN }} + ssm_parameter_pairs: '/production/common/releasing/sonatype/username = SONATYPE_USER_NAME, + /production/common/releasing/sonatype/password = SONATYPE_PASSWORD' + s3_path_pairs: 'launchdarkly-releaser/java/code-signing-keyring.gpg = code-signing-keyring.gpg' + + - uses: ./.github/actions/full-release + with: + workspace_path: lib/sdk/server + dry_run: false + prerelease: false + 
code_signing_keyring: 'code-signing-keyring.gpg' + signing_key_id: ${{ env.SIGNING_KEY_ID }} + signing_key_passphrase: ${{ env.SIGNING_KEY_PASSPHRASE }} + sonatype_username: ${{ env.SONATYPE_USER_NAME }} + sonatype_password: ${{ env.SONATYPE_PASSWORD }} + aws_role: ${{ vars.AWS_ROLE_ARN }} + token: ${{ secrets.GITHUB_TOKEN }} + release-server-sdk-otel: runs-on: ubuntu-latest needs: release-please diff --git a/.release-please-manifest.json b/.release-please-manifest.json index f93410f..3fe2b07 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,5 +1,6 @@ { "lib/java-server-sdk-otel": "0.1.0", "lib/shared/common": "2.1.1", - "lib/shared/internal": "1.3.0" + "lib/shared/internal": "1.3.0", + "lib/sdk/server": "7.4.1" } diff --git a/.sdk_metadata.json b/.sdk_metadata.json new file mode 100644 index 0000000..501d01b --- /dev/null +++ b/.sdk_metadata.json @@ -0,0 +1,13 @@ +{ + "version": 1, + "sdks": { + "java-server-sdk": { + "name": "Java Server SDK", + "type": "server-side", + "path": "lib/sdk/server", + "languages": [ + "Java" + ] + } + } +} diff --git a/lib/sdk/server/.gitignore b/lib/sdk/server/.gitignore new file mode 100644 index 0000000..cfcafe7 --- /dev/null +++ b/lib/sdk/server/.gitignore @@ -0,0 +1,21 @@ +# Eclipse project files +.classpath +.project +.settings + +# Intellij project files +*.iml +*.ipr +*.iws +.idea/ + +#Gradle +.gradletasknamecache +.gradle/ +build/ +bin/ +out/ +classes/ + +packaging-test/temp/ +benchmarks/lib/ diff --git a/lib/sdk/server/CHANGELOG.md b/lib/sdk/server/CHANGELOG.md new file mode 100644 index 0000000..5d168e9 --- /dev/null +++ b/lib/sdk/server/CHANGELOG.md @@ -0,0 +1,854 @@ +# Change log + +All notable changes to the LaunchDarkly Java SDK will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org). + +## [7.4.1] - 2024-05-13 +### Added: +- Adds warning log if excessive start wait time is used. 
+ +### Fixed: +- Improved preprocessing allocations to reduce memory footprint in rare flag configurations. + +## [7.4.0] - 2024-04-26 +### Added: +- This release introduces a Hooks API. Hooks are collections of user-defined callbacks that are executed by the SDK at various points of interest. You can use them to augment the SDK with metrics or tracing. + +## [7.3.0] - 2024-03-14 +### Changed: +- Redact anonymous attributes within feature events +- Always inline contexts for feature events + +## [7.2.6] - 2024-02-09 +### Added: +- LDReactorClient to adapt LDClient to reactive streams. + +## [7.1.1] - 2023-11-14 +### Fixed: +- Fixes NPE when interacting with Context created by copying. (Thanks, [ +pedroafonsodias](https://github.com/launchdarkly/java-sdk-common/pull/15)) + +## [7.1.0] - 2023-11-02 +### Added: +- Added an improved way of setting wrapper information for wrapper SDKs. This functionality is primarily intended for use by LaunchDarkly while developing wrapper SDKs. + +## [7.0.0] - 2023-10-16 +The latest version of this SDK supports the ability to manage migrations or modernizations, using migration flags. You might use this functionality if you are optimizing queries, upgrading to new tech stacks, migrating from one database to another, or other similar technology changes. Migration flags are part of LaunchDarkly's Early Access Program. This feature is available to all LaunchDarkly customers but may undergo additional changes before it is finalized. + +For detailed information about this version, refer to the list below. For information on how to upgrade from the previous version, read the [migration guide](https://docs.launchdarkly.com/sdk/server-side/java/migration-6-to-7). + +### Added: +- A new `Migration` type which provides an out-of-the-box configurable migration framework. +- For more advanced use cases, added new `migrationVariation` and `trackMigration` methods on LDClient. + +### Removed: +- Remove support for `LDUser` in `LDClient` methods. 
The `LDContext.fromUser` method can be used to convert an `LDUser` to an `LDContext`. In a future version it may be removed. + +## [6.2.1] - 2023-06-29 +### Changed: +- Bumping Guava version to incorporate CVE-2023-2976 fixes. + +## [6.2.0] - 2023-06-13 +### Added: +- Custom headers can now be added to all HTTP requests with `Components.httpConfiguration().addCustomHeader`. + +## [6.1.0] - 2023-04-13 +### Added: +- Support for Payload Filtering in streaming and polling modes. Payload Filtering is a beta feature that allows SDKs to download a subset of environment data, rather than full environments. + +## [6.0.6] - 2023-03-20 +### Fixed: +- Updated snakeyaml to v2.0.0 to address CVE-2022-1471. This vulnerability would only have affected applications that used the FileData feature with a YAML file, assuming an attacker had write access to the filesystem. + +## [6.0.5] - 2023-02-01 +### Fixed: +- Segment bug that returns the default value for variation if multiple flag rules refer to the same segment with a rule. + +## [6.0.4] - 2023-01-10 +### Fixed: +- If the stream connection failed when the SDK had only partially received a piece of JSON data from the stream, the SDK was sometimes logging a misleading error message about invalid JSON in addition to the normal error message about the connection failure. + +## [5.10.7] - 2023-01-09 +### Fixed: +- If the stream connection failed when the SDK had only partially received a piece of JSON data from the stream, the SDK was sometimes logging a misleading error message about invalid JSON in addition to the normal error message about the connection failure. + +## [6.0.3] - 2023-01-06 +### Fixed: +- Fixed unintended error behavior when the SDK is being shut down, if streaming is enabled. The symptom was that 1. the SDK could log a misleading message about a network error (in reality this was just the connection being deliberately closed) and 2. 
an uncaught exception could be thrown from the worker thread that managed that connection. The uncaught exception would be ignored in a default JVM configuration, but it could have more serious consequences in an application that had configured a default exception handler to be triggered by all uncaught exceptions. + +## [5.10.6] - 2023-01-06 +### Fixed: +- Fixed unintended error behavior when the SDK is being shut down, if streaming is enabled. The symptom was that 1. the SDK could log a misleading message about a network error (in reality this was just the connection being deliberately closed) and 2. an uncaught exception could be thrown from the worker thread that managed that connection. The uncaught exception would be ignored in a default JVM configuration, but it could have more serious consequences in an application that had configured a default exception handler to be triggered by all uncaught exceptions. + +## [6.0.2] - 2023-01-04 +### Fixed: +- Fixed vulnerability [CVE-2022-1471](https://nvd.nist.gov/vuln/detail/CVE-2022-1471) which could allow arbitrary code execution if using `FileDataSource` with a YAML file. (Thanks, [antonmos](https://github.com/launchdarkly/java-server-sdk/pull/289)!) + +## [5.10.5] - 2023-01-04 +### Fixed: +- Fixed vulnerability [CVE-2022-1471](https://nvd.nist.gov/vuln/detail/CVE-2022-1471) which could allow arbitrary code execution if using `FileDataSource` with a YAML file. (Thanks, [antonmos](https://github.com/launchdarkly/java-server-sdk/pull/289)!) + +## [6.0.1] - 2022-12-20 +### Changed: +- The internal implementation of the SSE client for streaming updates has been revised to use a single worker thread instead of two worker threads, reducing thread contention and memory usage. + +## [5.10.4] - 2022-12-20 +### Changed: +- The internal implementation of the SSE client for streaming updates has been revised to use a single worker thread instead of two worker threads, reducing thread contention and memory usage. 
+ +## [6.0.0] - 2022-12-07 +The latest version of this SDK supports LaunchDarkly's new custom contexts feature. Contexts are an evolution of a previously-existing concept, "users." Contexts let you create targeting rules for feature flags based on a variety of different information, including attributes pertaining to users, organizations, devices, and more. You can even combine contexts to create "multi-contexts." + +For detailed information about this version, please refer to the list below. For information on how to upgrade from the previous version, please read the [migration guide](https://docs.launchdarkly.com/sdk/server-side/java/migration-5-to-6). + +### Added: +- In `com.launchdarkly.sdk`, the types `LDContext` and `ContextKind` define the new context model. +- For all SDK methods that took an `LDUser` parameter, there is now an overload that takes an `LDContext`. The SDK still supports `LDUser` for now, but `LDContext` is the preferred model and `LDUser` may be removed in a future version. +- The `TestData` flag builder methods have been extended to support new context-related options, such as matching a key for a specific context type other than "user". + +### Changed _(breaking changes from 5.x)_: +- It was previously allowable to set a user key to an empty string. In the new context model, the key is not allowed to be empty. Trying to use an empty key will cause evaluations to fail and return the default value. +- There is no longer such a thing as a `secondary` meta-attribute that affects percentage rollouts. If you set an attribute with that name in an `LDContext`, it will simply be a custom attribute like any other. +- The `anonymous` attribute in `LDUser` is now a simple boolean, with no distinction between a false state and a null state.
+- Types such as `DataStore`, which define the low-level interfaces of LaunchDarkly SDK components and allow implementation of custom components, have been moved out of the `interfaces` subpackage into a new `subsystems` subpackage. Some types have been removed by using generics: for instance, the interface `DataSourceFactory` has been replaced by `ComponentConfigurer`. Application code normally does not refer to these types except possibly to hold a value for a configuration property such as `LDConfig.Builder.dataStore()`, so this change is likely to only affect configuration-related logic. + +### Changed (requirements/dependencies/build): +- The SDK no longer has a dependency on [SLF4J](https://www.slf4j.org/). It will still use SLF4J as the default logging framework _if_ SLF4J is in the classpath, so it is the application's responsibility to declare its own dependency on SLF4J, as any application that uses SLF4J would normally do. +- Applications that use the database integrations for Redis, DynamoDB, or Consul must update to the latest major versions of the corresponding packages (`launchdarkly-java-server-sdk-redis-store`, etc.). + +### Changed (behavioral changes): +- If SLF4J is not in the classpath, the SDK now uses `System.err` as its default logging destination. See "requirements/dependencies/build" above. +- The SDK can now evaluate segments that have rules referencing other segments. +- Analytics event data now uses a new JSON schema due to differences between the context model and the old user model. + +### Removed: +- Removed all types, fields, and methods that were deprecated as of the most recent 5.x release. +- Removed the `secondary` meta-attribute in `LDUser` and `LDUser.Builder`. +- The `alias` method no longer exists because alias events are not needed in the new context model. +- The `inlineUsersInEvents` option no longer exists because it is not relevant in the new context model. 
+ +## [5.10.3] - 2022-10-20 +### Fixed: +- The `pom.xml` specified a dependency on `com.launchdarkly:launchdarkly-logging` even though that library is already contained inside the SDK jar, which could cause extra copies of classes to be in the classpath. The dependency has been removed and the classes are still in the jar. ([#282](https://github.com/launchdarkly/java-server-sdk/issues/282)) + +## [5.10.2] - 2022-09-12 +### Fixed: +- Updated `snakeyaml` to v1.32 to address [CVE-2022-38752](https://nvd.nist.gov/vuln/detail/CVE-2022-38752). This vulnerability would only have affected applications that used the `FileData` feature with a YAML file, assuming an attacker had write access to the filesystem. + +## [5.10.1] - 2022-09-02 +### Fixed: +- Updated `snakeyaml` dependency (used only if using `FileData` with YAML files) to v1.31 to address CVE-2022-25857 ([#275](https://github.com/launchdarkly/java-server-sdk/issues/275)) +- Corrected documentation for default value of `LDConfig.Builder.startWait()`. (Thanks, [richardfearn](https://github.com/launchdarkly/java-server-sdk/pull/274)!) + +## [5.10.0] - 2022-07-28 +The main purpose of this release is to introduce a new logging facade, [`com.launchdarkly.logging`](https://github.com/launchdarkly/java-logging), to streamline how logging works in LaunchDarkly Java and Android code. Previously, the Java SDK always used SLF4J for logging; developers needed to provide an SLF4J configuration externally to specify the actual logging behavior. In this release, the default behavior is still to use SLF4J, but the logging facade can also be configured programmatically to do simple console logging without SLF4J, or to forward output to another framework such as `java.util.logging`, or to multiple destinations, or to capture output in memory. In a future major version release, the default behavior may be changed so that the SDK does not require SLF4J as a dependency. 
+ +### Added: +- In [`LoggingConfigurationBuilder`](https://javadoc.io/doc/com.launchdarkly/launchdarkly-java-server-sdk/latest/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilder.html), the new methods `adapter` and `level`, for the new logging capabilities mentioned above. +- `TestData.FlagBuilder.variationForAll` and `valueForAll`: new names for the deprecated methods listed below. + +### Deprecated: +- `TestData.FlagBuilder.variationForAllUsers` and `valueForAllUsers`: These methods are being renamed because in the future, there will be other possible kinds of evaluation inputs that are not users, and these test methods will apply equally to those. + +## [5.9.3] - 2022-07-28 +### Changed: +- Updated `okhttp` dependency to version 4.9.3 to address a [reported vulnerability](https://security.snyk.io/vuln/SNYK-JAVA-COMSQUAREUPOKHTTP3-2958044) in earlier versions of that library, which could have allowed potentially sensitive information to be written to the log if you had put that information in a custom header value that contained an illegal character (see release notes for Java SDK [5.6.0](https://github.com/launchdarkly/java-server-sdk/releases/tag/5.6.0)). ([#271](https://github.com/launchdarkly/java-server-sdk/issues/271)) + +## [5.9.2] - 2022-07-20 +### Changed: +- Further optimizations to reduce how many short-lived objects the SDK produces as a side effect of flag evaluations, causing less work for the garbage collector in applications that evaluate flags very frequently. + +## [5.9.1] - 2022-06-30 +### Changed: +- The SDK now uses memory more efficiently when parsing JSON flag/segment configuration data that it receives from LaunchDarkly, so there will be a less sizable transient memory usage spike if the flag/segment data is very large. This does not affect the baseline memory requirements for storing the data after it is received. 
+- The SDK now produces fewer short-lived objects as a side effect of flag evaluations, causing less work for the garbage collector in applications that evaluate flags very frequently. + +## [5.9.0] - 2022-05-26 +### Added: +- `LDConfig.Builder.serviceEndpoints` provides a simpler way of setting custom service base URIs, if you are connecting to a LaunchDarkly Relay Proxy instance, a private LaunchDarkly instance, or a test fixture. Previously, this required setting a BaseURI property for each individual service (streaming, events, etc.). If using the Relay Proxy, simply remove any BaseURI calls in your SDK configuration and call `serviceEndpoints(Components.serviceEndpoints().relayProxy(myRelayProxyUri))` on the configuration builder. + +### Fixed: +- Fixed documentation comments for the variation methods to clarify that `defaultValue` is used if there is an error fetching the variation or the flag doesn't exist, not when the flag is disabled. + +## [5.8.1] - 2022-05-04 +### Fixed: +- Calling `stringVariationDetail` with a flag whose variations are _not_ strings, and passing `null` as the default value parameter, would result in an `EvaluationDetail` that had a null value but had a regular evaluation reason and variation index (whatever those would be for a successful evaluation of that flag). It now correctly returns a `WRONG_TYPE` error reason, and `NO_VARIATION` for the variation index. +- If a field in `Config.ApplicationInfo` is set to a string longer than 64 characters, the SDK will now log a warning and discard it, since the LaunchDarkly services cannot process such strings for these fields. + +## [5.8.0] - 2022-04-18 +### Added: +- `LDConfig.Builder.applicationInfo()`, for configuration of application metadata that may be used in LaunchDarkly analytics or other product features. This does not affect feature flag evaluations. 
+
+## [5.7.1] - 2022-02-04
+### Fixed:
+- Fixed a packaging issue causing `launchdarkly-java-sdk-common` to be included as a dependency in the SDK's generated `pom` file. This introduces duplicate classes in the application's `jar` file. The duplicate classes can prevent the SDK's custom serialization logic from being used, due to not correctly referencing the shaded class names. ([#258](https://github.com/launchdarkly/java-server-sdk/issues/258))
+
+## [5.7.0] - 2022-01-28
+### Added:
+- The SDK now supports evaluation of Big Segments. An Early Access Program for creating and syncing Big Segments from customer data platforms is available to enterprise customers.
+
+### Changed:
+- CI builds now include a cross-platform test suite implemented in https://github.com/launchdarkly/sdk-test-harness. This covers many test cases that are also implemented in unit tests, but may be extended in the future to ensure consistent behavior across SDKs in other areas.
+
+## [5.6.7] - 2022-01-28
+### Fixed:
+- When using `allFlagsState` to produce bootstrap data for the JavaScript SDK, the Java SDK was not returning the correct metadata for evaluations that involved an experiment. As a result, the analytics events produced by the JavaScript SDK did not correctly reflect experimentation results.
+- In feature flag rules using the `before` and `after` date operators, if two ISO-8601 string values were compared that represented the exact same absolute date in different time zones (such as `2000-01-01T08:00:00Z` and `2000-01-01T00:00:00-08:00`), the SDK wrongly treated them as unequal. This did not affect strings that represented different absolute dates, which were always compared correctly. The SDK now handles both cases correctly.
+- The `com.launchdarkly.sdk.json` serialization methods were sometimes omitting JSON object properties in cases where it would have been more correct to show the property with a `null` value. 
This mainly affected JSON data produced by `LDClient.allFlagsState()`, where the presence of a flag key with a `null` value would indicate that the flag existed but could not be evaluated due to an error, as opposed to the flag not existing. + +## [5.6.6] - 2022-01-07 +### Fixed: +- The SDK build process was accidentally including a `module-info.class` file in the jar that was from a different module (`jdk.zipfs`). This has been removed. The SDK does not currently have Java module metadata. ([#252](https://github.com/launchdarkly/java-server-sdk/issues/252)) + +## [5.6.5] - 2021-12-08 +### Fixed: +- If it received an HTTP 401 or 403 error from LaunchDarkly, indicating that the SDK key was invalid, the SDK would still continue trying to send diagnostic events. ([#250](https://github.com/launchdarkly/java-server-sdk/issues/250)) + +## [5.6.4] - 2021-11-30 +### Fixed: +- Updated Gson to 2.8.9 for a [security bugfix](https://github.com/google/gson/pull/1991). + +## [5.6.3] - 2021-10-12 +### Fixed: +- If Java's default locale was not US/English, the SDK would fail to parse dates in the standard RFC1123 format in HTTP responses. The symptoms were that the warning `Received invalid Date header from events service` would appear in logs, and event debugging might not stop at the correct time if the system clock was different from the LaunchDarkly services' clock (which is why the SDK checks the Date header). + +## [5.6.2] - 2021-08-09 +### Fixed: +- `FeatureFlagsStateBuilder.build()` is now public. The other builder methods were made public in v5.6.0, but were not useful because `build()` was still package-private. + +## [5.6.1] - 2021-07-07 +This release fixes two packaging errors that could produce unwanted Java dependency behavior, as described below. There are no changes to the SDK's functionality in this release, and you do not need to modify your code or your build. 
+ +### Fixed: +- Two Jackson packages (`com.fasterxml.jackson.core:jackson-core`, `com.fasterxml.jackson.core:jackson-databind`) were mistakenly listed as dependencies in the SDK's metadata, causing those packages to be downloaded and included in the classpath even if you were not using them. The SDK does not require Jackson, even though it can optionally be made to use it. This was meant to be fixed in the 5.5.0 release as previously described in the changelog, but was not. +- The SDK jar manifest contained a `Class-Path` attribute that referenced SLF4J and Jackson jars at a specific relative file path. This could cause a warning to be printed if those jars did not exist at that file path, even if they were elsewhere in your classpath. The `Class-Path` attribute is mainly useful for independently-deployable application jars and is not useful here; it has been removed. ([#240](https://github.com/launchdarkly/java-server-sdk/issues/240)) + +## [5.6.0] - 2021-07-02 +### Added: +- The `builder()` method in `FeatureFlagsState`, for creating instances of that class (most likely useful in test code). ([#234](https://github.com/launchdarkly/java-server-sdk/issues/234)) + +### Fixed: +- If you called the `LDClient` constructor with an SDK key that contained a character less than `0x20` or greater than `0x7e`, it would throw an `IllegalArgumentException` that contained the full SDK key string in its message. Since the string might contain a real key (if for instance the application had read the SDK key from configuration data that included a newline character, and neglected to trim the newline), exposing the value in an exception message that might end up in a log was a security risk. This has been changed so that the exception message only says the key contains an invalid character, but does not include the value. 
(The underlying exception behavior is part of the OkHttp library, so be aware that if you inject any custom headers with illegal characters into your HTTP configuration, their values might still be exposed in this way.) +- In polling mode, the SDK would attempt to reconnect to the LaunchDarkly streaming service even if it received an HTTP 401 error. It should reconnect for other errors such as 503, but 401 indicates that the SDK key is invalid and a retry cannot succeed; the SDK did have logic to permanently stop the connection in this case, but it was not working. (This is equivalent to the bug that was fixed in 5.5.1, but for polling mode.) +- Fixed documentation comments for `FileData` to clarify that you should _not_ use `offline` mode in conjunction with `FileData`; instead, you should just turn off events if you don't want events to be sent. Turning on `offline` mode will disable `FileData` just as it disables all other data sources. ([#235](https://github.com/launchdarkly/java-server-sdk/issues/235)) + +## [5.5.1] - 2021-06-24 +### Fixed: +- The SDK was attempting to reconnect to the LaunchDarkly streaming service even if it received an HTTP 401 error. It should reconnect for other errors such as 503, but 401 indicates that the SDK key is invalid and a retry cannot succeed; the SDK did have logic to permanently stop the connection in this case, but it was not working. + +## [5.5.0] - 2021-06-17 +### Added: +- The SDK now supports the ability to control the proportion of traffic allocation to an experiment. This works in conjunction with a new platform feature now available to early access customers. + +### Fixed: +- Removed unnecessary dependencies on Jackson packages in `pom.xml`. The SDK does not require Jackson to be present, although it does provide convenience methods for interacting with Jackson if it is present. 
+ +## [5.4.1] - 2021-06-10 +### Fixed: +- If a rule clause in a feature flag or user segment had a JSON `null` as a match value, the SDK would fail to parse the JSON data, causing an overall inability to receive flag data from LaunchDarkly as long as this condition existed. This is an abnormal condition since it is not possible to match any user attribute against a null value, but it is technically allowed by the JSON schema. The SDK will now correctly parse the data. + +## [5.4.0] - 2021-04-22 +### Added: +- Added the `alias` method to `LDClient`. This can be used to associate two user objects for analytics purposes with an alias event. +- In `com.launchdarkly.sdk.json.LDGson`, added convenience methods `valueToJsonElement` and `valueMapToJsonElementMap` for applications that use Gson types. +- In `com.launchdarkly.sdk.LDValue`, added convenience method `arrayOf()`. + +### Changed: +- In `com.launchdarkly.sdk.json`, the implementations of `LDGson.typeAdapters` and `LDJackson.module` have been changed for better efficiency in deserialization. Instead of creating an intermediate string representation and re-parsing that, they now have a more direct way for the internal deserialization logic to interact with the streaming parser in the application's Gson or Jackson instance. + +### Fixed: +- `Gson.toJsonTree` now works with LaunchDarkly types, as long as you have configured it as described in `com.launchdarkly.sdk.json.LDGson`. Previously, Gson was able to convert these types to and from JSON string data, but `toJsonTree` did not work due to a [known issue](https://github.com/google/gson/issues/1289) with the `JsonWriter.jsonValue` method; the SDK code no longer uses that method. +- `LDValue.parse()` now returns `LDValue.ofNull()` instead of an actual null reference if the JSON string is `null`. 
+- Similarly, when deserializing an `EvaluationDetail` from JSON, if the `value` property is `null`, it will now translate this into `LDValue.ofNull()` rather than an actual null reference.
+
+## [5.3.1] - 2021-04-08
+### Fixed:
+- Updated the `commons-codec` dependency from 1.10 to 1.15. There was a [known vulnerability](https://github.com/apache/commons-codec/commit/48b615756d1d770091ea3322eefc08011ee8b113) in earlier versions of `commons-codec` — although it did not affect this SDK, since it involved base64 decoding, which is not a thing the SDK ever does.
+
+## [5.3.0] - 2021-03-09
+### Added:
+- When using the file data source, `FileDataSourceBuilder.duplicateKeysHandling` allows you to specify that duplicate flag keys should _not_ cause an error as they normally would. [(#226)](https://github.com/launchdarkly/java-server-sdk/issues/226)
+
+## [5.2.3] - 2021-02-19
+### Fixed:
+- The flag update notification mechanism in `FlagTracker` did not work when the data source was `FileData`. This has been fixed so that whenever `FileData` reloads the data file(s) due to a file being modified, it signals that the flags were updated. The SDK will behave as if every flag was updated in this case, regardless of which part of the file data was actually changed, but it was already the case that a flag change event did not necessarily mean there was any _significant_ change to the flag. You can use `addFlagValueChangeListener` (as opposed to `addFlagChangeListener`) to be notified only of changes that affect a specific flag's value for a specific user. ([#224](https://github.com/launchdarkly/java-server-sdk/issues/224))
+
+## [5.2.2] - 2021-01-15
+### Fixed:
+- Updated Guava from `28.2-jre` to `30.1-jre` to resolve [CVE-2020-8908](https://nvd.nist.gov/vuln/detail/CVE-2020-8908). This CVE did not affect the SDK as the SDK does not use the vulnerable functionality. 
+ +## [5.2.1] - 2020-12-01 +### Fixed: +- `TestData.FlagBuilder` did not copy flags' targeting rules when applying an update to an existing test flag. ([#220](https://github.com/launchdarkly/java-server-sdk/issues/220)) + +## [5.2.0] - 2020-10-09 +### Added: +- Add support for setting a `socketFactory` in the `HttpConfiguration` builder. This is used to create sockets when initiating HTTP connections. For TLS connections `sslSocketFactory` is used. + +## [5.1.1] - 2020-09-30 +### Fixed: +- The `com.launchdarkly.sdk.json.LDJackson` class was not usable in the default distribution of the SDK (nor in the `all` distribution) because Jackson class names had been incorrectly modified by the shading step in the build. ([#213](https://github.com/launchdarkly/java-server-sdk/issues/213)) +- Setting custom base URIs for the streaming, polling, or events service could produce incorrect results if the URI had a context path. (Thanks, [msafari](https://github.com/launchdarkly/java-server-sdk/pull/212)!) +- Corrected format strings in some log messages. ([#211](https://github.com/launchdarkly/java-server-sdk/issues/211)) + +## [5.1.0] - 2020-09-04 +### Added: +- The `TestData` class in `com.launchdarkly.sdk.server.integrations` is a new way to inject feature flag data programmatically into the SDK for testing—either with fixed values for each flag, or with targets and/or rules that can return different values for different users. Unlike `FileData`, this mechanism does not use any external resources, only the data that your test code has provided. + +### Fixed: +- In polling mode, the log message "LaunchDarkly client initialized" was appearing after every successful poll request. It should only appear once. + +## [5.0.5] - 2020-09-03 +### Fixed: +- Bump SnakeYAML from 1.19 to 1.26 to address CVE-2017-18640. 
The SDK only parses YAML if the application has configured the SDK with a flag data file, so it's unlikely this CVE would affect SDK usage as it would require configuration and access to a local file. + + +## [5.0.4] - 2020-09-01 +### Fixed: +- Updated the version of OkHttp contained within the SDK from 4.5.0 to 4.8.1, to address multiple [known issues](https://square.github.io/okhttp/changelog/) including an incompatibility with OpenJDK 8.0.252 under some conditions. ([#204](https://github.com/launchdarkly/java-server-sdk/issues/204)) + +## [5.0.3] - 2020-08-18 +### Fixed: +- A packaging issue with Kotlin dependencies caused problems with IntelliJ code completion and code highlighting. ([#201](https://github.com/launchdarkly/java-server-sdk/issues/201)) + +## [5.0.2] - 2020-06-25 +### Changed: +- It is no longer necessary to set `StreamingDataSourceBuilder.pollingBaseURI` if you are also setting `baseURI`. This is due to a change in how the LaunchDarkly streaming service works. The setter method still exists, but no longer has any effect and will be deprecated in a future release. + +### Fixed: +- In polling mode, if a poll request failed due to a temporary network problem but then a subsequent request succeeded, `DataSourceStatusProvider` was continuing to report the status as `INTERRUPTED` when it should have been restored to `VALID`. +- In polling mode, the SDK was unnecessarily re-storing the flag data in the data store even if it had not changed since the last poll request. This would cause unnecessary updates when using a database. +- In polling mode, temporary files used for HTTP caching (in the system temporary directory) were not being cleaned up when the client was closed. +- Fixed incorrect sample code in the documentation comment for `FlagValueChangeListener`. 
+
+## [5.0.1] - 2020-06-19
+### Fixed:
+- Fixed a bug that could cause worker threads for the EventSource stream to persist after closing the client, if the client had shut down the stream due to detecting an invalid SDK key.
+
+## [5.0.0] - 2020-06-02
+This is a major rewrite that introduces a cleaner API design, adds new features, and makes the SDK code easier to maintain and extend. See the [Java 4.x to 5.0 migration guide](https://docs.launchdarkly.com/sdk/server-side/java/migration-4-to-5) for an in-depth look at the changes in this version; the following is a summary.
+
+(For early adopters who have used the 5.0.0-rc2 beta release: some things have changed between 5.0.0-rc2 and this full release. The [5.0.0-rc2 release notes](https://github.com/launchdarkly/java-server-sdk/releases/tag/5.0.0-rc2) have been updated with a section describing these changes.)
+
+### Added:
+- You can tell the SDK to notify you whenever a feature flag's configuration has changed (either in general, or in terms of its result for a specific user), using `LDClient.getFlagTracker()`. ([#83](https://github.com/launchdarkly/java-server-sdk/issues/83))
+- You can monitor the status of the SDK's data source (which normally means the streaming connection to the LaunchDarkly service) with `LDClient.getDataSourceStatusProvider()`. This allows you to check the current connection status, and to be notified if this status changes. ([#184](https://github.com/launchdarkly/java-server-sdk/issues/184))
+- You can monitor the status of a persistent data store with `LDClient.getDataStoreStatusProvider()`. This allows you to check whether database updates are succeeding, to be notified if this status changes, and to get caching statistics.
+- The `FileData` tool now supports reading flag data from a classpath resource as if it were a data file. See `FileDataSourceBuilder.classpathResources()`. 
([#193](https://github.com/launchdarkly/java-server-sdk/issues/193)) +- `LDConfig.Builder.logging()` is a new configuration category for options related to logging. Currently the only such option is `escalateDataSourceOutageLoggingAfter`, which controls the new connection failure logging behavior described below. +- `LDConfig.Builder.threadPriority()` allows you to set the priority for worker threads created by the SDK. +- The `UserAttribute` class provides a less error-prone way to refer to user attribute names in configuration, and can also be used to get an arbitrary attribute from a user. +- The `LDGson` and `LDJackson` classes allow SDK classes like `LDUser` to be easily converted to or from JSON using the popular Gson and Jackson frameworks. + +### Changed (requirements/dependencies/build): +- The minimum supported Java version is now 8. +- The SDK no longer exposes a Gson dependency or any Gson types. +- Third-party libraries like Gson, Guava, and OkHttp that are used internally by the SDK have been updated to newer versions since Java 7 compatibility is no longer required. ([#158](https://github.com/launchdarkly/java-server-sdk/issues/158)) +- Code coverage reports and JMH benchmarks are now generated in every build. Unit test coverage of the entire SDK codebase has been greatly improved. + +### Changed (API changes): +- Package names have changed: the main SDK classes are now in `com.launchdarkly.sdk` and `com.launchdarkly.sdk.server`. +- Many rarely-used classes and interfaces have been moved out of the main SDK package into `com.launchdarkly.sdk.server.integrations` and `com.launchdarkly.sdk.server.interfaces`. +- The type `java.time.Duration` is now used for configuration properties that represent an amount of time, instead of using a number of milliseconds or seconds. +- `LDClient.initialized()` has been renamed to `isInitialized()`. 
+- `LDClient.intVariation()` and `doubleVariation()` now return `int` and `double`, not the nullable `Integer` and `Double`. +- `EvaluationDetail.getVariationIndex()` now returns `int` instead of `Integer`. +- `EvaluationReason` is now a single concrete class rather than an abstract base class. +- The component interfaces `FeatureStore` and `UpdateProcessor` have been renamed to `DataStore` and `DataSource`. The factory interfaces for these components now receive SDK configuration options in a different way that does not expose other components' configurations to each other. +- The `PersistentDataStore` interface for creating your own database integrations has been simplified by moving all of the serialization and caching logic into the main SDK code. + +### Changed (behavioral changes): +- SLF4J logging now uses a simpler, more stable set of logger names instead of using the names of specific implementation classes that are subject to change. General messages are logged under `com.launchdarkly.sdk.server.LDClient`, while messages about specific areas of functionality are logged under that name plus `.DataSource` (streaming, polling, file data, etc.), `.DataStore` (database integrations), `.Evaluation` (unexpected errors during flag evaluations), or `.Events` (analytics event processing). +- If analytics events are disabled with `Components.noEvents()`, the SDK now avoids generating any analytics event objects internally. Previously they were created and then discarded, causing unnecessary heap churn. +- Network failures and server errors for streaming or polling requests were previously logged at `ERROR` level in most cases but sometimes at `WARN` level. They are now all at `WARN` level, but with a new behavior: if connection failures continue without a successful retry for a certain amount of time, the SDK will log a special `ERROR`-level message to warn you that this is not just a brief outage. 
The amount of time is one minute by default, but can be changed with the new `logDataSourceOutageAsErrorAfter` option in `LoggingConfigurationBuilder`. ([#190](https://github.com/launchdarkly/java-server-sdk/issues/190)) +- Many internal methods have been rewritten to reduce the number of heap allocations in general. +- Evaluation of rules involving regex matches, date/time values, and semantic versions, has been speeded up by pre-parsing the values in the rules. +- Evaluation of rules involving an equality match to multiple values (such as "name is one of X, Y, Z") has been speeded up by converting the list of values to a `Set`. +- The number of worker threads maintained by the SDK has been reduced so that most intermittent background tasks, such as listener notifications, event flush timers, and polling requests, are now dispatched on a single thread. The delivery of analytics events to LaunchDarkly still has its own thread pool because it is a heavier-weight task with greater need for concurrency. +- In polling mode, the poll requests previously ran on a dedicated worker thread that inherited its priority from the application thread that created the SDK. They are now on the SDK's main worker thread, which has `Thread.MIN_PRIORITY` by default (as all the other SDK threads already did) but the priority can be changed as described above. +- When using a persistent data store such as Redis, if there is a database outage, the SDK will wait until the end of the outage and then restart the stream connection to ensure that it has the latest data. Previously, it would try to restart the connection immediately and continue restarting if the database was still not available, causing unnecessary overhead. + +### Fixed: +- `LDClient.version()` previously could not be used if the SDK classes were not packaged in their original jar. It now works correctly regardless of deployment details. 
+ +### Removed: +- All types and methods that were deprecated as of Java SDK 4.13.0 have been removed. This includes many `LDConfig.Builder()` methods, which have been replaced by the modular configuration syntax that was already added in the 4.12.0 and 4.13.0 releases. See the [migration guide](https://docs.launchdarkly.com/sdk/server-side/java/migration-4-to-5) for details on how to update your configuration code if you were using the older syntax. +- The Redis integration is no longer built into the main SDK library. See: https://github.com/launchdarkly/java-server-sdk-redis +- The deprecated New Relic integration has been removed. + +## [4.14.4] - 2020-09-28 +### Fixed: +- Restored compatibility with Java 7. A transitive dependency that required Java 8 had accidentally been included, and the CI build did not detect this because the tests were being run in Java 8 even though the compiler target was 7. CI builds now verify that the SDK really can run in Java 7. This fix is only for 4.x; the 5.x SDK still does not support Java 7. +- Bumped OkHttp version to 3.12.12 to avoid a crash on Java 8u252. +- Removed an obsolete comment that said the `trackMetric` method was not yet supported by the LaunchDarkly service; it is. + +## [4.14.3] - 2020-09-03 +### Fixed: +- Bump SnakeYAML from 1.19 to 1.26 to address CVE-2017-18640. The SDK only parses YAML if the application has configured the SDK with a flag data file, so it's unlikely this CVE would affect SDK usage as it would require configuration and access to a local file. + +## [4.14.2] - 2020-09-01 +### Fixed: +- Updated the version of OkHttp contained within the SDK from 3.12.10 to 3.14.9, to address multiple [known issues](https://square.github.io/okhttp/changelog_3x/) including an incompatibility with OpenJDK 8.0.252 under some conditions. 
([#204](https://github.com/launchdarkly/java-server-sdk/issues/204)) + +## [4.14.1] - 2020-08-04 +### Fixed: +- Deserializing `LDUser` from JSON using Gson resulted in an object that had nulls in some fields where nulls were not expected, which could cause null pointer exceptions later. While there was no defined behavior for deserializing users in the 4.x SDK (it is supported in 5.0 and above), it was simple to fix. Results of deserializing with any other JSON framework are undefined. ([#199](https://github.com/launchdarkly/java-server-sdk/issues/199)) + +## [4.14.0] - 2020-05-13 +### Added: +- `EventSender` interface and `EventsConfigurationBuilder.eventSender()` allow you to specify a custom implementation of how event data is sent. This is mainly to facilitate testing, but could also be used to store and forward event data. + +### Fixed: +- Changed the Javadoc comments for the `LDClient` constructors to provide a better explanation of the client's initialization behavior. + +## [4.13.0] - 2020-04-21 +### Added: +- The new methods `Components.httpConfiguration()` and `LDConfig.Builder.http()`, and the new class `HttpConfigurationBuilder`, provide a subcomponent configuration model that groups together HTTP-related options such as `connectTimeoutMillis` and `proxyHost` - similar to how `Components.streamingDataSource()` works for streaming-related options or `Components.sendEvents()` for event-related options. The individual `LDConfig.Builder` methods for those options will still work, but are deprecated and will be removed in version 5.0. +- `EvaluationReason` now has getter methods like `getRuleIndex()` that were previously only on specific reason subclasses. The subclasses will be removed in version 5.0. + +### Changed: +- In streaming mode, the SDK will now drop and restart the stream connection if either 1. it receives malformed data (indicating that some data may have been lost before reaching the application) or 2. 
you are using a database integration (a persistent feature store) and a database error happens while trying to store the received data. In both cases, the intention is to make sure updates from LaunchDarkly are not lost; restarting the connection causes LaunchDarkly to re-send the entire flag data set. This makes the Java SDK's behavior consistent with other LaunchDarkly server-side SDKs. + +(Note that this means if there is a sustained database outage, you may see repeated reconnections as the SDK receives the data from LaunchDarkly again, tries to store it again, and gets another database error. Starting in version 5.0, there will be a more efficient mechanism in which the stream will only be restarted once the database becomes available again; that is not possible in this version because of limitations in the feature store interface.) + +### Fixed: +- Network errors during analytics event delivery could cause an unwanted large exception stacktrace to appear as part of the log message. This has been fixed to be consistent with the SDK's error handling in general: a brief message is logged at `ERROR` or `WARN` level, and the stacktrace only appears if you have enabled `DEBUG` level. + +### Deprecated: +- `LDConfig.Builder` methods `connectTimeout`, `connectTimeoutMillis`, `proxyHost`, `proxyPort`, `proxyUsername`, `proxyPassword`, `sslSocketFactory`, `wrapperName`, and `wrapperVersion`. Use `LDConfig.Builder.http()` and `Components.httpConfiguration()` instead. +- `EvaluationReason` subclasses. Use the property getter methods on `EvaluationReason` instead. +- The built-in New Relic integration will be removed in the 5.0 release. Application code is not affected by this change since the integration was entirely reflection-based and was not exposed in the public API. + +## [4.12.1] - 2020-03-20 +### Changed: +- Improved the performance of the in-memory flag data store by using an immutable map that is atomically replaced on updates, so reads do not need a lock. 
+- Improved the performance of flag evaluations when there is a very long user target list in a feature flag or user segment, by representing the user key collection as a Set rather than a List. +- Updated OkHttp version to 3.12.10 (the latest version that still supports Java 7). + + +## [4.12.0] - 2020-01-30 +The primary purpose of this release is to introduce newer APIs for the existing SDK features, corresponding to how they will work in the upcoming 5.0 release. The corresponding older APIs are now deprecated; switching from them to the newer ones now will facilitate migrating to 5.0 in the future. See below for details. + +This release also adds diagnostic reporting as described below. + +Note: if you are using the LaunchDarkly Relay Proxy to forward events, update the Relay to version 5.10.0 or later before updating to this Java SDK version. + +### Added: +- The SDK now periodically sends diagnostic data to LaunchDarkly, describing the version and configuration of the SDK, the architecture and version of the runtime platform, and performance statistics. No credentials, hostnames, or other identifiable values are included. This behavior can be disabled with `LDConfig.Builder.diagnosticOptOut()` or configured with `EventProcessorBuilder.diagnosticRecordingInterval()`. +- Previously, most configuration options were set by setter methods in `LDConfig.Builder`. These are being superseded by builders that are specific to one area of functionality: for instance, `Components.streamingDataSource()` and `Components.pollingDataSource()` provide builders/factories that have options specific to streaming or polling, and the SDK's many options related to analytics events are now in a builder returned by `Components.sendEvents()`. Using this newer API makes it clearer which options are for what, and makes it impossible to write contradictory configurations like `.stream(true).pollingIntervalMillis(30000)`. +- The component "feature store" will be renamed to "data store". 
The interface for this is still called `FeatureStore` for backward compatibility, but `LDConfig.Builder` now has a `dataStore` method. +- There is a new API for specifying a _persistent_ data store (usually a database integration). This is now done using the new method `Components.persistentDataStore` and one of the new integration factories in the new package `com.launchdarkly.client.integrations`. The `Redis` class in that package provides the Redis integration; the next releases of the Consul and DynamoDB integrations will use the same semantics. +- The component "update processor" will be renamed to "data source". Applications normally do not need to use this interface except for the "file data source" testing component; the new entry point for this is `FileData` in `com.launchdarkly.client.integrations`. +- It is now possible to specify an infinite cache TTL for persistent feature stores by setting the TTL to a negative number, in which case the persistent store will never be read unless the application restarts. Use this mode with caution as described in the comment for `PersistentDataStoreBuilder.cacheForever()`. +- New `LDConfig.Builder` setters `wrapperName()` and `wrapperVersion()` allow a library that uses the Java SDK to identify itself for usage data if desired. + +### Fixed: +- The Redis integration could fail to connect to Redis if the application did not explicitly specify a Redis URI. This has been fixed so it will default to `redis://localhost:6379` as documented. +- The `getCacheStats()` method on the deprecated `RedisFeatureStore` class was not working (the statistics were always zero). Note that in the newer persistent store API added in this version, there is now a different way to get cache statistics. + +### Deprecated: +- Many `LDConfig.Builder` methods: see notes under "Added", and the per-method notes in Javadoc. 
+- `RedisFeatureStore` and `RedisFeatureStoreBuilder` in `com.launchdarkly.client`: see `Redis` in `com.launchdarkly.client.integrations`. +- `FileComponents` in `com.launchdarkly.client.files`: see `FileData` in `com.launchdarkly.client.integrations`. +- `FeatureStoreCacheConfig`: see `PersistentDataStoreBuilder`. + + +## [4.11.1] - 2020-01-17 +### Fixed: +- Flag evaluation would fail (with a NullPointerException that would be logged, but not thrown to the caller) if a flag rule used a semantic version operator and the specified user attribute did not have a string value. +- The recently-added exception property of `EvaluationReason.Error` should not be serialized to JSON when sending reasons in analytics events, since the LaunchDarkly events service does not process that field and the serialization of an exception can be lengthy. The property is only meant for programmatic use. +- The SDK now specifies a uniquely identifiable request header when sending events to LaunchDarkly to ensure that events are only processed once, even if the SDK sends them two times due to a failed initial attempt. _(An earlier release note incorrectly stated that this behavior was added in 4.11.0. It is new in this release.)_ + +## [4.11.0] - 2020-01-16 +### Added: +- When an `EvaluationReason` indicates that flag evaluation failed due to an unexpected exception (`getKind()` is `ERROR`, and `EvaluationReason.Error.getErrorKind()` is `EXCEPTION`), you can now examine the underlying exception via `EvaluationReason.Error.getException()`. ([#180](https://github.com/launchdarkly/java-server-sdk/issues/180)) + +## [4.10.1] - 2020-01-06 +### Fixed: +- The `pom.xml` dependencies were incorrectly specifying `runtime` scope rather than `compile`, causing problems for applications that did not have their own dependencies on Gson and SLF4J. 
([#151](https://github.com/launchdarkly/java-client/issues/151)) + +## [4.10.0] - 2019-12-13 +### Added: +- Method overloads in `ArrayBuilder`/`ObjectBuilder` to allow easily adding values as booleans, strings, etc. rather than converting them to `LDValue` first. + +### Changed: +- The SDK now generates fewer ephemeral objects on the heap from flag evaluations, by reusing `EvaluationReason` instances that have the same properties. + +### Fixed: +- In rare circumstances (depending on the exact data in the flag configuration, the flag's salt value, and the user properties), a percentage rollout could fail and return a default value, logging the error "Data inconsistency in feature flag ... variation/rollout object with no variation or rollout". This would happen if the user's hashed value fell exactly at the end of the last "bucket" (the last variation defined in the rollout). This has been fixed so that the user will get the last variation. + +### Deprecated: +- Deprecated `LDCountryCode`, `LDUser.Builder.country(LDCountryCode)`, and `LDUser.Builder.privateCountry(LDCountryCode)`. `LDCountryCode` will be removed in the next major release; for setting the `country` user property, applications should use `LDUser.Builder.country(String)` and `LDUser.Builder.privateCountry(String)` instead. +- `SegmentRule` is an internal implementation class that was accidentally made public. +- `NullUpdateProcessor` should not be referenced directly and will be non-public in the future; use the factory methods in `Components` instead. + + +## [4.9.1] - 2019-11-20 +### Changed: +- Improved memory usage and performance when processing analytics events: the SDK now encodes event data to JSON directly, instead of creating intermediate objects and serializing them via reflection. + +### Fixed: +- A bug introduced in version 4.9.0 was causing event delivery to fail if a user was created with the `User(string)` constructor, instead of the builder pattern. 
+ + +## [4.9.0] - 2019-10-18 +This release adds the `LDValue` class (in `com.launchdarkly.client.value`), which is a new abstraction for all of the data types supported by the LaunchDarkly platform. Since those are the same as the JSON data types, the SDK previously used the Gson classes `JsonElement`, `JsonObject`, etc. to represent them. This caused two problems: the public APIs are dependent on Gson, and the Gson object and array types are mutable so it was possible to accidentally modify values that are being used elsewhere in the SDK. + +While the SDK still uses Gson internally, all references to Gson types in the API are now deprecated in favor of equivalent APIs that use `LDValue`. Developers are encouraged to migrate toward these as soon as possible; the Gson classes will be removed from the API in a future major version. If you are only using primitive types (boolean, string, etc.) for your feature flags and user attributes, then no changes are required. + +There are no other changes in this release. + +### Added: +- `LDValue` (see above). +- The new `jsonValueVariation` and `jsonValueVariationDetail` methods in `LDClient`/`LDClientInterface` are equivalent to `JsonVariation` and `JsonVariationDetail`, but use `LDValue`. + +### Deprecated: +- In `LDClient`/`LDClientInterface`: `jsonVariation`/`jsonVariationDetail`. Use `jsonValueVariation`/`jsonValueVariationDetail`. +- In `LDClient`/`LDClientInterface`: `track(String, LDUser, JsonElement)` and `track(String, LDUser, JsonElement, double)`. Use `trackData(String, LDUser, LDValue)` and `trackMetric(String, LDUser, LDValue, double)`. The names are different to avoid compile-time ambiguity since both `JsonElement` and `LDValue` are nullable types. +- In `LDUserBuilder`: `custom(String, JsonElement)` and `privateCustom(String, JsonElement)`. Use the `LDValue` overloads. +- In `LDValue`: `fromJsonElement`, `unsafeFromJsonElement`, `asJsonElement`, `asUnsafeJsonElement`. 
These are provided for compatibility with code that still uses `JsonElement`, but will be removed in a future major version. + + +## [4.8.1] - 2019-10-17 +### Fixed: +- The NewRelic integration was broken when using the default uberjar distribution, because the SDK was calling `Class.forName()` for a class name that was accidentally transformed by the Shadow plugin for Gradle. ([#171](https://github.com/launchdarkly/java-server-sdk/issues/171)) +- Streaming connections were not using the proxy settings specified by `LDConfig.Builder.proxy()` and `LDConfig.Builder.proxyAuthenticator()`. ([#172](https://github.com/launchdarkly/java-server-sdk/issues/172)) +- The SDK was creating an unused `OkHttpClient` instance as part of the static `LDConfig` instance used by the `LDClient(String)` constructor. This has been removed. +- Passing a null `sdkKey` or `config` to the `LDClient` constructors would always throw a `NullPointerException`, but it did not have a descriptive message. These exceptions now explain which parameter was null. + +## [4.8.0] - 2019-09-30 +### Added: +- Added support for upcoming LaunchDarkly experimentation features. See `LDClient.track(String, LDUser, JsonElement, double)`. + +### Changed: +- Updated documentation comment for `intVariation` to clarify the existing rounding behavior for floating-point values: they are rounded toward zero. + +## [4.7.1] - 2019-08-19 +### Fixed: +- Fixed a race condition that could cause a `NumberFormatException` to be logged when delivering event data to LaunchDarkly (although the exception did not prevent the events from being delivered). + +## [4.7.0] - 2019-08-02 +### Added: +- In `RedisFeatureStoreBuilder`, the new methods `database`, `password`, and `tls` allow you to specify the database number, an optional password, and whether to make a secure connection to Redis. This is an alternative to specifying them as part of the Redis URI, e.g. 
`rediss://:PASSWORD@host:port/NUMBER`, which is also supported (previously, the database and password were supported in the URI, but the secure `rediss:` scheme was not). +- `LDConfig.Builder.sslSocketFactory` allows you to specify a custom socket factory and truststore for all HTTPS connections made by the SDK. This is for unusual cases where your Java environment does not have the proper root CA certificates to validate LaunchDarkly's certificate, or you are connecting through a secure proxy that has a self-signed certificate, and you do not want to modify Java's global truststore. + +### Deprecated: +- `LDConfig.Builder.samplingInterval` is now deprecated. The intended use case for the `samplingInterval` feature was to reduce analytics event network usage in high-traffic applications. This feature is being deprecated in favor of summary counters, which are meant to track all events. + +## [4.6.6] - 2019-07-10 +### Fixed: +- Under conditions where analytics events are being generated at an extremely high rate (for instance, if an application is evaluating a flag repeatedly in a tight loop on many threads), a thread could be blocked indefinitely within the `Variation` methods while waiting for the internal event processing logic to catch up with the backlog. The logic has been changed to drop events if necessary so threads will not be blocked (similar to how the SDK already drops events if the size of the event buffer is exceeded). If that happens, this warning message will be logged once: "Events are being produced faster than they can be processed; some events will be dropped". Under normal conditions this should never happen; this change is meant to avoid a concurrency bottleneck in applications that are already so busy that thread starvation is likely. + +## [4.6.5] - 2019-05-21 +### Fixed +- The `LDConfig.Builder` method `userKeysFlushInterval` was mistakenly setting the value of `flushInterval` instead. 
(Thanks, [kutsal](https://github.com/launchdarkly/java-server-sdk/pull/163)!) + +### Added +- CI tests now run against Java 8, 9, 10, and 11. + +## [4.6.4] - 2019-05-01 +### Changed +- Changed the artifact name from `com.launchdarkly:launchdarkly-client` to `com.launchdarkly:launchdarkly-java-server-sdk` +- Changed repository references to use the new URL + +There are no other changes in this release. Substituting `launchdarkly-client` version 4.6.3 with `launchdarkly-java-server-sdk` version 4.6.4 will not affect functionality. + +## [4.6.3] - 2019-03-21 +### Fixed +- The SDK uberjars contained some JSR305 annotation classes such as `javax.annotation.Nullable`. These have been removed. They were not being used in the public API anyway. ([#156](https://github.com/launchdarkly/java-server-sdk/issues/156)) +- If `track` or `identify` is called without a user, the SDK now logs a warning, and does not send an analytics event to LaunchDarkly (since it would not be processed without a user). +### Note on future releases + +The LaunchDarkly SDK repositories are being renamed for consistency. This repository is now `java-server-sdk` rather than `java-client`. + +The artifact names will also change. In the 4.6.3 release, the generated artifact was named `com.launchdarkly.client:launchdarkly-client`; in all future releases, it will be `com.launchdarkly.client:launchdarkly-java-server-sdk`. + +## [4.6.2] - 2019-02-21 +### Fixed +- If an unrecoverable `java.lang.Error` is thrown within the analytics event dispatching thread, the SDK will now log the error stacktrace to the configured logger and then disable event sending, so that all further events are simply discarded. Previously, the SDK could be left in a state where application threads would continue trying to push events onto a queue that was no longer being consumed, which could block those threads. 
The SDK will not attempt to restart the event thread after such a failure, because an `Error` typically indicates a serious problem with the application environment. +- Summary event counters now use 64-bit integers instead of 32-bit, so they will not overflow if there is an extremely large volume of events. +- The SDK's CI test suite now includes running the tests in Windows. + +## [4.6.1] - 2019-01-14 +### Fixed +- Fixed a potential race condition that could happen when using a DynamoDB or Consul feature store. The Redis feature store was not affected. + + +## [4.6.0] - 2018-12-12 +### Added: +- The SDK jars now contain OSGi manifests which should make it possible to use them as bundles. The default jar requires Gson and SLF4J to be provided by other bundles, while the jar with the "all" classifier contains versions of Gson and SLF4J which it both exports and imports (i.e. it self-wires them, so it will use a higher version if you provide one). The "thin" jar is not recommended in an OSGi environment because it requires many dependencies which may not be available as bundles. +- There are now helper classes that make it much simpler to write a custom `FeatureStore` implementation. See the `com.launchdarkly.client.utils` package. The Redis feature store has been revised to use this code, although its functionality is unchanged except for the fix mentioned below. +- `FeatureStore` caching parameters (for Redis or other databases) are now encapsulated in the `FeatureStoreCacheConfig` class. + +### Changed: +- The exponential backoff behavior when a stream connection fails has changed as follows. Previously, the backoff delay would increase for each attempt if the connection could not be made at all, or if a read timeout happened; but if a connection was made and then an error (other than a timeout) occurred, the delay would be reset to the minimum value. Now, the delay is only reset if a stream connection is made and remains open for at least a minute. 
+ +### Fixed: +- The Redis feature store would incorrectly report that it had not been initialized, if there happened to be no feature flags in your environment at the time that it was initialized. + +### Deprecated: +- The `RedisFeatureStoreBuilder` methods `cacheTime`, `refreshStaleValues`, and `asyncRefresh` are deprecated in favor of the new `caching` method which sets these all at once. + +## [4.5.1] - 2018-11-21 +### Fixed: +- Fixed a build error that caused the `com.launchdarkly.client.files` package (the test file data source component added in v4.5.0) to be inaccessible unless you were using the "thin" jar. +- Stream connection errors are now logged at `WARN` level, rather than `ERROR`. + +## [4.5.0] - 2018-10-26 +### Added: +It is now possible to inject feature flags into the client from local JSON or YAML files, replacing the normal LaunchDarkly connection. This would typically be for testing purposes. See `com.launchdarkly.client.files.FileComponents`. + +## [4.4.1] - 2018-10-15 +### Fixed: +- The SDK's Maven releases had a `pom.xml` that mistakenly referenced dependencies that are actually bundled (with shading) inside of our jar, resulting in those dependencies being redundantly downloaded and included (without shading) in the runtime classpath, which could cause conflicts. This has been fixed. ([#122](https://github.com/launchdarkly/java-server-sdk/issues/122)) + +## [4.4.0] - 2018-10-01 +### Added: +- The `allFlagsState()` method now accepts a new option, `FlagsStateOption.DETAILS_ONLY_FOR_TRACKED_FLAGS`, which reduces the size of the JSON representation of the flag state by omitting some metadata. Specifically, it omits any data that is normally used for generating detailed evaluation events if a flag does not have event tracking or debugging turned on. 
+ +### Fixed: +- JSON data from `allFlagsState()` is now slightly smaller even if you do not use the new option described above, because it completely omits the flag property for event tracking unless that property is `true`. + +## [4.3.2] - 2018-09-11 +### Fixed: +- Event delivery now works correctly when the events are being forwarded through a [LaunchDarkly Relay Proxy](https://github.com/launchdarkly/ld-relay). + + +## [4.3.1] - 2018-09-04 +### Fixed: +- When evaluating a prerequisite feature flag, the analytics event for the evaluation did not include the result value if the prerequisite flag was off. +- The default Gson serialization for `LDUser` now includes all user properties. Previously, it omitted `privateAttributeNames`. + +## [4.3.0] - 2018-08-27 +### Added: +- The new `LDClient` method `allFlagsState()` should be used instead of `allFlags()` if you are passing flag data to the front end for use with the JavaScript SDK. It preserves some flag metadata that the front end requires in order to send analytics events correctly. Versions 2.5.0 and above of the JavaScript SDK are able to use this metadata, but the output of `allFlagsState()` will still work with older versions. +- The `allFlagsState()` method also allows you to select only client-side-enabled flags to pass to the front end, by using the option `FlagsStateOption.CLIENT_SIDE_ONLY`. ([#112](https://github.com/launchdarkly/java-server-sdk/issues/112)) +- The new `LDClient` methods `boolVariationDetail`, `intVariationDetail`, `doubleVariationDetail`, `stringVariationDetail`, and `jsonVariationDetail` allow you to evaluate a feature flag (using the same parameters as you would for `boolVariation`, etc.) and receive more information about how the value was calculated. 
This information is returned in an `EvaluationDetail` object, which contains both the result value and an `EvaluationReason` which will tell you, for instance, if the user was individually targeted for the flag or was matched by one of the flag's rules, or if the flag returned the default value due to an error. + +### Fixed: +- Fixed a bug in `LDUser.Builder` that would throw an exception if you initialized the builder by copying an existing user, and then tried to add a custom attribute. + +### Deprecated: +- `LDClient.allFlags()` + +## [4.2.2] - 2018-08-17 +### Fixed: +- When logging errors related to the evaluation of a specific flag, the log message now always includes the flag key. +- Exception stacktraces are now logged only at DEBUG level. Previously, some were being logged at ERROR level. + +## [4.2.1] - 2018-07-16 +### Fixed: +- Should not permanently give up on posting events if the server returns a 400 error. +- Fixed a bug in the Redis store that caused an unnecessary extra Redis query (and a debug-level log message about updating a flag with the same version) after every update of a flag. + +## [4.2.0] - 2018-06-26 +### Added: +- New overloads of `LDUser.Builder.custom` and `LDUser.Builder.privateCustom` allow you to set a custom attribute value to any JSON element. + +### Changed: +- The client now treats most HTTP 4xx errors as unrecoverable: that is, after receiving such an error, it will not make any more HTTP requests for the lifetime of the client instance, in effect taking the client offline. This is because such errors indicate either a configuration problem (invalid SDK key) or a bug, which is not likely to resolve without a restart or an upgrade. This does not apply if the error is 400, 408, 429, or any 5xx error. +- During initialization, if the client receives any of the unrecoverable errors described above, the client constructor will return immediately; previously it would continue waiting until a timeout. 
The `initialized()` method will return false in this case. + +## [4.1.0] - 2018-05-15 + +### Added: +- The new user builder methods `customValues` and `privateCustomValues` allow you to add a custom user attribute with multiple JSON values of mixed types. ([#126](https://github.com/launchdarkly/java-server-sdk/issues/126)) +- The new constant `VersionedDataKind.ALL` is a list of all existing `VersionedDataKind` instances. This is mainly useful if you are writing a custom `FeatureStore` implementation. + +## [4.0.0] - 2018-05-10 + +### Changed: +- To reduce the network bandwidth used for analytics events, feature request events are now sent as counters rather than individual events, and user details are now sent only at intervals rather than in each event. These behaviors can be modified through the LaunchDarkly UI and with the new configuration option `inlineUsersInEvents`. +- When sending analytics events, if there is a connection error or an HTTP 5xx response, the client will try to send the events again one more time after a one-second delay. +- The `LdClient` class is now `final`. + +### Added: +- New methods on `LDConfig.Builder` (`updateProcessorFactory`, `featureStoreFactory`, `eventProcessorFactory`) allow you to specify different implementations of each of the main client subcomponents (receiving feature state, storing feature state, and sending analytics events) for testing or for any other purpose. The `Components` class provides factories for all built-in implementations of these. + +### Deprecated: +- The `featureStore` configuration method is deprecated, replaced by the new factory-based mechanism described above. + + +## [3.0.3] - 2018-03-26 +### Fixed +* In the Redis feature store, fixed a synchronization problem that could cause a feature flag update to be missed if several of them happened in rapid succession. +* Fixed a bug that would cause a `NullPointerException` when trying to evaluate a flag rule that contained an unknown operator type. 
This could happen if you started using some recently added feature flag functionality in the LaunchDarkly application but had not yet upgraded the SDK to a version that supports that feature. In this case, it should now simply treat that rule as a non-match. + +### Changed +* The log message "Attempted to update ... with a version that is the same or older" has been downgraded from `WARN` level to `DEBUG`. It can happen frequently in normal operation when the client is in streaming mode, and is not a cause for concern. + +## [3.0.2] - 2018-03-01 +### Fixed +- Improved performance when evaluating flags with custom attributes, by avoiding an unnecessary caught exception (thanks, [rbalamohan](https://github.com/launchdarkly/java-server-sdk/issues/113)). + + +## [3.0.1] - 2018-02-22 +### Added +- Support for a new LaunchDarkly feature: reusable user segments. + +### Changed +- The `FeatureStore` interface has been changed to support user segment data as well as feature flags. Existing code that uses `InMemoryFeatureStore` or `RedisFeatureStore` should work as before, but custom feature store implementations will need to be updated. +- Removed deprecated methods. + + +## [3.0.0] - 2018-02-21 + +_This release was broken and should not be used._ + + +## [2.6.1] - 2018-03-01 +### Fixed +- Improved performance when evaluating flags with custom attributes, by avoiding an unnecessary caught exception (thanks, [rbalamohan](https://github.com/launchdarkly/java-server-sdk/issues/113)). + + +## [2.6.0] - 2018-02-12 +## Added +- Adds support for a future LaunchDarkly feature, coming soon: semantic version user attributes. + +## Changed +- It is now possible to compute rollouts based on an integer attribute of a user, not just a string attribute. + + +## [2.5.1] - 2018-01-31 + +## Changed +- All threads created by the client are now daemon threads. +- Fixed a bug that could result in a previously deleted feature flag appearing to be available again. 
+- Reduced the logging level for use of an unknown feature flag from `WARN` to `INFO`. + + +## [2.5.0] - 2018-01-08 +## Added +- Support for specifying [private user attributes](https://docs.launchdarkly.com/home/users/attributes#creating-private-user-attributes) in order to prevent user attributes from being sent in analytics events back to LaunchDarkly. See the `allAttributesPrivate` and `privateAttributeNames` methods on `LDConfig.Builder` as well as the `privateX` methods on `LDUser.Builder`. + +## [2.4.0] - 2017-12-20 +## Changed +- Added an option to disable sending analytics events +- No longer attempt to reconnect if a 401 response is received (this would indicate an invalid SDK key, so retrying won't help) +- Simplified logic to detect dropped stream connections +- Increased default polling interval to 30s +- Use flag data in redis before stream connection is established, if possible (See #107) +- Avoid creating HTTP cache when streaming mode is enabled (as it won't be useful). This makes it possible to use the SDK in Google App Engine and other environments with no mutable disk access. + + +## [2.3.4] - 2017-10-25 +## Changed +- Removed GSON dependency from default jar (fixes #103) + +## [2.3.2] - 2017-09-22 +## Changed +- Only log a warning on the first event that overflows the event buffer [#102] + +## [2.3.1] - 2017-08-11 +## Changed +- Updated okhttp-eventsource dependency to 1.5.0 to pick up better connection timeout handling. + +## [2.3.0] - 2017-07-10 +## Added +- LDUser `Builder` constructor which accepts a previously built user as an initialization parameter. 
+ + +## [2.2.6] - 2017-06-16 +## Added +- #96 `LDUser` now has `equals()` and `hashCode()` methods + +## Changed +- #93 `LDClient` now releases resources more quickly when shutting down + +## [2.2.5] - 2017-06-02 +## Changed +- Improved Gson compatibility (added no-args constructors for classes we deserialize) +- Automated release process + + +## [2.2.4] - 2017-06-02 +## Changed +- Improved Gson compatibility (added no-args constructors for classes we deserialize) +- Automated release process + + +## [2.2.3] - 2017-05-10 +### Fixed +- Fixed issue where stream connection failed to fully establish + +## [2.2.2] - 2017-05-05 +### Fixed +- In Java 7, connections to LaunchDarkly are now possible using TLSv1.1 and/or TLSv1.2 +- The order of SSE stream events is now preserved. ([launchdarkly/okhttp-eventsource#19](https://github.com/launchdarkly/okhttp-eventsource/issues/19)) + +## [2.2.1] - 2017-04-25 +### Fixed +- [#92](https://github.com/launchdarkly/java-server-sdk/issues/92) Regex `matches` targeting rules now include the user if +a match is found anywhere in the attribute. Before fixing this bug, the entire attribute needed to match the pattern. + +## [2.2.0] - 2017-04-11 +### Added +- Authentication for proxied http requests is now supported (Basic Auth only) + +### Changed +- Improved Redis connection pool management. + +## [2.1.0] - 2017-03-02 +### Added +- LdClientInterface (and its implementation) have a new method: `boolean isFlagKnown(String featureKey)` which checks for a +feature flag's existence. Thanks @yuv422! + +## [2.0.11] - 2017-02-24 +### Changed +- EventProcessor now respects the connect and socket timeouts configured with LDConfig. + +## [2.0.10] - 2017-02-06 +### Changed +- Updated okhttp-eventsource dependency to bring in newer okhttp dependency +- Added more verbose debug level logging when sending events + +## [2.0.9] - 2017-01-24 +### Changed +- StreamProcessor uses the proxy configuration specified by LDConfig. 
+ +## [2.0.8] - 2016-12-22 +### Changed +- Better handling of null default values. + +## [2.0.7] - 2016-12-21 +### Changed +- allFlags() method on client no longer returns null when client is in offline mode. + +## [2.0.6] - 2016-11-21 +### Changed +- RedisFeatureStore: Update Jedis dependency. Improved thread/memory management. + +## [2.0.5] - 2016-11-09 +### Changed +- The StreamProcessor now listens for heartbeats from the streaming API, and will automatically reconnect if heartbeats are not received. + +## [2.0.4] - 2016-10-12 +### Changed +- Updated GSON dependency version to 2.7 + +## [2.0.3] - 2016-10-10 +### Added +- StreamingProcessor now supports increasing retry delays with jitter. Addresses [#74](https://github.com/launchdarkly/java-server-sdk/issues/74) + +## [2.0.2] - 2016-09-13 +### Added +- Now publishing artifact with 'all' classifier that includes SLF4J for ColdFusion or other systems that need it. + +## [2.0.1] - 2016-08-12 +### Removed +- Removed slf4j from default artifact: [#71](https://github.com/launchdarkly/java-server-sdk/issues/71) + +## [2.0.0] - 2016-08-08 +### Added +- Support for multivariate feature flags. New methods `boolVariation`, `jsonVariation` and `intVariation` and `doubleVariation` for multivariates. +- Added `LDClientInterface`, an interface suitable for mocking `LDClient`. + +### Changed +- The `Feature` data model has been replaced with `FeatureFlag`. `FeatureFlag` is not generic. +- The `allFlags` method now returns a `Map` to support multivariate flags. + +### Deprecated +- The `toggle` call has been deprecated in favor of `boolVariation`. + +### Removed +- The `getFlag` call has been removed. +- The `debugStreaming` configuration option has been removed. 
diff --git a/lib/sdk/server/CODEOWNERS b/lib/sdk/server/CODEOWNERS new file mode 100644 index 0000000..f541913 --- /dev/null +++ b/lib/sdk/server/CODEOWNERS @@ -0,0 +1,2 @@ +# Repository Maintainers +* @launchdarkly/team-sdk-java diff --git a/lib/sdk/server/CONTRIBUTING.md b/lib/sdk/server/CONTRIBUTING.md new file mode 100644 index 0000000..3dfa4e2 --- /dev/null +++ b/lib/sdk/server/CONTRIBUTING.md @@ -0,0 +1,77 @@ +# Contributing to the LaunchDarkly Server-side SDK for Java + +LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. + +## Submitting bug reports and feature requests + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/java-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. + +## Submitting pull requests + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +## Build instructions + +### Prerequisites + +The SDK builds with [Gradle](https://gradle.org/) and should be built against Java 8. + +Many basic classes are implemented in the module `launchdarkly-java-sdk-common`, whose source code is in the [`launchdarkly/java-sdk-common`](https://github.com/launchdarkly/java-sdk-common) repository; this is so the common code can be shared with the LaunchDarkly Android SDK. 
By design, the LaunchDarkly Java SDK distribution does not expose a dependency on that module; instead, its classes and Javadoc content are embedded in the SDK jars. + +### Building + +To build the SDK without running any tests: +``` +./gradlew jar +``` + +If you wish to clean your working directory between builds, you can clean it by running: +``` +./gradlew clean +``` + +If you wish to use your generated SDK artifact by another Maven/Gradle project such as [hello-java](https://github.com/launchdarkly/hello-java), you will likely want to publish the artifact to your local Maven repository so that your other project can access it. +``` +./gradlew publishToMavenLocal +``` + +### Testing + +To build the SDK and run all unit tests: +``` +./gradlew test +``` + +To run the SDK contract test suite in Linux (see [`contract-tests/README.md`](./contract-tests/README.md)): + +```bash +make contract-tests +``` + +### Benchmarks + +The project in the `benchmarks` subdirectory uses [JMH](https://openjdk.java.net/projects/code-tools/jmh/) to generate performance metrics for the SDK. This is run as a CI job, and can also be run manually by running `make` within `benchmarks` and then inspecting `build/reports/jmh`. + +## Coding best practices + +### Logging + +The SDK uses a LaunchDarkly logging facade, [`com.launchdarkly.logging`](https://github.com/launchdarkly/java-logging). By default, this facade sends output to SLF4J. + +Here are some things to keep in mind for good logging behavior: + +1. Stick to the standardized logger name scheme defined in `Loggers.java`, preferably for all log output, but definitely for all log output above `DEBUG` level. Logger names can be useful for filtering log output, so it is desirable for users to be able to reference a clear, stable logger name like `com.launchdarkly.sdk.server.LDClient.Events` rather than a class name like `com.launchdarkly.sdk.server.EventSummarizer` which is an implementation detail. 
The text of a log message should be distinctive enough that we can easily find which class generated the message. + +2. Use parameterized messages (`logger.info("The value is {}", someValue)`) rather than string concatenation (`logger.info("The value is " + someValue)`). This avoids the overhead of string concatenation if the logger is not enabled for that level. If computing the value is an expensive operation, and it is _only_ relevant for logging, consider implementing that computation via a custom `toString()` method on some wrapper type so that it will be done lazily only if the log level is enabled. + +3. There is a standard pattern for logging exceptions, using the `com.launchdarkly.logging.LogValues` helpers. First, log the basic description of the exception at whatever level is appropriate (`WARN` or `ERROR`): `logger.warn("An error happened: {}", LogValues.exceptionSummary(ex))`. Then, log a stack at debug level: `logger.debug(LogValues.exceptionTrace(ex))`. The `exceptionTrace` helper is lazily evaluated so that the stacktrace will only be computed if debug logging is actually enabled. However, consider whether the stacktrace would be at all meaningful in this particular context; for instance, in a `try` block around a network I/O operation, the stacktrace would only tell us (a) some internal location in Java standard libraries and (b) the location in our own code where we tried to do the operation; (a) is very unlikely to tell us anything that the exception's type and message doesn't already tell us, and (b) could be more clearly communicated by just writing a specific log message. + +### Code coverage + +It is important to keep unit test coverage as close to 100% as possible in this project. You can view the latest code coverage report in CircleCI, as `coverage/html/index.html` in the artifacts for the "Java 11 - Linux - OpenJDK" job. You can also run the report locally with `./gradlew jacocoTestCoverage` and view `./build/reports/jacoco/test`. 
_The CircleCI build will fail if you commit a change that increases the number of uncovered lines_, unless you explicitly add an override as shown below. + +Sometimes a gap in coverage is unavoidable, usually because the compiler requires us to provide a code path for some condition that in practice can't happen and can't be tested, or because of a known issue with the code coverage tool. Please handle all such cases as follows: + +* Mark the code with an explanatory comment beginning with "COVERAGE:". +* Run the code coverage task with `./gradlew jacocoTestCoverageVerification`. It should fail and indicate how many lines of missed coverage exist in the method you modified. +* Add an item in the `knownMissedLinesForMethods` map in `build.gradle` that specifies that number of missed lines for that method signature. For instance, if the method `com.launchdarkly.sdk.server.SomeClass.someMethod(java.lang.String)` has two missed lines that cannot be covered, you would add `"SomeClass.someMethod(java.lang.String)": 2`. diff --git a/lib/sdk/server/LICENSE b/lib/sdk/server/LICENSE new file mode 100644 index 0000000..e82de54 --- /dev/null +++ b/lib/sdk/server/LICENSE @@ -0,0 +1,13 @@ +Copyright 2016 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
\ No newline at end of file diff --git a/lib/sdk/server/Makefile b/lib/sdk/server/Makefile new file mode 100644 index 0000000..ffa1b6f --- /dev/null +++ b/lib/sdk/server/Makefile @@ -0,0 +1,33 @@ + +build: + ./gradlew jar + +clean: + ./gradlew clean + +test: + ./gradlew test + +TEMP_TEST_OUTPUT=/tmp/sdk-test-service.log + +# Add any extra sdk-test-harness parameters here, such as -skip for tests that are +# temporarily not working. +TEST_HARNESS_PARAMS= + +build-contract-tests: + @cd contract-tests && ../gradlew installDist + +start-contract-test-service: + @contract-tests/service/build/install/service/bin/service + +start-contract-test-service-bg: + @echo "Test service output will be captured in $(TEMP_TEST_OUTPUT)" + @make start-contract-test-service >$(TEMP_TEST_OUTPUT) 2>&1 & + +run-contract-tests: + @curl -s https://raw.githubusercontent.com/launchdarkly/sdk-test-harness/v2/downloader/run.sh \ + | VERSION=v2 PARAMS="-url http://localhost:8000 -debug -stop-service-at-end $(TEST_HARNESS_PARAMS)" sh + +contract-tests: build-contract-tests start-contract-test-service-bg run-contract-tests + +.PHONY: build-contract-tests start-contract-test-service start-contract-test-service-bg run-contract-tests contract-tests diff --git a/lib/sdk/server/README.md b/lib/sdk/server/README.md new file mode 100644 index 0000000..251adb0 --- /dev/null +++ b/lib/sdk/server/README.md @@ -0,0 +1,79 @@ +# LaunchDarkly Server-side SDK for Java + +[![Circle CI](https://circleci.com/gh/launchdarkly/java-server-sdk.svg?style=shield)](https://circleci.com/gh/launchdarkly/java-server-sdk) +[![Javadocs](http://javadoc.io/badge/com.launchdarkly/launchdarkly-java-server-sdk.svg)](http://javadoc.io/doc/com.launchdarkly/launchdarkly-java-server-sdk) + +## LaunchDarkly overview + +[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves trillions of feature flags daily to help teams build better software, faster. 
[Get started](https://docs.launchdarkly.com/home/getting-started) using LaunchDarkly today! + +[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) + +## Supported Java versions + +This version of the LaunchDarkly SDK works with Java 8 and above. + +## Distributions + +Two variants of the SDK jar are published to Maven: + +* The default uberjar - this is accessible as `com.launchdarkly:launchdarkly-java-server-sdk:jar` and is the dependency used in the "[Getting started](https://docs.launchdarkly.com/sdk/server-side/java#getting-started)" section of the SDK reference guide as well as in the [`hello-java`](https://github.com/launchdarkly/hello-java) sample app. This variant contains the SDK classes and all of its required dependencies. All bundled dependencies that are not surfaced in the public API have shaded package names (and are not exported in OSGi), so they will not interfere with any other versions of the same packages. +* The "thin" jar - add `thin` in Maven, or `:thin` in Gradle. This contains only the SDK classes, without its dependencies. Applications using this jar must provide all of the dependencies that are in the SDK's `build.gradle`, so it is intended for use only in special cases. + +Previous SDK versions also included a third classifier, `all`, which was the same as the default uberjar but also contained the SLF4J API. This no longer exists because the SDK no longer requires the SLF4J API to be in the classpath. + +## Getting started + +Refer to the [SDK reference guide](https://docs.launchdarkly.com/sdk/server-side/java#getting-started) for instructions on getting started with using the SDK. + +## Logging + +By default, the LaunchDarkly SDK uses [SLF4J](https://www.slf4j.org/) _if_ the SLF4J API is present in the classpath. 
SLF4J has its own configuration mechanisms for determining where output will go, and filtering by level and/or logger name. + +If SLF4J is not in the classpath, the SDK's default logging destination is `System.err`. + +The SDK can also be configured to use other adapters from the [com.launchdarkly.logging](https://github.com/launchdarkly/java-logging) facade. See `LoggingConfigurationBuilder`. This allows the logging behavior to be completely determined by the application, rather than by external SLF4J configuration. + +For an example of using the default SLF4J behavior with a simple console logging configuration, check out the [`slf4j-logging` branch](https://github.com/launchdarkly/hello-java/tree/slf4j-logging) of the [`hello-java`](https://github.com/launchdarkly/hello-java) project. The [main branch](https://github.com/launchdarkly/hello-java) of `hello-java` uses console logging that is programmatically configured without SLF4J. + +All loggers are namespaced under `com.launchdarkly`, if you are using name-based filtering. + +Be aware of two considerations when enabling the DEBUG log level: +1. Debug-level logs can be very verbose. It is not recommended that you turn on debug logging in high-volume environments. +1. Potentially sensitive information is logged including LaunchDarkly users created by you in your usage of this SDK. + +## Using flag data from a file + +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See FileData for more details. + +## DNS caching issues + +LaunchDarkly servers operate in a load-balancing framework which may cause their IP addresses to change. This could result in the SDK failing to connect to LaunchDarkly if an old IP address is still in your system's DNS cache. + +Unlike some other languages, in Java the DNS caching behavior is controlled by the Java virtual machine rather than the operating system. 
The default behavior varies depending on whether there is a [security manager](https://docs.oracle.com/javase/tutorial/essential/environment/security.html): if there is, IP addresses will _never_ expire. In that case, we recommend that you set the security property `networkaddress.cache.ttl`, as described [here](https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-jvm-ttl.html), to a number of seconds such as 30 or 60 (a lower value will reduce the chance of intermittent failures, but will slightly reduce networking performance). + +## Learn more + +Read our [documentation](https://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for this SDK](https://docs.launchdarkly.com/docs/java-sdk-reference) or our [code-generated API documentation](https://launchdarkly.github.io/java-server-sdk/). + +## Testing + +We run integration tests for all our SDKs using a centralized test harness. This approach gives us the ability to test for consistency across SDKs, as well as test networking behavior in a long-running application. These tests cover each method in the SDK, and verify that event sending, flag evaluation, stream reconnection, and other aspects of the SDK all behave correctly. + +## Contributing + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK. + +## About LaunchDarkly + +* LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: + * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. 
+ * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). + * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. + * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. +* Explore LaunchDarkly + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates diff --git a/lib/sdk/server/SECURITY.md b/lib/sdk/server/SECURITY.md new file mode 100644 index 0000000..10f1d1a --- /dev/null +++ b/lib/sdk/server/SECURITY.md @@ -0,0 +1,5 @@ +# Reporting and Fixing Security Issues + +Please report all security issues to the LaunchDarkly security team by submitting a bug bounty report to our [HackerOne program](https://hackerone.com/launchdarkly?type=team). LaunchDarkly will triage and address all valid security issues following the response targets defined in our program policy. Valid security issues may be eligible for a bounty. 
+ +Please do not open issues or pull requests for security issues. This makes the problem immediately visible to everyone, including potentially malicious actors. diff --git a/lib/sdk/server/benchmarks/Makefile b/lib/sdk/server/benchmarks/Makefile new file mode 100644 index 0000000..06e8ff3 --- /dev/null +++ b/lib/sdk/server/benchmarks/Makefile @@ -0,0 +1,36 @@ +.PHONY: benchmark clean sdk + +BASE_DIR:=$(shell pwd) +PROJECT_DIR=$(shell cd .. && pwd) +SDK_VERSION=$(shell grep "version=" $(PROJECT_DIR)/gradle.properties | cut -d '=' -f 2) + +BENCHMARK_SDK_JAR=lib/launchdarkly-java-server-sdk.jar +BENCHMARK_TEST_JAR=lib/launchdarkly-java-server-sdk-test.jar +SDK_JARS_DIR=$(PROJECT_DIR)/build/libs +SDK_JAR=$(SDK_JARS_DIR)/launchdarkly-java-server-sdk-$(SDK_VERSION).jar +SDK_TEST_JAR=$(SDK_JARS_DIR)/launchdarkly-java-server-sdk-$(SDK_VERSION)-test.jar + +benchmark: $(BENCHMARK_SDK_JAR) $(BENCHMARK_TEST_JAR) + rm -rf build/tmp + ../gradlew jmh + cat build/reports/jmh/human.txt + ../gradlew jmhReport + +clean: + rm -rf build lib + +sdk: $(BENCHMARK_ALL_JAR) $(BENCHMARK_TEST_JAR) + +$(BENCHMARK_SDK_JAR): $(SDK_JAR) + mkdir -p lib + cp $< $@ + +$(BENCHMARK_TEST_JAR): $(SDK_TEST_JAR) + mkdir -p lib + cp $< $@ + +$(SDK_JAR): + cd .. && ./gradlew shadowJar + +$(SDK_TEST_JAR): + cd .. 
&& ./gradlew testJar diff --git a/lib/sdk/server/benchmarks/build.gradle b/lib/sdk/server/benchmarks/build.gradle new file mode 100644 index 0000000..fa15e5d --- /dev/null +++ b/lib/sdk/server/benchmarks/build.gradle @@ -0,0 +1,67 @@ + +buildscript { + repositories { + mavenCentral() + } +} + +plugins { + id "me.champeau.gradle.jmh" version "0.5.0" + id "io.morethan.jmhreport" version "0.9.0" +} + +repositories { + mavenCentral() +} + +ext.versions = [ + "jmh": "1.21", + "guava": "19.0" +] + +dependencies { + implementation files("lib/launchdarkly-java-server-sdk.jar") + implementation files("lib/launchdarkly-java-server-sdk-test.jar") + implementation "com.google.code.gson:gson:2.8.9" + implementation "com.google.guava:guava:${versions.guava}" // required by SDK test code + implementation "com.squareup.okhttp3:mockwebserver:3.12.10" + implementation "org.openjdk.jmh:jmh-core:1.21" + implementation "org.openjdk.jmh:jmh-generator-annprocess:${versions.jmh}" +} + +// need to set duplicatesStrategy because otherwise some non-class files with +// duplicate names in our dependencies will cause an error +tasks.getByName('jmhJar').doFirst() {duplicatesStrategy(DuplicatesStrategy.EXCLUDE)} + +jmh { + iterations = 10 // Number of measurement iterations to do. + benchmarkMode = ['avgt'] // "average time" - reports execution time as ns/op and allocations as B/op. + // batchSize = 1 // Batch size: number of benchmark method calls per operation. (some benchmark modes can ignore this setting) + fork = 1 // How many times to forks a single benchmark. Use 0 to disable forking altogether + // failOnError = false // Should JMH fail immediately if any benchmark had experienced the unrecoverable error? + forceGC = true // Should JMH force GC between iterations? 
+ humanOutputFile = project.file("${project.buildDir}/reports/jmh/human.txt") // human-readable output file + // resultsFile = project.file("${project.buildDir}/reports/jmh/results.txt") // results file + operationsPerInvocation = 3 // Operations per invocation. + // benchmarkParameters = [:] // Benchmark parameters. + profilers = [ 'gc' ] // Use profilers to collect additional data. Supported profilers: [cl, comp, gc, stack, perf, perfnorm, perfasm, xperf, xperfasm, hs_cl, hs_comp, hs_gc, hs_rt, hs_thr] + timeOnIteration = '1s' // Time to spend at each measurement iteration. + resultFormat = 'JSON' // Result format type (one of CSV, JSON, NONE, SCSV, TEXT) + // synchronizeIterations = false // Synchronize iterations? + // threads = 4 // Number of worker threads to run with. + // timeout = '1s' // Timeout for benchmark iteration. + timeUnit = 'ns' // Output time unit. Available time units are: [m, s, ms, us, ns]. + verbosity = 'NORMAL' // Verbosity mode. Available modes are: [SILENT, NORMAL, EXTRA] + warmup = '1s' // Time to spend at each warmup iteration. + warmupBatchSize = 2 // Warmup batch size: number of benchmark method calls per operation. + warmupIterations = 1 // Number of warmup iterations to do. + // warmupForks = 0 // How many warmup forks to make for a single benchmark. 0 to disable warmup forks. + // warmupMode = 'INDI' // Warmup mode for warming up selected benchmarks. Warmup modes are: [INDI, BULK, BULK_INDI]. 
+ + jmhVersion = versions.jmh +} + +jmhReport { + jmhResultPath = project.file('build/reports/jmh/results.json') + jmhReportOutput = project.file('build/reports/jmh') +} diff --git a/lib/sdk/server/benchmarks/settings.gradle b/lib/sdk/server/benchmarks/settings.gradle new file mode 100644 index 0000000..81d1c11 --- /dev/null +++ b/lib/sdk/server/benchmarks/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'launchdarkly-java-server-sdk-benchmarks' diff --git a/lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/LDClientEvaluationBenchmarks.java b/lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/LDClientEvaluationBenchmarks.java new file mode 100644 index 0000000..9cba613 --- /dev/null +++ b/lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/LDClientEvaluationBenchmarks.java @@ -0,0 +1,159 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.subsystems.DataStore; + +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; + +import java.util.Random; + +import static com.launchdarkly.sdk.server.TestComponents.initedDataStore; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.sdk.server.TestUtil.upsertFlag; +import static com.launchdarkly.sdk.server.TestValues.BOOLEAN_FLAG_KEY; +import static com.launchdarkly.sdk.server.TestValues.CLAUSE_MATCH_VALUE_COUNT; +import static com.launchdarkly.sdk.server.TestValues.FLAG_WITH_MULTI_VALUE_CLAUSE_KEY; +import static com.launchdarkly.sdk.server.TestValues.FLAG_WITH_PREREQ_KEY; +import static com.launchdarkly.sdk.server.TestValues.FLAG_WITH_TARGET_LIST_KEY; +import static com.launchdarkly.sdk.server.TestValues.INT_FLAG_KEY; 
+import static com.launchdarkly.sdk.server.TestValues.JSON_FLAG_KEY; +import static com.launchdarkly.sdk.server.TestValues.NOT_MATCHED_VALUE_CONTEXT; +import static com.launchdarkly.sdk.server.TestValues.NOT_TARGETED_CONTEXT_KEY; +import static com.launchdarkly.sdk.server.TestValues.SDK_KEY; +import static com.launchdarkly.sdk.server.TestValues.STRING_FLAG_KEY; +import static com.launchdarkly.sdk.server.TestValues.TARGETED_USER_KEYS; +import static com.launchdarkly.sdk.server.TestValues.UNKNOWN_FLAG_KEY; +import static com.launchdarkly.sdk.server.TestValues.makeTestFlags; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * These benchmarks cover just the evaluation logic itself (and, by necessity, the overhead of getting the + * flag to be evaluated out of the in-memory store). + */ +public class LDClientEvaluationBenchmarks { + @State(Scope.Thread) + public static class BenchmarkInputs { + // Initialization of the things in BenchmarkInputs does not count as part of a benchmark. 
+ final LDClientInterface client; + final LDContext basicUser; + final Random random; + + public BenchmarkInputs() { + DataStore dataStore = initedDataStore(); + for (FeatureFlag flag: makeTestFlags()) { + upsertFlag(dataStore, flag); + } + + LDConfig config = new LDConfig.Builder() + .dataStore(specificComponent(dataStore)) + .events(Components.noEvents()) + .dataSource(Components.externalUpdatesOnly()) + .logging(Components.noLogging()) + .build(); + client = new LDClient(SDK_KEY, config); + + basicUser = LDContext.create("userkey"); + + random = new Random(); + } + } + + @Benchmark + public void boolVariationForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.boolVariation(BOOLEAN_FLAG_KEY, inputs.basicUser, false); + } + + @Benchmark + public void boolVariationDetailForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.boolVariationDetail(BOOLEAN_FLAG_KEY, inputs.basicUser, false); + } + + @Benchmark + public void boolVariationForUnknownFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.boolVariation(UNKNOWN_FLAG_KEY, inputs.basicUser, false); + } + + @Benchmark + public void intVariationForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.intVariation(INT_FLAG_KEY, inputs.basicUser, 0); + } + + @Benchmark + public void intVariationDetailForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.intVariationDetail(INT_FLAG_KEY, inputs.basicUser, 0); + } + + @Benchmark + public void intVariationForUnknownFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.intVariation(UNKNOWN_FLAG_KEY, inputs.basicUser, 0); + } + + @Benchmark + public void stringVariationForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.stringVariation(STRING_FLAG_KEY, inputs.basicUser, ""); + } + + @Benchmark + public void stringVariationDetailForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.stringVariationDetail(STRING_FLAG_KEY, inputs.basicUser, 
""); + } + + @Benchmark + public void stringVariationForUnknownFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.stringVariation(UNKNOWN_FLAG_KEY, inputs.basicUser, ""); + } + + @Benchmark + public void jsonVariationForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.jsonValueVariation(JSON_FLAG_KEY, inputs.basicUser, LDValue.ofNull()); + } + + @Benchmark + public void jsonVariationDetailForSimpleFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.jsonValueVariationDetail(JSON_FLAG_KEY, inputs.basicUser, LDValue.ofNull()); + } + + @Benchmark + public void jsonVariationForUnknownFlag(BenchmarkInputs inputs) throws Exception { + inputs.client.jsonValueVariation(UNKNOWN_FLAG_KEY, inputs.basicUser, LDValue.ofNull()); + } + + @Benchmark + public void userFoundInTargetList(BenchmarkInputs inputs) throws Exception { + String userKey = TARGETED_USER_KEYS.get(inputs.random.nextInt(TARGETED_USER_KEYS.size())); + boolean result = inputs.client.boolVariation(FLAG_WITH_TARGET_LIST_KEY, LDContext.create(userKey), false); + assertTrue(result); + } + + @Benchmark + public void userNotFoundInTargetList(BenchmarkInputs inputs) throws Exception { + boolean result = inputs.client.boolVariation(FLAG_WITH_TARGET_LIST_KEY, LDContext.create(NOT_TARGETED_CONTEXT_KEY), false); + assertFalse(result); + } + + @Benchmark + public void flagWithPrerequisite(BenchmarkInputs inputs) throws Exception { + boolean result = inputs.client.boolVariation(FLAG_WITH_PREREQ_KEY, inputs.basicUser, false); + assertTrue(result); + } + + @Benchmark + public void userValueFoundInClauseList(BenchmarkInputs inputs) throws Exception { + int i = inputs.random.nextInt(CLAUSE_MATCH_VALUE_COUNT); + LDContext context = TestValues.CLAUSE_MATCH_VALUE_CONTEXTS.get(i); + boolean result = inputs.client.boolVariation(FLAG_WITH_MULTI_VALUE_CLAUSE_KEY, context, false); + assertTrue(result); + } + + @Benchmark + public void userValueNotFoundInClauseList(BenchmarkInputs inputs) 
throws Exception { + boolean result = inputs.client.boolVariation(FLAG_WITH_MULTI_VALUE_CLAUSE_KEY, NOT_MATCHED_VALUE_CONTEXT, false); + assertFalse(result); + } +} diff --git a/lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/TestValues.java b/lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/TestValues.java new file mode 100644 index 0000000..4d3a675 --- /dev/null +++ b/lib/sdk/server/benchmarks/src/jmh/java/com/launchdarkly/sdk/server/TestValues.java @@ -0,0 +1,117 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Target; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; + +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.flagWithValue; +import static com.launchdarkly.sdk.server.ModelBuilders.prerequisite; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; + +public abstract class TestValues { + private TestValues() {} + + public static final String SDK_KEY = "sdk-key"; + + public static final LDContext BASIC_CONTEXT = LDContext.create("userkey"); + + public static final String BOOLEAN_FLAG_KEY = "flag-bool"; + public static final String INT_FLAG_KEY = "flag-int"; + public static final String STRING_FLAG_KEY = "flag-string"; + public static final String JSON_FLAG_KEY = "flag-json"; + public static final String FLAG_WITH_TARGET_LIST_KEY = "flag-with-targets"; + public static final String FLAG_WITH_PREREQ_KEY = "flag-with-prereq"; + public static final String FLAG_WITH_MULTI_VALUE_CLAUSE_KEY = "flag-with-multi-value-clause"; + public static final String UNKNOWN_FLAG_KEY = "no-such-flag"; + + public static final List TARGETED_USER_KEYS; + static { + TARGETED_USER_KEYS = new ArrayList<>(); + 
for (int i = 0; i < 1000; i++) { + TARGETED_USER_KEYS.add("user-" + i); + } + } + public static final String NOT_TARGETED_CONTEXT_KEY = "no-match"; + + public static final String CLAUSE_MATCH_ATTRIBUTE = "clause-match-attr"; + public static final int CLAUSE_MATCH_VALUE_COUNT = 1000; + public static final List CLAUSE_MATCH_VALUES; + public static final List CLAUSE_MATCH_VALUE_CONTEXTS; + static { + // pre-generate all these values and matching users so this work doesn't count in the evaluation benchmark performance + CLAUSE_MATCH_VALUES = new ArrayList<>(CLAUSE_MATCH_VALUE_COUNT); + CLAUSE_MATCH_VALUE_CONTEXTS = new ArrayList<>(CLAUSE_MATCH_VALUE_COUNT); + for (int i = 0; i < 1000; i++) { + LDValue value = LDValue.of("value-" + i); + LDContext context = LDContext.builder("key").set(CLAUSE_MATCH_ATTRIBUTE, value).build(); + CLAUSE_MATCH_VALUES.add(value); + CLAUSE_MATCH_VALUE_CONTEXTS.add(context); + } + } + public static final LDValue NOT_MATCHED_VALUE = LDValue.of("no-match"); + public static final LDContext NOT_MATCHED_VALUE_CONTEXT = + LDContext.builder("key").set(CLAUSE_MATCH_ATTRIBUTE, NOT_MATCHED_VALUE).build(); + + public static final String EMPTY_JSON_DATA = "{\"flags\":{},\"segments\":{}}"; + + public static List makeTestFlags() { + List flags = new ArrayList<>(); + + flags.add(flagWithValue(BOOLEAN_FLAG_KEY, LDValue.of(true))); + flags.add(flagWithValue(INT_FLAG_KEY, LDValue.of(1))); + flags.add(flagWithValue(STRING_FLAG_KEY, LDValue.of("x"))); + flags.add(flagWithValue(JSON_FLAG_KEY, LDValue.buildArray().build())); + + FeatureFlag targetsFlag = flagBuilder(FLAG_WITH_TARGET_LIST_KEY) + .on(true) + .targets(new Target(null, new HashSet(TARGETED_USER_KEYS), 1)) + .fallthroughVariation(0) + .offVariation(0) + .variations(LDValue.of(false), LDValue.of(true)) + .build(); + flags.add(targetsFlag); + + FeatureFlag prereqFlag = flagBuilder("prereq-flag") + .on(true) + .fallthroughVariation(1) + .variations(LDValue.of(false), LDValue.of(true)) + .build(); + 
flags.add(prereqFlag); + + FeatureFlag flagWithPrereq = flagBuilder(FLAG_WITH_PREREQ_KEY) + .on(true) + .prerequisites(prerequisite("prereq-flag", 1)) + .fallthroughVariation(1) + .offVariation(0) + .variations(LDValue.of(false), LDValue.of(true)) + .build(); + flags.add(flagWithPrereq); + + AttributeRef matchAttr = AttributeRef.fromLiteral(CLAUSE_MATCH_ATTRIBUTE); + FeatureFlag flagWithMultiValueClause = flagBuilder(FLAG_WITH_MULTI_VALUE_CLAUSE_KEY) + .on(true) + .fallthroughVariation(0) + .offVariation(0) + .variations(LDValue.of(false), LDValue.of(true)) + .rules( + ruleBuilder() + .clauses(new DataModel.Clause(null, matchAttr, DataModel.Operator.in, CLAUSE_MATCH_VALUES, false)) + .build() + ) + .build(); + flags.add(flagWithMultiValueClause); + + return flags; + } + + public static final int TEST_EVENTS_COUNT = 1000; + + public static final LDValue CUSTOM_EVENT_DATA = LDValue.of("data"); +} diff --git a/lib/sdk/server/build.gradle b/lib/sdk/server/build.gradle new file mode 100644 index 0000000..802d9ea --- /dev/null +++ b/lib/sdk/server/build.gradle @@ -0,0 +1,644 @@ +import java.nio.file.Files +import java.nio.file.FileSystems +import java.nio.file.StandardCopyOption + +buildscript { + repositories { + mavenCentral() + mavenLocal() + } + dependencies { + classpath "org.eclipse.virgo.util:org.eclipse.virgo.util.osgi.manifest:3.5.0.RELEASE" + classpath "org.osgi:osgi_R4_core:1.0" + } +} + +plugins { + id "java" + id "java-library" + id "checkstyle" + id "jacoco" + id "signing" + id "com.github.johnrengelman.shadow" version "7.1.2" + id "maven-publish" + id "de.marcphilipp.nexus-publish" version "0.3.0" + id "io.codearte.nexus-staging" version "0.21.2" + id "org.ajoberstar.git-publish" version "2.1.3" + id "idea" +} + +repositories { + mavenLocal() + // Before LaunchDarkly release artifacts get synced to Maven Central they are here along with snapshots: + maven { url "https://oss.sonatype.org/content/groups/public/" } + mavenCentral() +} + +configurations { + 
commonClasses { + transitive false + } + commonDoc { + transitive false + } +} + +configurations.all { + // check for updates every build for dependencies with: 'changing: true' + resolutionStrategy.cacheChangingModulesFor 0, 'seconds' +} + +allprojects { + group = 'com.launchdarkly' + version = "${version}" + archivesBaseName = 'launchdarkly-java-server-sdk' + sourceCompatibility = 1.8 + targetCompatibility = 1.8 +} + +ext { + sdkBasePackage = "com.launchdarkly.sdk" + sdkBaseName = "launchdarkly-java-server-sdk" + + // List any packages here that should be included in OSGi imports for the SDK, if they cannot + // be discovered by looking in our explicit dependencies. + systemPackageImports = [ "javax.net", "javax.net.ssl" ] +} + +ext.libraries = [:] + +ext.versions = [ + "commonsCodec": "1.15", + "gson": "2.8.9", + "guava": "32.0.1-jre", + "jackson": "2.11.2", + "launchdarklyJavaSdkCommon": "2.1.1", + "launchdarklyJavaSdkInternal": "1.3.0", + "launchdarklyLogging": "1.1.0", + "okhttp": "4.9.3", // specify this for the SDK build instead of relying on the transitive dependency from okhttp-eventsource + "okhttpEventsource": "4.1.0", + "reactorCore":"3.3.22.RELEASE", + "slf4j": "1.7.21", + "snakeyaml": "2.0", + "jedis": "2.9.0", + "annotations": "13" // also included transitively with okhttp +] + +// Add dependencies to "libraries.internal" that we use internally but do not necessarily +// expose in our public API. Putting dependencies here has the following effects: +// +// 1. Those classes will be embedded in the default uberjar +// (launchdarkly-java-server-sdk-n.n.n.jar), and also in the "all" jar +// (launchdarkly-java-server-sdk-n.n.n.jar). +// +// 2. The classes are renamed (shaded) within those jars, and all references to them are +// updated to use the shaded names. The only exception to this is classes from +// launchdarkly-java-sdk-common and launchdarkly-logging, which are meant to be part of +// the public API. +// +// 3. 
The "thin" jar does not contain those classes, and references to them from the code +// in the "thin" jar are *not* renamed. If an application is using the "thin" jar, it is +// expected to provide those classes on its classpath. +// +// 4. They do not appear as dependences in pom.xml. +// +// 5. They are not declared as package imports or package exports in OSGI manifests. +// +// Note that Gson is included here but Jackson is not, even though there is some Jackson +// helper code in java-sdk-common. The reason is that the SDK always needs to use Gson for +// its own usual business, so (except in the "thin" jar) we will be embedding a shaded +// copy of Gson; but we do not use Jackson normally, we just provide those helpers for use +// by applications that are already using Jackson. So we do not want to embed it and we do +// not want it to show up as a dependency at all in our pom (and it's been excluded from +// the launchdarkly-java-sdk-common pom for the same reason). However, we do include +// Jackson in "libraries.optional" because we need to generate OSGi optional import +// headers for it. +libraries.internal = [ + "com.launchdarkly:launchdarkly-java-sdk-common:${versions.launchdarklyJavaSdkCommon}", + "com.launchdarkly:launchdarkly-java-sdk-internal:${versions.launchdarklyJavaSdkInternal}", + "com.launchdarkly:launchdarkly-logging:${versions.launchdarklyLogging}", + "commons-codec:commons-codec:${versions.commonsCodec}", + "com.google.code.gson:gson:${versions.gson}", + "com.google.guava:guava:${versions.guava}", + "com.squareup.okhttp3:okhttp:${versions.okhttp}", + "com.launchdarkly:okhttp-eventsource:${versions.okhttpEventsource}", + "org.yaml:snakeyaml:${versions.snakeyaml}", + "org.jetbrains:annotations:${versions.annotations}" +] + +// Add dependencies to "libraries.optional" that are not exposed in our public API and are +// *not* embedded in the SDK jar. 
These are for optional things that will only work if +// they are already in the application classpath; we do not want show them as a dependency +// because that would cause them to be pulled in automatically in all builds. The reason +// we need to even mention them here at all is for the sake of OSGi optional import headers. +// Putting dependencies here has the following effects: +// +// 1. They are not embedded in any of our jars. +// +// 2. References to them (in any jar) are not modified. +// +// 3. They do not appear as dependencies in pom.xml. +// +// 4. In OSGi manifests, they are declared as optional package imports. +libraries.optional = [ + "com.fasterxml.jackson.core:jackson-core:${versions.jackson}", + "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}", + "org.slf4j:slf4j-api:${versions.slf4j}", + "io.projectreactor:reactor-core:${versions.reactorCore}", +] + +// Add dependencies to "libraries.test" that are used only in unit tests. +libraries.test = [ + "org.hamcrest:hamcrest-all:1.3", + "org.easymock:easymock:3.4", + "org.mockito:mockito-core:3.+", + "junit:junit:4.12", + "com.fasterxml.jackson.core:jackson-core:${versions.jackson}", + "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}", + "com.launchdarkly:test-helpers:2.0.1", + "io.projectreactor:reactor-core:${versions.reactorCore}", // this is to make javadoc happy when using the test classpath +] + +configurations { + // We need to define "internal" as a custom configuration that contains the same things as + // "implementation", because "implementation" has special behavior in Gradle that prevents us + // from referencing it the way we do in shadeDependencies(). 
+ internal.extendsFrom implementation + optional +} + +dependencies { + implementation libraries.internal + testImplementation libraries.test, libraries.internal + optional libraries.optional + + internal libraries.internal + + commonClasses "com.launchdarkly:launchdarkly-java-sdk-common:${versions.launchdarklyJavaSdkCommon}" + commonDoc "com.launchdarkly:launchdarkly-java-sdk-common:${versions.launchdarklyJavaSdkCommon}:sources" +} + +checkstyle { + toolVersion = "9.3" + configFile file("${project.rootDir}/config/checkstyle/checkstyle.xml") +} + +task generateJava(type: Copy) { + // This updates Version.java + from 'src/templates/java' + into "src/main/java" + filter(org.apache.tools.ant.filters.ReplaceTokens, tokens: [VERSION: version.toString()]) +} + +compileJava { + classpath = configurations.internal + configurations.optional +} + +compileJava.dependsOn 'generateJava' + +jar { + // thin classifier means that the non-shaded non-fat jar is still available + // but is opt-in since users will have to specify it. + classifier = 'thin' + + from configurations.commonClasses.collect { zipTree(it) } + + // doFirst causes the following step to be run during Gradle's execution phase rather than the + // configuration phase; this is necessary because it accesses the build products + doFirst { + // In OSGi, the "thin" jar has to import all of its dependencies. + addOsgiManifest(project.tasks.jar, [ configurations.runtimeClasspath ], []) + } +} + +// This builds the default uberjar that contains all of our dependencies in shaded form, +// as well as com.launchdarkly.logging in unshaded form. It does not contain SLF4J; the +// application is expected to provide SLF4J in the classpath if desired. 
+shadowJar { + // No classifier means that the shaded jar becomes the default artifact + classifier = '' + + configurations = [ project.configurations.internal ] + + // Kotlin metadata for shaded classes should not be included - it confuses IDEs + exclude '**/*.kotlin_metadata' + exclude '**/*.kotlin_module' + exclude '**/*.kotlin_builtins' + + // Shadow is not supposed to copy any module-info.class files from dependencies, + // but sometimes it does unless we explicitly exclude them here + exclude '**/module-info.class' + + // doFirst causes the following steps to be run during Gradle's execution phase rather than the + // configuration phase; this is necessary because they access the build products + doFirst { + shadeDependencies(project.tasks.shadowJar) + // Note that "configurations.shadow" is the same as "libraries.external", except it contains + // objects with detailed information about the resolved dependencies. + addOsgiManifest(project.tasks.shadowJar, [], []) + } + + doLast { + replaceUnshadedClasses(project.tasks.shadowJar) + } +} + +task testJar(type: Jar, dependsOn: testClasses) { + classifier = 'test' + from sourceSets.test.output +} + +// custom tasks for creating source/javadoc jars +task sourcesJar(type: Jar, dependsOn: classes) { + classifier = 'sources' + from sourceSets.main.allSource +} + +task javadocJar(type: Jar, dependsOn: javadoc) { + classifier = 'javadoc' + from javadoc.destinationDir +} + +javadoc { + source configurations.commonDoc.collect { zipTree(it) } + include '**/*.java' + + // Use test classpath so Javadoc won't complain about java-sdk-common classes that internally + // reference stuff we don't use directly, like Jackson + classpath = sourceSets.test.compileClasspath + + // The following should allow hyperlinks to com.launchdarkly.logging classes to go to + // the correct external URLs + if (options instanceof StandardJavadocDocletOptions) { + (options as StandardJavadocDocletOptions).links( + 
"https://javadoc.io/doc/com.launchdarkly/launchdarkly-logging/${versions.launchdarklyLogging}" + ) + } +} + +// Force the Javadoc build to fail if there are any Javadoc warnings. See: https://discuss.gradle.org/t/javadoc-fail-on-warning/18141/3 +if (JavaVersion.current().isJava8Compatible()) { + tasks.withType(Javadoc) { + // The '-quiet' as second argument is actually a hack, + // since the one paramater addStringOption doesn't seem to + // work, we extra add '-quiet', which is added anyway by + // gradle. See https://github.com/gradle/gradle/issues/2354 + // See JDK-8200363 (https://bugs.openjdk.java.net/browse/JDK-8200363) + // for information about the -Xwerror option. + options.addStringOption('Xwerror', '-quiet') + } +} + +// Returns the names of all Java packages defined in this library - not including +// enclosing packages like "com" that don't have any classes in them. +def getAllSdkPackages() { + // base package classes come from launchdarkly-java-sdk-common + def names = [ "com.launchdarkly.sdk", "com.launchdarkly.sdk.json", "com.launchdarkly.logging" ] + project.convention.getPlugin(JavaPluginConvention).sourceSets.main.output.each { baseDir -> + if (baseDir.getPath().contains("classes" + File.separator + "java" + File.separator + "main")) { + baseDir.eachFileRecurse { f -> + if (f.name.endsWith(".class")) { + def subPath = f.getPath().substring(baseDir.getPath().length() + File.separator.length()) + def pkgName = subPath.substring(0, subPath.lastIndexOf(File.separator)).replace(File.separator, ".") + names += pkgName + } + } + } + } + names.unique() +} + +// Returns the names of all Java packages contained in the specified jar - not including +// enclosing packages like "com" that don't have any classes in them. +def getPackagesInDependencyJar(jarFile) { + new java.util.zip.ZipFile(jarFile).withCloseable { zf -> + zf.entries().findAll { !it.directory && it.name.endsWith(".class") }.collect { + it.name.contains("/") ? 
it.name.substring(0, it.name.lastIndexOf("/")).replace("/", ".") : "" + }.findAll { !it.equals("") && !it.startsWith("META-INF") }.unique() + } +} + +// Used by shadowJar to specify which packages should be renamed. +// +// The SDK's own packages should not be renamed (even though code in those packages will be +// modified to update any references to classes that are being renamed). +// +// Dependencies that are specified in the "optional" configuration should not be renamed. +// These are things that we will not be including in our uberjar anyway, but we want to make +// sure we can reference them by their original names if they are in the application +// classpath (which they may or may not be, since they are optional). +// +// This depends on our build products, so it can't be executed during Gradle's configuration +// phase; instead we have to run it after configuration, with the "afterEvaluate" block below. +def shadeDependencies(jarTask) { + def excludePackages = getAllSdkPackages() + + configurations.optional.collectMany { getPackagesInDependencyJar(it) } + def referencedPackages = + configurations.internal.collectMany { + getPackagesInDependencyJar(it) + }. + unique() + referencedPackages.forEach { packageToRelocate -> + jarTask.relocate(packageToRelocate, "com.launchdarkly.shaded." + packageToRelocate) { + excludePackages.forEach { exclude(it + ".*") } + } + } +} + +def replaceUnshadedClasses(jarTask) { + // The LDGson class is a special case where we do *not* want any of the Gson class names it uses to be + // modified by shading (because its purpose is to interoperate with a non-shaded instance of Gson). + // Shadow doesn't seem to provide a way to say "make this class file immune from the changes that result + // from shading *other* classes", so the workaround is to simply recopy the original class file. Note that + // we use a wildcard to make sure we also get any inner classes. 
+ def protectedClassFilePattern = 'com/launchdarkly/sdk/json/LDGson*.class' + jarTask.exclude protectedClassFilePattern + def protectedClassFiles = configurations.commonClasses.collectMany { + zipTree(it).matching { + include protectedClassFilePattern + } getFiles() + } + def jarPath = jarTask.archiveFile.asFile.get().toPath() + FileSystems.newFileSystem(jarPath, (ClassLoader)null).withCloseable { fs -> + protectedClassFiles.forEach { classFile -> + def classSubpath = classFile.path.substring(classFile.path.indexOf("com/launchdarkly")) + Files.copy(classFile.toPath(), fs.getPath(classSubpath), StandardCopyOption.REPLACE_EXISTING) + } + } +} + +def getFileFromClasspath(config, filePath) { + def files = config.collectMany { + zipTree(it) matching { + include filePath + } getFiles() + } + if (files.size != 1) { + throw new RuntimeException("could not find " + filePath); + } + return files[0] +} + +def addOsgiManifest(jarTask, List importConfigs, List exportConfigs) { + // For a prerelease build with "-beta", "-rc", etc., the prerelease qualifier has to be + // removed from the bundle version because OSGi doesn't understand it. + def implementationVersion = version.replaceFirst('-.*$', '') + jarTask.manifest { + attributes( + "Implementation-Version": implementationVersion, + "Bundle-SymbolicName": "com.launchdarkly.sdk", + "Bundle-Version": implementationVersion, + "Bundle-Name": "LaunchDarkly SDK", + "Bundle-ManifestVersion": "2", + "Bundle-Vendor": "LaunchDarkly" + ) + + // Since we're not currently able to use bnd or the Gradle OSGi plugin, we're not discovering + // imports by looking at the actual code; instead, we're just importing whatever packages each + // dependency is exporting (if it has an OSGi manifest) or every package in the dependency (if + // it doesn't). 
+ def imports = forEachArtifactAndVisiblePackage(importConfigs, { a, p -> + bundleImport(p, a.moduleVersion.id.version, nextMajorVersion(a.moduleVersion.id.version)) + }) + systemPackageImports + + // We also always add *optional* imports for Gson and Jackson, so that GsonTypeAdapters and + // JacksonTypeAdapters will work *if* Gson or Jackson is present externally. Currently we + // are hard-coding the Gson packages (they are special because there's also a shaded copy of + // them embedded in the jar, unrelated to this import) but there is probably a better way. + def optImports = [ "com.google.gson", "com.google.gson.reflect", "com.google.gson.stream" ] + forEachArtifactAndVisiblePackage([ configurations.optional ]) { a, p -> optImports += p } + imports += (optImports.join(";") + ";resolution:=optional" ) + + attributes("Import-Package": imports.join(",")) + + // Similarly, we're adding package exports for every package in whatever libraries we're + // making publicly available. + def sdkExports = getAllSdkPackages().collect { bundleExport(it, implementationVersion) } + def exportedDependencies = forEachArtifactAndVisiblePackage(exportConfigs, { a, p -> + bundleExport(p, a.moduleVersion.id.version) + }) + attributes("Export-Package": (sdkExports + exportedDependencies).join(",")) + } +} + +def bundleImport(packageName, importVersion, versionLimit) { + packageName + ";version=\"[" + importVersion + "," + versionLimit + ")\"" +} + +def bundleExport(packageName, exportVersion) { + packageName + ";version=\"" + exportVersion + "\"" +} + +def nextMajorVersion(v) { + def majorComponent = v.contains('.') ? 
v.substring(0, v.indexOf('.')) : v; + String.valueOf(Integer.parseInt(majorComponent) + 1) +} + +def forEachArtifactAndVisiblePackage(configs, closure) { + configs.collectMany { it.resolvedConfiguration.resolvedArtifacts } + .collectMany { a -> + def exportedPackages = getOsgiPackageExportsFromJar(a.file) + if (exportedPackages == null || exportedPackages.size == 0) { + // This dependency didn't specify OSGi exports, so we'll just have to assume that + // we might need to use any package that's in this jar (with a little special-casing + // to exclude things we probably should not be importing). + exportedPackages = getPackagesInDependencyJar(a.file) + .findAll { !it.contains(".internal") } + } + exportedPackages.collect { p -> closure(a, p) } + } +} + +def getOsgiPackageExportsFromJar(file) { + return new java.util.jar.JarFile(file).withCloseable { jar -> + def manifest = jar.manifest + if (manifest == null) { + return null + } + def dict = new java.util.Hashtable() // sadly, the manifest parser requires a Dictionary + manifest.mainAttributes.each { k, v -> dict.put(k.toString(), v.toString()) } + return org.eclipse.virgo.util.osgi.manifest.BundleManifestFactory.createBundleManifest(dict) + .exportPackage.exportedPackages.collect { it.packageName } + } +} + +artifacts { + archives jar, sourcesJar, javadocJar, shadowJar +} + +test { + testLogging { + events "passed", "skipped", "failed", "standardOut", "standardError" + showStandardStreams = true + exceptionFormat = 'full' + } +} + +jacocoTestReport { // code coverage report + reports { + xml.required = true + csv.required = true + html.required = true + } +} + +jacocoTestCoverageVerification { + // See notes in CONTRIBUTING.md on code coverage. Unfortunately we can't configure line-by-line code + // coverage overrides within the source code itself, because Jacoco operates on bytecode. 
+ violationRules { rules -> + def knownMissedLinesForMethods = [ + // The key for each of these items is the complete method signature minus the "com.launchdarkly.sdk.server." prefix. + "DataSourceUpdatesImpl.OutageTracker.onTimeout()": 1, + "DataSourceUpdatesImpl.computeChangedItemsForFullDataSet(java.util.Map, java.util.Map)": 2, + "DefaultEventProcessor.EventProcessorMessage.waitForCompletion()": 3, + "DefaultEventProcessor.EventDispatcher.onUncaughtException(java.lang.Thread, java.lang.Throwable)": 8, + "DefaultEventProcessor.EventDispatcher.runMainLoop(java.util.concurrent.BlockingQueue, com.launchdarkly.sdk.server.DefaultEventProcessor.EventBuffer, com.launchdarkly.sdk.server.SimpleLRUCache, java.util.concurrent.BlockingQueue)": 4, + "DefaultEventProcessor.postToChannel(com.launchdarkly.sdk.server.DefaultEventProcessor.EventProcessorMessage)": 5, + "DefaultEventSender.sendEventData(com.launchdarkly.sdk.server.interfaces.EventSender.EventDataKind, java.lang.String, int, java.net.URI)": 1, + "EvaluatorOperators.ComparisonOp.test(int)": 1, + "EvaluatorOperators.apply(com.launchdarkly.sdk.server.DataModel.Operator, com.launchdarkly.sdk.LDValue, com.launchdarkly.sdk.LDValue, com.launchdarkly.sdk.server.EvaluatorPreprocessing.ClauseExtra.ValueExtra)": 1, + "LDClient.LDClient(java.lang.String)": 2, + "PersistentDataStoreStatusManager.1.run()": 2, + "PersistentDataStoreWrapper.PersistentDataStoreWrapper(com.launchdarkly.sdk.server.interfaces.PersistentDataStore, java.time.Duration, com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder.StaleValuesPolicy, boolean, com.launchdarkly.sdk.server.interfaces.DataStoreUpdates, java.util.concurrent.ScheduledExecutorService)": 2, + "PersistentDataStoreWrapper.getAll(com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind)": 3, + "PersistentDataStoreWrapper.deserialize(com.launchdarkly.sdk.server.interfaces.DataStoreTypes.DataKind, 
com.launchdarkly.sdk.server.interfaces.DataStoreTypes.SerializedItemDescriptor)": 2, + "SemanticVersion.parse(java.lang.String, boolean)": 2, + "Util.1.lambda\$authenticate\$0(okhttp3.Challenge)": 1, + "integrations.FileDataSourceImpl.FileDataSourceImpl(com.launchdarkly.sdk.server.interfaces.DataSourceUpdates, java.util.List, boolean)": 3, + "integrations.FileDataSourceImpl.FileWatcher.run()": 3, + "integrations.FileDataSourceParsing.FlagFileParser.detectJson(java.io.Reader)": 2 + ] + + knownMissedLinesForMethods.each { partialSignature, maxMissedLines -> + if (maxMissedLines > 0) { // < 0 means skip entire method + rules.rule { + element = "METHOD" + includes = [ "com.launchdarkly.sdk.server." + partialSignature ] + limit { + counter = "LINE" + value = "MISSEDCOUNT" + maximum = maxMissedLines + } + } + } + } + + // General rule that we should expect 100% test coverage; exclude any methods that have overrides above + rule { + element = "METHOD" + limit { + counter = "LINE" + value = "MISSEDCOUNT" + maximum = 0 + } + excludes = knownMissedLinesForMethods.collect { partialSignature, maxMissedLines -> + "com.launchdarkly.sdk.server." 
+ partialSignature + } + } + } +} + +idea { + module { + downloadJavadoc = true + downloadSources = true + } +} + +nexusStaging { + packageGroup = "com.launchdarkly" + numberOfRetries = 40 // we've seen extremely long delays in closing repositories +} + +def pomConfig = { + name 'LaunchDarkly SDK for Java' + packaging 'jar' + url 'https://github.com/launchdarkly/java-server-sdk' + + licenses { + license { + name 'The Apache License, Version 2.0' + url 'http://www.apache.org/licenses/LICENSE-2.0.txt' + } + } + + developers { + developer { + name 'LaunchDarkly SDK Team' + email 'sdks@launchdarkly.com' + } + } + + scm { + connection 'scm:git:git://github.com/launchdarkly/java-server-sdk.git' + developerConnection 'scm:git:ssh:git@github.com:launchdarkly/java-server-sdk.git' + url 'https://github.com/launchdarkly/java-server-sdk' + } +} + +publishing { + publications { + shadow(MavenPublication) { publication -> + project.shadow.component(publication) + + artifact jar + artifact sourcesJar + artifact javadocJar + artifact testJar + + pom.withXml { + def root = asNode() + root.appendNode('description', 'Official LaunchDarkly SDK for Java') + root.children().last() + pomConfig + } + } + } + repositories { + mavenLocal() + } +} + +nexusPublishing { + clientTimeout = java.time.Duration.ofMinutes(2) // we've seen extremely long delays in creating repositories + repositories { + sonatype { + username = ossrhUsername + password = ossrhPassword + } + } +} + +signing { + sign publishing.publications.shadow +} + +tasks.withType(Sign) { t -> + onlyIf { !shouldSkipSigning() } // so we can build jars for testing in CI +} + +def shouldSkipSigning() { + return "1".equals(project.findProperty("LD_SKIP_SIGNING")) || + "1".equals(System.getenv("LD_SKIP_SIGNING")) +} + +// This task is used by the logic in ./packaging-test to get copies of all the direct and transitive +// dependencies of the SDK, so they can be put on the classpath as needed during tests. 
+task exportDependencies(type: Copy, dependsOn: compileJava) { + into "packaging-test/temp/dependencies-all" + from (configurations.internal.resolvedConfiguration.resolvedArtifacts.collect { it.file }) +} + +gitPublish { + repoUri = 'git@github.com:launchdarkly/java-server-sdk.git' + branch = 'gh-pages' + contents { + from javadoc + } + preserve { + // There's a dummy .circleci/config.yml file on the gh-pages branch so CircleCI won't + // complain when it sees a commit there. The git-publish plugin would delete that file if + // we didn't protect it here. + include '.circleci/config.yml' + } + commitMessage = 'publishing javadocs' +} diff --git a/lib/sdk/server/config/checkstyle/checkstyle.xml b/lib/sdk/server/config/checkstyle/checkstyle.xml new file mode 100644 index 0000000..9759ef3 --- /dev/null +++ b/lib/sdk/server/config/checkstyle/checkstyle.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/lib/sdk/server/config/checkstyle/suppressions.xml b/lib/sdk/server/config/checkstyle/suppressions.xml new file mode 100644 index 0000000..1959e98 --- /dev/null +++ b/lib/sdk/server/config/checkstyle/suppressions.xml @@ -0,0 +1,12 @@ + + + + + + + diff --git a/lib/sdk/server/contract-tests/README.md b/lib/sdk/server/contract-tests/README.md new file mode 100644 index 0000000..aa3942b --- /dev/null +++ b/lib/sdk/server/contract-tests/README.md @@ -0,0 +1,7 @@ +# SDK contract test service + +This directory contains an implementation of the cross-platform SDK testing protocol defined by https://github.com/launchdarkly/sdk-test-harness. See that project's `README` for details of this protocol, and the kinds of SDK capabilities that are relevant to the contract tests. This code should not need to be updated unless the SDK has added or removed such capabilities. + +To run these tests locally, run `make contract-tests` from the SDK project root directory. This downloads the correct version of the test harness tool automatically. 
+ +Or, to test against an in-progress local version of the test harness, run `make start-contract-test-service` from the SDK project root directory; then, in the root directory of the `sdk-test-harness` project, build the test harness and run it from the command line. diff --git a/lib/sdk/server/contract-tests/gradle.properties b/lib/sdk/server/contract-tests/gradle.properties new file mode 100644 index 0000000..ea8e8fb --- /dev/null +++ b/lib/sdk/server/contract-tests/gradle.properties @@ -0,0 +1 @@ +gnsp.disableApplyOnlyOnRootProjectEnforcement=true diff --git a/lib/sdk/server/contract-tests/gradle/wrapper/gradle-wrapper.jar b/lib/sdk/server/contract-tests/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..7454180f2ae8848c63b8b4dea2cb829da983f2fa GIT binary patch literal 59536 zcma&NbC71ylI~qywr$(CZQJHswz}-9F59+k+g;UV+cs{`J?GrGXYR~=-ydruB3JCa zB64N^cILAcWk5iofq)<(fq;O7{th4@;QxID0)qN`mJ?GIqLY#rX8-|G{5M0pdVW5^ zzXk$-2kQTAC?_N@B`&6-N-rmVFE=$QD?>*=4<|!MJu@}isLc4AW#{m2if&A5T5g&~ ziuMQeS*U5sL6J698wOd)K@oK@1{peP5&Esut<#VH^u)gp`9H4)`uE!2$>RTctN+^u z=ASkePDZA-X8)rp%D;p*~P?*a_=*Kwc<^>QSH|^<0>o37lt^+Mj1;4YvJ(JR-Y+?%Nu}JAYj5 z_Qc5%Ao#F?q32i?ZaN2OSNhWL;2oDEw_({7ZbgUjna!Fqn3NzLM@-EWFPZVmc>(fZ z0&bF-Ch#p9C{YJT9Rcr3+Y_uR^At1^BxZ#eo>$PLJF3=;t_$2|t+_6gg5(j{TmjYU zK12c&lE?Eh+2u2&6Gf*IdKS&6?rYbSEKBN!rv{YCm|Rt=UlPcW9j`0o6{66#y5t9C zruFA2iKd=H%jHf%ypOkxLnO8#H}#Zt{8p!oi6)7#NqoF({t6|J^?1e*oxqng9Q2Cc zg%5Vu!em)}Yuj?kaP!D?b?(C*w!1;>R=j90+RTkyEXz+9CufZ$C^umX^+4|JYaO<5 zmIM3#dv`DGM;@F6;(t!WngZSYzHx?9&$xEF70D1BvfVj<%+b#)vz)2iLCrTeYzUcL z(OBnNoG6Le%M+@2oo)&jdOg=iCszzv59e zDRCeaX8l1hC=8LbBt|k5?CXgep=3r9BXx1uR8!p%Z|0+4Xro=xi0G!e{c4U~1j6!) 
zH6adq0}#l{%*1U(Cb%4AJ}VLWKBPi0MoKFaQH6x?^hQ!6em@993xdtS%_dmevzeNl z(o?YlOI=jl(`L9^ z0O+H9k$_@`6L13eTT8ci-V0ljDMD|0ifUw|Q-Hep$xYj0hTO@0%IS^TD4b4n6EKDG z??uM;MEx`s98KYN(K0>c!C3HZdZ{+_53DO%9k5W%pr6yJusQAv_;IA}925Y%;+!tY z%2k!YQmLLOr{rF~!s<3-WEUs)`ix_mSU|cNRBIWxOox_Yb7Z=~Q45ZNe*u|m^|)d* zog=i>`=bTe!|;8F+#H>EjIMcgWcG2ORD`w0WD;YZAy5#s{65~qfI6o$+Ty&-hyMyJ z3Ra~t>R!p=5ZpxA;QkDAoPi4sYOP6>LT+}{xp}tk+<0k^CKCFdNYG(Es>p0gqD)jP zWOeX5G;9(m@?GOG7g;e74i_|SmE?`B2i;sLYwRWKLy0RLW!Hx`=!LH3&k=FuCsM=9M4|GqzA)anEHfxkB z?2iK-u(DC_T1};KaUT@3nP~LEcENT^UgPvp!QC@Dw&PVAhaEYrPey{nkcn(ro|r7XUz z%#(=$7D8uP_uU-oPHhd>>^adbCSQetgSG`e$U|7mr!`|bU0aHl_cmL)na-5x1#OsVE#m*+k84Y^+UMeSAa zbrVZHU=mFwXEaGHtXQq`2ZtjfS!B2H{5A<3(nb-6ARVV8kEmOkx6D2x7~-6hl;*-*}2Xz;J#a8Wn;_B5=m zl3dY;%krf?i-Ok^Pal-}4F`{F@TYPTwTEhxpZK5WCpfD^UmM_iYPe}wpE!Djai6_{ z*pGO=WB47#Xjb7!n2Ma)s^yeR*1rTxp`Mt4sfA+`HwZf%!7ZqGosPkw69`Ix5Ku6G z@Pa;pjzV&dn{M=QDx89t?p?d9gna*}jBly*#1!6}5K<*xDPJ{wv4& zM$17DFd~L*Te3A%yD;Dp9UGWTjRxAvMu!j^Tbc}2v~q^59d4bz zvu#!IJCy(BcWTc`;v$9tH;J%oiSJ_i7s;2`JXZF+qd4C)vY!hyCtl)sJIC{ebI*0> z@x>;EzyBv>AI-~{D6l6{ST=em*U( z(r$nuXY-#CCi^8Z2#v#UXOt`dbYN1z5jzNF2 z411?w)whZrfA20;nl&C1Gi+gk<`JSm+{|*2o<< zqM#@z_D`Cn|0H^9$|Tah)0M_X4c37|KQ*PmoT@%xHc3L1ZY6(p(sNXHa&49Frzto& zR`c~ClHpE~4Z=uKa5S(-?M8EJ$zt0&fJk~p$M#fGN1-y$7!37hld`Uw>Urri(DxLa;=#rK0g4J)pXMC zxzraOVw1+kNWpi#P=6(qxf`zSdUC?D$i`8ZI@F>k6k zz21?d+dw7b&i*>Kv5L(LH-?J%@WnqT7j#qZ9B>|Zl+=> z^U-pV@1y_ptHo4hl^cPRWewbLQ#g6XYQ@EkiP z;(=SU!yhjHp%1&MsU`FV1Z_#K1&(|5n(7IHbx&gG28HNT)*~-BQi372@|->2Aw5It z0CBpUcMA*QvsPy)#lr!lIdCi@1k4V2m!NH)%Px(vu-r(Q)HYc!p zJ^$|)j^E#q#QOgcb^pd74^JUi7fUmMiNP_o*lvx*q%_odv49Dsv$NV;6J z9GOXKomA{2Pb{w}&+yHtH?IkJJu~}Z?{Uk++2mB8zyvh*xhHKE``99>y#TdD z&(MH^^JHf;g(Tbb^&8P*;_i*2&fS$7${3WJtV7K&&(MBV2~)2KB3%cWg#1!VE~k#C z!;A;?p$s{ihyojEZz+$I1)L}&G~ml=udD9qh>Tu(ylv)?YcJT3ihapi!zgPtWb*CP zlLLJSRCj-^w?@;RU9aL2zDZY1`I3d<&OMuW=c3$o0#STpv_p3b9Wtbql>w^bBi~u4 z3D8KyF?YE?=HcKk!xcp@Cigvzy=lnFgc^9c%(^F22BWYNAYRSho@~*~S)4%AhEttv 
zvq>7X!!EWKG?mOd9&n>vvH1p4VzE?HCuxT-u+F&mnsfDI^}*-d00-KAauEaXqg3k@ zy#)MGX!X;&3&0s}F3q40ZmVM$(H3CLfpdL?hB6nVqMxX)q=1b}o_PG%r~hZ4gUfSp zOH4qlEOW4OMUc)_m)fMR_rl^pCfXc{$fQbI*E&mV77}kRF z&{<06AJyJ!e863o-V>FA1a9Eemx6>^F$~9ppt()ZbPGfg_NdRXBWoZnDy2;#ODgf! zgl?iOcF7Meo|{AF>KDwTgYrJLb$L2%%BEtO>T$C?|9bAB&}s;gI?lY#^tttY&hfr# zKhC+&b-rpg_?~uVK%S@mQleU#_xCsvIPK*<`E0fHE1&!J7!xD#IB|SSPW6-PyuqGn3^M^Rz%WT{e?OI^svARX&SAdU77V(C~ zM$H{Kg59op{<|8ry9ecfP%=kFm(-!W&?U0@<%z*+!*<e0XesMxRFu9QnGqun6R_%T+B%&9Dtk?*d$Q zb~>84jEAPi@&F@3wAa^Lzc(AJz5gsfZ7J53;@D<;Klpl?sK&u@gie`~vTsbOE~Cd4 z%kr56mI|#b(Jk&;p6plVwmNB0H@0SmgdmjIn5Ne@)}7Vty(yb2t3ev@22AE^s!KaN zyQ>j+F3w=wnx7w@FVCRe+`vUH)3gW%_72fxzqX!S&!dchdkRiHbXW1FMrIIBwjsai8`CB2r4mAbwp%rrO>3B$Zw;9=%fXI9B{d(UzVap7u z6piC-FQ)>}VOEuPpuqznpY`hN4dGa_1Xz9rVg(;H$5Te^F0dDv*gz9JS<|>>U0J^# z6)(4ICh+N_Q`Ft0hF|3fSHs*?a=XC;e`sJaU9&d>X4l?1W=|fr!5ShD|nv$GK;j46@BV6+{oRbWfqOBRb!ir88XD*SbC(LF}I1h#6@dvK%Toe%@ zhDyG$93H8Eu&gCYddP58iF3oQH*zLbNI;rN@E{T9%A8!=v#JLxKyUe}e}BJpB{~uN zqgxRgo0*-@-iaHPV8bTOH(rS(huwK1Xg0u+e!`(Irzu@Bld&s5&bWgVc@m7;JgELd zimVs`>vQ}B_1(2#rv#N9O`fJpVfPc7V2nv34PC);Dzbb;p!6pqHzvy?2pD&1NE)?A zt(t-ucqy@wn9`^MN5apa7K|L=9>ISC>xoc#>{@e}m#YAAa1*8-RUMKwbm|;5p>T`Z zNf*ph@tnF{gmDa3uwwN(g=`Rh)4!&)^oOy@VJaK4lMT&5#YbXkl`q?<*XtsqD z9PRK6bqb)fJw0g-^a@nu`^?71k|m3RPRjt;pIkCo1{*pdqbVs-Yl>4E>3fZx3Sv44grW=*qdSoiZ9?X0wWyO4`yDHh2E!9I!ZFi zVL8|VtW38}BOJHW(Ax#KL_KQzarbuE{(%TA)AY)@tY4%A%P%SqIU~8~-Lp3qY;U-} z`h_Gel7;K1h}7$_5ZZT0&%$Lxxr-<89V&&TCsu}LL#!xpQ1O31jaa{U34~^le*Y%L za?7$>Jk^k^pS^_M&cDs}NgXlR>16AHkSK-4TRaJSh#h&p!-!vQY%f+bmn6x`4fwTp z$727L^y`~!exvmE^W&#@uY!NxJi`g!i#(++!)?iJ(1)2Wk;RN zFK&O4eTkP$Xn~4bB|q8y(btx$R#D`O@epi4ofcETrx!IM(kWNEe42Qh(8*KqfP(c0 zouBl6>Fc_zM+V;F3znbo{x#%!?mH3`_ANJ?y7ppxS@glg#S9^MXu|FM&ynpz3o&Qh z2ujAHLF3($pH}0jXQsa#?t--TnF1P73b?4`KeJ9^qK-USHE)4!IYgMn-7z|=ALF5SNGkrtPG@Y~niUQV2?g$vzJN3nZ{7;HZHzWAeQ;5P|@Tl3YHpyznGG4-f4=XflwSJY+58-+wf?~Fg@1p1wkzuu-RF3j2JX37SQUc? 
zQ4v%`V8z9ZVZVqS8h|@@RpD?n0W<=hk=3Cf8R?d^9YK&e9ZybFY%jdnA)PeHvtBe- zhMLD+SSteHBq*q)d6x{)s1UrsO!byyLS$58WK;sqip$Mk{l)Y(_6hEIBsIjCr5t>( z7CdKUrJTrW%qZ#1z^n*Lb8#VdfzPw~OIL76aC+Rhr<~;4Tl!sw?Rj6hXj4XWa#6Tp z@)kJ~qOV)^Rh*-?aG>ic2*NlC2M7&LUzc9RT6WM%Cpe78`iAowe!>(T0jo&ivn8-7 zs{Qa@cGy$rE-3AY0V(l8wjI^uB8Lchj@?L}fYal^>T9z;8juH@?rG&g-t+R2dVDBe zq!K%{e-rT5jX19`(bP23LUN4+_zh2KD~EAYzhpEO3MUG8@}uBHH@4J zd`>_(K4q&>*k82(dDuC)X6JuPrBBubOg7qZ{?x!r@{%0);*`h*^F|%o?&1wX?Wr4b z1~&cy#PUuES{C#xJ84!z<1tp9sfrR(i%Tu^jnXy;4`Xk;AQCdFC@?V%|; zySdC7qS|uQRcH}EFZH%mMB~7gi}a0utE}ZE_}8PQH8f;H%PN41Cb9R%w5Oi5el^fd z$n{3SqLCnrF##x?4sa^r!O$7NX!}&}V;0ZGQ&K&i%6$3C_dR%I7%gdQ;KT6YZiQrW zk%q<74oVBV>@}CvJ4Wj!d^?#Zwq(b$E1ze4$99DuNg?6t9H}k_|D7KWD7i0-g*EO7 z;5{hSIYE4DMOK3H%|f5Edx+S0VI0Yw!tsaRS2&Il2)ea^8R5TG72BrJue|f_{2UHa z@w;^c|K3da#$TB0P3;MPlF7RuQeXT$ zS<<|C0OF(k)>fr&wOB=gP8!Qm>F41u;3esv7_0l%QHt(~+n; zf!G6%hp;Gfa9L9=AceiZs~tK+Tf*Wof=4!u{nIO90jH@iS0l+#%8=~%ASzFv7zqSB^?!@N7)kp0t&tCGLmzXSRMRyxCmCYUD2!B`? zhs$4%KO~m=VFk3Buv9osha{v+mAEq=ik3RdK@;WWTV_g&-$U4IM{1IhGX{pAu%Z&H zFfwCpUsX%RKg);B@7OUzZ{Hn{q6Vv!3#8fAg!P$IEx<0vAx;GU%}0{VIsmFBPq_mb zpe^BChDK>sc-WLKl<6 zwbW|e&d&dv9Wu0goueyu>(JyPx1mz0v4E?cJjFuKF71Q1)AL8jHO$!fYT3(;U3Re* zPPOe%*O+@JYt1bW`!W_1!mN&=w3G9ru1XsmwfS~BJ))PhD(+_J_^N6j)sx5VwbWK| zwRyC?W<`pOCY)b#AS?rluxuuGf-AJ=D!M36l{ua?@SJ5>e!IBr3CXIxWw5xUZ@Xrw z_R@%?{>d%Ld4p}nEsiA@v*nc6Ah!MUs?GA7e5Q5lPpp0@`%5xY$C;{%rz24$;vR#* zBP=a{)K#CwIY%p} zXVdxTQ^HS@O&~eIftU+Qt^~(DGxrdi3k}DdT^I7Iy5SMOp$QuD8s;+93YQ!OY{eB24%xY7ml@|M7I(Nb@K_-?F;2?et|CKkuZK_>+>Lvg!>JE~wN`BI|_h6$qi!P)+K-1Hh(1;a`os z55)4Q{oJiA(lQM#;w#Ta%T0jDNXIPM_bgESMCDEg6rM33anEr}=|Fn6)|jBP6Y}u{ zv9@%7*#RI9;fv;Yii5CI+KrRdr0DKh=L>)eO4q$1zmcSmglsV`*N(x=&Wx`*v!!hn6X-l0 zP_m;X??O(skcj+oS$cIdKhfT%ABAzz3w^la-Ucw?yBPEC+=Pe_vU8nd-HV5YX6X8r zZih&j^eLU=%*;VzhUyoLF;#8QsEfmByk+Y~caBqSvQaaWf2a{JKB9B>V&r?l^rXaC z8)6AdR@Qy_BxQrE2Fk?ewD!SwLuMj@&d_n5RZFf7=>O>hzVE*seW3U?_p|R^CfoY`?|#x9)-*yjv#lo&zP=uI`M?J zbzC<^3x7GfXA4{FZ72{PE*-mNHyy59Q;kYG@BB~NhTd6pm2Oj=_ 
zizmD?MKVRkT^KmXuhsk?eRQllPo2Ubk=uCKiZ&u3Xjj~<(!M94c)Tez@9M1Gfs5JV z->@II)CDJOXTtPrQudNjE}Eltbjq>6KiwAwqvAKd^|g!exgLG3;wP+#mZYr`cy3#39e653d=jrR-ulW|h#ddHu(m9mFoW~2yE zz5?dB%6vF}+`-&-W8vy^OCxm3_{02royjvmwjlp+eQDzFVEUiyO#gLv%QdDSI#3W* z?3!lL8clTaNo-DVJw@ynq?q!%6hTQi35&^>P85G$TqNt78%9_sSJt2RThO|JzM$iL zg|wjxdMC2|Icc5rX*qPL(coL!u>-xxz-rFiC!6hD1IR%|HSRsV3>Kq~&vJ=s3M5y8SG%YBQ|{^l#LGlg!D?E>2yR*eV%9m$_J6VGQ~AIh&P$_aFbh zULr0Z$QE!QpkP=aAeR4ny<#3Fwyw@rZf4?Ewq`;mCVv}xaz+3ni+}a=k~P+yaWt^L z@w67!DqVf7D%7XtXX5xBW;Co|HvQ8WR1k?r2cZD%U;2$bsM%u8{JUJ5Z0k= zZJARv^vFkmWx15CB=rb=D4${+#DVqy5$C%bf`!T0+epLJLnh1jwCdb*zuCL}eEFvE z{rO1%gxg>1!W(I!owu*mJZ0@6FM(?C+d*CeceZRW_4id*D9p5nzMY&{mWqrJomjIZ z97ZNnZ3_%Hx8dn;H>p8m7F#^2;T%yZ3H;a&N7tm=Lvs&lgJLW{V1@h&6Vy~!+Ffbb zv(n3+v)_D$}dqd!2>Y2B)#<+o}LH#%ogGi2-?xRIH)1!SD)u-L65B&bsJTC=LiaF+YOCif2dUX6uAA|#+vNR z>U+KQekVGon)Yi<93(d!(yw1h3&X0N(PxN2{%vn}cnV?rYw z$N^}_o!XUB!mckL`yO1rnUaI4wrOeQ(+&k?2mi47hzxSD`N#-byqd1IhEoh!PGq>t z_MRy{5B0eKY>;Ao3z$RUU7U+i?iX^&r739F)itdrTpAi-NN0=?^m%?{A9Ly2pVv>Lqs6moTP?T2-AHqFD-o_ znVr|7OAS#AEH}h8SRPQ@NGG47dO}l=t07__+iK8nHw^(AHx&Wb<%jPc$$jl6_p(b$ z)!pi(0fQodCHfM)KMEMUR&UID>}m^(!{C^U7sBDOA)$VThRCI0_+2=( zV8mMq0R(#z;C|7$m>$>`tX+T|xGt(+Y48@ZYu#z;0pCgYgmMVbFb!$?%yhZqP_nhn zy4<#3P1oQ#2b51NU1mGnHP$cf0j-YOgAA}A$QoL6JVLcmExs(kU{4z;PBHJD%_=0F z>+sQV`mzijSIT7xn%PiDKHOujX;n|M&qr1T@rOxTdxtZ!&u&3HHFLYD5$RLQ=heur zb>+AFokUVQeJy-#LP*^)spt{mb@Mqe=A~-4p0b+Bt|pZ+@CY+%x}9f}izU5;4&QFE zO1bhg&A4uC1)Zb67kuowWY4xbo&J=%yoXlFB)&$d*-}kjBu|w!^zbD1YPc0-#XTJr z)pm2RDy%J3jlqSMq|o%xGS$bPwn4AqitC6&e?pqWcjWPt{3I{>CBy;hg0Umh#c;hU3RhCUX=8aR>rmd` z7Orw(5tcM{|-^J?ZAA9KP|)X6n9$-kvr#j5YDecTM6n z&07(nD^qb8hpF0B^z^pQ*%5ePYkv&FabrlI61ntiVp!!C8y^}|<2xgAd#FY=8b*y( zuQOuvy2`Ii^`VBNJB&R!0{hABYX55ooCAJSSevl4RPqEGb)iy_0H}v@vFwFzD%>#I>)3PsouQ+_Kkbqy*kKdHdfkN7NBcq%V{x^fSxgXpg7$bF& zj!6AQbDY(1u#1_A#1UO9AxiZaCVN2F0wGXdY*g@x$ByvUA?ePdide0dmr#}udE%K| z3*k}Vv2Ew2u1FXBaVA6aerI36R&rzEZeDDCl5!t0J=ug6kuNZzH>3i_VN`%BsaVB3 
zQYw|Xub_SGf{)F{$ZX5`Jc!X!;eybjP+o$I{Z^Hsj@D=E{MnnL+TbC@HEU2DjG{3-LDGIbq()U87x4eS;JXnSh;lRlJ z>EL3D>wHt-+wTjQF$fGyDO$>d+(fq@bPpLBS~xA~R=3JPbS{tzN(u~m#Po!?H;IYv zE;?8%^vle|%#oux(Lj!YzBKv+Fd}*Ur-dCBoX*t{KeNM*n~ZPYJ4NNKkI^MFbz9!v z4(Bvm*Kc!-$%VFEewYJKz-CQN{`2}KX4*CeJEs+Q(!kI%hN1!1P6iOq?ovz}X0IOi z)YfWpwW@pK08^69#wSyCZkX9?uZD?C^@rw^Y?gLS_xmFKkooyx$*^5#cPqntNTtSG zlP>XLMj2!VF^0k#ole7`-c~*~+_T5ls?x4)ah(j8vo_ zwb%S8qoaZqY0-$ZI+ViIA_1~~rAH7K_+yFS{0rT@eQtTAdz#8E5VpwnW!zJ_^{Utv zlW5Iar3V5t&H4D6A=>?mq;G92;1cg9a2sf;gY9pJDVKn$DYdQlvfXq}zz8#LyPGq@ z+`YUMD;^-6w&r-82JL7mA8&M~Pj@aK!m{0+^v<|t%APYf7`}jGEhdYLqsHW-Le9TL z_hZZ1gbrz7$f9^fAzVIP30^KIz!!#+DRLL+qMszvI_BpOSmjtl$hh;&UeM{ER@INV zcI}VbiVTPoN|iSna@=7XkP&-4#06C};8ajbxJ4Gcq8(vWv4*&X8bM^T$mBk75Q92j z1v&%a;OSKc8EIrodmIiw$lOES2hzGDcjjB`kEDfJe{r}yE6`eZL zEB`9u>Cl0IsQ+t}`-cx}{6jqcANucqIB>Qmga_&<+80E2Q|VHHQ$YlAt{6`Qu`HA3 z03s0-sSlwbvgi&_R8s={6<~M^pGvBNjKOa>tWenzS8s zR>L7R5aZ=mSU{f?ib4Grx$AeFvtO5N|D>9#)ChH#Fny2maHWHOf2G=#<9Myot#+4u zWVa6d^Vseq_0=#AYS(-m$Lp;*8nC_6jXIjEM`omUmtH@QDs3|G)i4j*#_?#UYVZvJ z?YjT-?!4Q{BNun;dKBWLEw2C-VeAz`%?A>p;)PL}TAZn5j~HK>v1W&anteARlE+~+ zj>c(F;?qO3pXBb|#OZdQnm<4xWmn~;DR5SDMxt0UK_F^&eD|KZ=O;tO3vy4@4h^;2 zUL~-z`-P1aOe?|ZC1BgVsL)2^J-&vIFI%q@40w0{jjEfeVl)i9(~bt2z#2Vm)p`V_ z1;6$Ae7=YXk#=Qkd24Y23t&GvRxaOoad~NbJ+6pxqzJ>FY#Td7@`N5xp!n(c!=RE& z&<<@^a$_Ys8jqz4|5Nk#FY$~|FPC0`*a5HH!|Gssa9=~66&xG9)|=pOOJ2KE5|YrR zw!w6K2aC=J$t?L-;}5hn6mHd%hC;p8P|Dgh6D>hGnXPgi;6r+eA=?f72y9(Cf_ho{ zH6#)uD&R=73^$$NE;5piWX2bzR67fQ)`b=85o0eOLGI4c-Tb@-KNi2pz=Ke@SDcPn za$AxXib84`!Sf;Z3B@TSo`Dz7GM5Kf(@PR>Ghzi=BBxK8wRp>YQoXm+iL>H*Jo9M3 z6w&E?BC8AFTFT&Tv8zf+m9<&S&%dIaZ)Aoqkak_$r-2{$d~0g2oLETx9Y`eOAf14QXEQw3tJne;fdzl@wV#TFXSLXM2428F-Q}t+n2g%vPRMUzYPvzQ9f# zu(liiJem9P*?0%V@RwA7F53r~|I!Ty)<*AsMX3J{_4&}{6pT%Tpw>)^|DJ)>gpS~1rNEh z0$D?uO8mG?H;2BwM5a*26^7YO$XjUm40XmBsb63MoR;bJh63J;OngS5sSI+o2HA;W zdZV#8pDpC9Oez&L8loZO)MClRz!_!WD&QRtQxnazhT%Vj6Wl4G11nUk8*vSeVab@N#oJ}`KyJv+8Mo@T1-pqZ1t|?cnaVOd;1(h9 
z!$DrN=jcGsVYE-0-n?oCJ^4x)F}E;UaD-LZUIzcD?W^ficqJWM%QLy6QikrM1aKZC zi{?;oKwq^Vsr|&`i{jIphA8S6G4)$KGvpULjH%9u(Dq247;R#l&I0{IhcC|oBF*Al zvLo7Xte=C{aIt*otJD}BUq)|_pdR>{zBMT< z(^1RpZv*l*m*OV^8>9&asGBo8h*_4q*)-eCv*|Pq=XNGrZE)^(SF7^{QE_~4VDB(o zVcPA_!G+2CAtLbl+`=Q~9iW`4ZRLku!uB?;tWqVjB0lEOf}2RD7dJ=BExy=<9wkb- z9&7{XFA%n#JsHYN8t5d~=T~5DcW4$B%3M+nNvC2`0!#@sckqlzo5;hhGi(D9=*A4` z5ynobawSPRtWn&CDLEs3Xf`(8^zDP=NdF~F^s&={l7(aw&EG}KWpMjtmz7j_VLO;@ zM2NVLDxZ@GIv7*gzl1 zjq78tv*8#WSY`}Su0&C;2F$Ze(q>F(@Wm^Gw!)(j;dk9Ad{STaxn)IV9FZhm*n+U} zi;4y*3v%A`_c7a__DJ8D1b@dl0Std3F||4Wtvi)fCcBRh!X9$1x!_VzUh>*S5s!oq z;qd{J_r79EL2wIeiGAqFstWtkfIJpjVh%zFo*=55B9Zq~y0=^iqHWfQl@O!Ak;(o*m!pZqe9 z%U2oDOhR)BvW8&F70L;2TpkzIutIvNQaTjjs5V#8mV4!NQ}zN=i`i@WI1z0eN-iCS z;vL-Wxc^Vc_qK<5RPh(}*8dLT{~GzE{w2o$2kMFaEl&q zP{V=>&3kW7tWaK-Exy{~`v4J0U#OZBk{a9{&)&QG18L@6=bsZ1zC_d{{pKZ-Ey>I> z;8H0t4bwyQqgu4hmO`3|4K{R*5>qnQ&gOfdy?z`XD%e5+pTDzUt3`k^u~SaL&XMe= z9*h#kT(*Q9jO#w2Hd|Mr-%DV8i_1{J1MU~XJ3!WUplhXDYBpJH><0OU`**nIvPIof z|N8@I=wA)sf45SAvx||f?Z5uB$kz1qL3Ky_{%RPdP5iN-D2!p5scq}buuC00C@jom zhfGKm3|f?Z0iQ|K$Z~!`8{nmAS1r+fp6r#YDOS8V*;K&Gs7Lc&f^$RC66O|)28oh`NHy&vq zJh+hAw8+ybTB0@VhWN^0iiTnLsCWbS_y`^gs!LX!Lw{yE``!UVzrV24tP8o;I6-65 z1MUiHw^{bB15tmrVT*7-#sj6cs~z`wk52YQJ*TG{SE;KTm#Hf#a~|<(|ImHH17nNM z`Ub{+J3dMD!)mzC8b(2tZtokKW5pAwHa?NFiso~# z1*iaNh4lQ4TS)|@G)H4dZV@l*Vd;Rw;-;odDhW2&lJ%m@jz+Panv7LQm~2Js6rOW3 z0_&2cW^b^MYW3)@o;neZ<{B4c#m48dAl$GCc=$>ErDe|?y@z`$uq3xd(%aAsX)D%l z>y*SQ%My`yDP*zof|3@_w#cjaW_YW4BdA;#Glg1RQcJGY*CJ9`H{@|D+*e~*457kd z73p<%fB^PV!Ybw@)Dr%(ZJbX}xmCStCYv#K3O32ej{$9IzM^I{6FJ8!(=azt7RWf4 z7ib0UOPqN40X!wOnFOoddd8`!_IN~9O)#HRTyjfc#&MCZ zZAMzOVB=;qwt8gV?{Y2?b=iSZG~RF~uyx18K)IDFLl})G1v@$(s{O4@RJ%OTJyF+Cpcx4jmy|F3euCnMK!P2WTDu5j z{{gD$=M*pH!GGzL%P)V2*ROm>!$Y=z|D`!_yY6e7SU$~a5q8?hZGgaYqaiLnkK%?0 zs#oI%;zOxF@g*@(V4p!$7dS1rOr6GVs6uYCTt2h)eB4?(&w8{#o)s#%gN@BBosRUe z)@P@8_Zm89pr~)b>e{tbPC~&_MR--iB{=)y;INU5#)@Gix-YpgP<-c2Ms{9zuCX|3 z!p(?VaXww&(w&uBHzoT%!A2=3HAP>SDxcljrego7rY|%hxy3XlODWffO_%g|l+7Y_ 
zqV(xbu)s4lV=l7M;f>vJl{`6qBm>#ZeMA}kXb97Z)?R97EkoI?x6Lp0yu1Z>PS?2{ z0QQ(8D)|lc9CO3B~e(pQM&5(1y&y=e>C^X$`)_&XuaI!IgDTVqt31wX#n+@!a_A0ZQkA zCJ2@M_4Gb5MfCrm5UPggeyh)8 zO9?`B0J#rkoCx(R0I!ko_2?iO@|oRf1;3r+i)w-2&j?=;NVIdPFsB)`|IC0zk6r9c zRrkfxWsiJ(#8QndNJj@{@WP2Ackr|r1VxV{7S&rSU(^)-M8gV>@UzOLXu9K<{6e{T zXJ6b92r$!|lwjhmgqkdswY&}c)KW4A)-ac%sU;2^fvq7gfUW4Bw$b!i@duy1CAxSn z(pyh$^Z=&O-q<{bZUP+$U}=*#M9uVc>CQVgDs4swy5&8RAHZ~$)hrTF4W zPsSa~qYv_0mJnF89RnnJTH`3}w4?~epFl=D(35$ zWa07ON$`OMBOHgCmfO(9RFc<)?$x)N}Jd2A(<*Ll7+4jrRt9w zwGxExUXd9VB#I|DwfxvJ;HZ8Q{37^wDhaZ%O!oO(HpcqfLH%#a#!~;Jl7F5>EX_=8 z{()l2NqPz>La3qJR;_v+wlK>GsHl;uRA8%j`A|yH@k5r%55S9{*Cp%uw6t`qc1!*T za2OeqtQj7sAp#Q~=5Fs&aCR9v>5V+s&RdNvo&H~6FJOjvaj--2sYYBvMq;55%z8^o z|BJDA4vzfow#DO#ZQHh;Oq_{r+qP{R9ox2TOgwQiv7Ow!zjN+A@BN;0tA2lUb#+zO z(^b89eV)D7UVE+h{mcNc6&GtpOqDn_?VAQ)Vob$hlFwW%xh>D#wml{t&Ofmm_d_+; zKDxzdr}`n2Rw`DtyIjrG)eD0vut$}dJAZ0AohZ+ZQdWXn_Z@dI_y=7t3q8x#pDI-K z2VVc&EGq445Rq-j0=U=Zx`oBaBjsefY;%)Co>J3v4l8V(T8H?49_@;K6q#r~Wwppc z4XW0(4k}cP=5ex>-Xt3oATZ~bBWKv)aw|I|Lx=9C1s~&b77idz({&q3T(Y(KbWO?+ zmcZ6?WeUsGk6>km*~234YC+2e6Zxdl~<_g2J|IE`GH%n<%PRv-50; zH{tnVts*S5*_RxFT9eM0z-pksIb^drUq4>QSww=u;UFCv2AhOuXE*V4z?MM`|ABOC4P;OfhS(M{1|c%QZ=!%rQTDFx`+}?Kdx$&FU?Y<$x;j7z=(;Lyz+?EE>ov!8vvMtSzG!nMie zsBa9t8as#2nH}n8xzN%W%U$#MHNXmDUVr@GX{?(=yI=4vks|V)!-W5jHsU|h_&+kY zS_8^kd3jlYqOoiI`ZqBVY!(UfnAGny!FowZWY_@YR0z!nG7m{{)4OS$q&YDyw6vC$ zm4!$h>*|!2LbMbxS+VM6&DIrL*X4DeMO!@#EzMVfr)e4Tagn~AQHIU8?e61TuhcKD zr!F4(kEebk(Wdk-?4oXM(rJwanS>Jc%<>R(siF+>+5*CqJLecP_we33iTFTXr6W^G z7M?LPC-qFHK;E!fxCP)`8rkxZyFk{EV;G-|kwf4b$c1k0atD?85+|4V%YATWMG|?K zLyLrws36p%Qz6{}>7b>)$pe>mR+=IWuGrX{3ZPZXF3plvuv5Huax86}KX*lbPVr}L z{C#lDjdDeHr~?l|)Vp_}T|%$qF&q#U;ClHEPVuS+Jg~NjC1RP=17=aQKGOcJ6B3mp z8?4*-fAD~}sX*=E6!}^u8)+m2j<&FSW%pYr_d|p_{28DZ#Cz0@NF=gC-o$MY?8Ca8 zr5Y8DSR^*urS~rhpX^05r30Ik#2>*dIOGxRm0#0YX@YQ%Mg5b6dXlS!4{7O_kdaW8PFSdj1=ryI-=5$fiieGK{LZ+SX(1b=MNL!q#lN zv98?fqqTUH8r8C7v(cx#BQ5P9W>- zmW93;eH6T`vuJ~rqtIBg%A6>q>gnWb3X!r0wh_q;211+Om&?nvYzL1hhtjB zK_7G3!n7PL>d!kj){HQE 
zE8(%J%dWLh1_k%gVXTZt zEdT09XSKAx27Ncaq|(vzL3gm83q>6CAw<$fTnMU05*xAe&rDfCiu`u^1)CD<>sx0i z*hr^N_TeN89G(nunZoLBf^81#pmM}>JgD@Nn1l*lN#a=B=9pN%tmvYFjFIoKe_(GF z-26x{(KXdfsQL7Uv6UtDuYwV`;8V3w>oT_I<`Ccz3QqK9tYT5ZQzbop{=I=!pMOCb zCU68`n?^DT%^&m>A%+-~#lvF!7`L7a{z<3JqIlk1$<||_J}vW1U9Y&eX<}l8##6i( zZcTT@2`9(Mecptm@{3A_Y(X`w9K0EwtPq~O!16bq{7c0f7#(3wn-^)h zxV&M~iiF!{-6A@>o;$RzQ5A50kxXYj!tcgme=Qjrbje~;5X2xryU;vH|6bE(8z^<7 zQ>BG7_c*JG8~K7Oe68i#0~C$v?-t@~@r3t2inUnLT(c=URpA9kA8uq9PKU(Ps(LVH zqgcqW>Gm?6oV#AldDPKVRcEyQIdTT`Qa1j~vS{<;SwyTdr&3*t?J)y=M7q*CzucZ&B0M=joT zBbj@*SY;o2^_h*>R0e({!QHF0=)0hOj^B^d*m>SnRrwq>MolNSgl^~r8GR#mDWGYEIJA8B<|{{j?-7p zVnV$zancW3&JVDtVpIlI|5djKq0(w$KxEFzEiiL=h5Jw~4Le23@s(mYyXWL9SX6Ot zmb)sZaly_P%BeX_9 zw&{yBef8tFm+%=--m*J|o~+Xg3N+$IH)t)=fqD+|fEk4AAZ&!wcN5=mi~Vvo^i`}> z#_3ahR}Ju)(Px7kev#JGcSwPXJ2id9%Qd2A#Uc@t8~egZ8;iC{e! z%=CGJOD1}j!HW_sgbi_8suYnn4#Ou}%9u)dXd3huFIb!ytlX>Denx@pCS-Nj$`VO&j@(z!kKSP0hE4;YIP#w9ta=3DO$7f*x zc9M4&NK%IrVmZAe=r@skWD`AEWH=g+r|*13Ss$+{c_R!b?>?UaGXlw*8qDmY#xlR= z<0XFbs2t?8i^G~m?b|!Hal^ZjRjt<@a? 
z%({Gn14b4-a|#uY^=@iiKH+k?~~wTj5K1A&hU z2^9-HTC)7zpoWK|$JXaBL6C z#qSNYtY>65T@Zs&-0cHeu|RX(Pxz6vTITdzJdYippF zC-EB+n4}#lM7`2Ry~SO>FxhKboIAF#Z{1wqxaCb{#yEFhLuX;Rx(Lz%T`Xo1+a2M}7D+@wol2)OJs$TwtRNJ={( zD@#zTUEE}#Fz#&(EoD|SV#bayvr&E0vzmb%H?o~46|FAcx?r4$N z&67W3mdip-T1RIxwSm_&(%U|+WvtGBj*}t69XVd&ebn>KOuL(7Y8cV?THd-(+9>G7*Nt%T zcH;`p={`SOjaf7hNd(=37Lz3-51;58JffzIPgGs_7xIOsB5p2t&@v1mKS$2D$*GQ6 zM(IR*j4{nri7NMK9xlDy-hJW6sW|ZiDRaFiayj%;(%51DN!ZCCCXz+0Vm#};70nOx zJ#yA0P3p^1DED;jGdPbQWo0WATN=&2(QybbVdhd=Vq*liDk`c7iZ?*AKEYC#SY&2g z&Q(Ci)MJ{mEat$ZdSwTjf6h~roanYh2?9j$CF@4hjj_f35kTKuGHvIs9}Re@iKMxS-OI*`0S z6s)fOtz}O$T?PLFVSeOjSO26$@u`e<>k(OSP!&YstH3ANh>)mzmKGNOwOawq-MPXe zy4xbeUAl6tamnx))-`Gi2uV5>9n(73yS)Ukma4*7fI8PaEwa)dWHs6QA6>$}7?(L8 ztN8M}?{Tf!Zu22J5?2@95&rQ|F7=FK-hihT-vDp!5JCcWrVogEnp;CHenAZ)+E+K5 z$Cffk5sNwD_?4+ymgcHR(5xgt20Z8M`2*;MzOM#>yhk{r3x=EyM226wb&!+j`W<%* zSc&|`8!>dn9D@!pYow~(DsY_naSx7(Z4i>cu#hA5=;IuI88}7f%)bRkuY2B;+9Uep zpXcvFWkJ!mQai63BgNXG26$5kyhZ2&*3Q_tk)Ii4M>@p~_~q_cE!|^A;_MHB;7s#9 zKzMzK{lIxotjc};k67^Xsl-gS!^*m*m6kn|sbdun`O?dUkJ{0cmI0-_2y=lTAfn*Y zKg*A-2sJq)CCJgY0LF-VQvl&6HIXZyxo2#!O&6fOhbHXC?%1cMc6y^*dOS{f$=137Ds1m01qs`>iUQ49JijsaQ( zksqV9@&?il$|4Ua%4!O15>Zy&%gBY&wgqB>XA3!EldQ%1CRSM(pp#k~-pkcCg4LAT zXE=puHbgsw)!xtc@P4r~Z}nTF=D2~j(6D%gTBw$(`Fc=OOQ0kiW$_RDd=hcO0t97h zb86S5r=>(@VGy1&#S$Kg_H@7G^;8Ue)X5Y+IWUi`o;mpvoV)`fcVk4FpcT|;EG!;? 
zHG^zrVVZOm>1KFaHlaogcWj(v!S)O(Aa|Vo?S|P z5|6b{qkH(USa*Z7-y_Uvty_Z1|B{rTS^qmEMLEYUSk03_Fg&!O3BMo{b^*`3SHvl0 zhnLTe^_vVIdcSHe)SQE}r~2dq)VZJ!aSKR?RS<(9lzkYo&dQ?mubnWmgMM37Nudwo z3Vz@R{=m2gENUE3V4NbIzAA$H1z0pagz94-PTJyX{b$yndsdKptmlKQKaaHj@3=ED zc7L?p@%ui|RegVYutK$64q4pe9+5sv34QUpo)u{1ci?)_7gXQd{PL>b0l(LI#rJmN zGuO+%GO`xneFOOr4EU(Wg}_%bhzUf;d@TU+V*2#}!2OLwg~%D;1FAu=Un>OgjPb3S z7l(riiCwgghC=Lm5hWGf5NdGp#01xQ59`HJcLXbUR3&n%P(+W2q$h2Qd z*6+-QXJ*&Kvk9ht0f0*rO_|FMBALen{j7T1l%=Q>gf#kma zQlg#I9+HB+z*5BMxdesMND`_W;q5|FaEURFk|~&{@qY32N$G$2B=&Po{=!)x5b!#n zxLzblkq{yj05#O7(GRuT39(06FJlalyv<#K4m}+vs>9@q-&31@1(QBv82{}Zkns~K ze{eHC_RDX0#^A*JQTwF`a=IkE6Ze@j#-8Q`tTT?k9`^ZhA~3eCZJ-Jr{~7Cx;H4A3 zcZ+Zj{mzFZbVvQ6U~n>$U2ZotGsERZ@}VKrgGh0xM;Jzt29%TX6_&CWzg+YYMozrM z`nutuS)_0dCM8UVaKRj804J4i%z2BA_8A4OJRQ$N(P9Mfn-gF;4#q788C@9XR0O3< zsoS4wIoyt046d+LnSCJOy@B@Uz*#GGd#+Ln1ek5Dv>(ZtD@tgZlPnZZJGBLr^JK+!$$?A_fA3LOrkoDRH&l7 zcMcD$Hsjko3`-{bn)jPL6E9Ds{WskMrivsUu5apD z?grQO@W7i5+%X&E&p|RBaEZ(sGLR@~(y^BI@lDMot^Ll?!`90KT!JXUhYS`ZgX3jnu@Ja^seA*M5R@f`=`ynQV4rc$uT1mvE?@tz)TN<=&H1%Z?5yjxcpO+6y_R z6EPuPKM5uxKpmZfT(WKjRRNHs@ib)F5WAP7QCADvmCSD#hPz$V10wiD&{NXyEwx5S z6NE`3z!IS^$s7m}PCwQutVQ#~w+V z=+~->DI*bR2j0^@dMr9`p>q^Ny~NrAVxrJtX2DUveic5vM%#N*XO|?YAWwNI$Q)_) zvE|L(L1jP@F%gOGtnlXtIv2&1i8q<)Xfz8O3G^Ea~e*HJsQgBxWL(yuLY+jqUK zRE~`-zklrGog(X}$9@ZVUw!8*=l`6mzYLtsg`AvBYz(cxmAhr^j0~(rzXdiOEeu_p zE$sf2(w(BPAvO5DlaN&uQ$4@p-b?fRs}d7&2UQ4Fh?1Hzu*YVjcndqJLw0#q@fR4u zJCJ}>_7-|QbvOfylj+e^_L`5Ep9gqd>XI3-O?Wp z-gt*P29f$Tx(mtS`0d05nHH=gm~Po_^OxxUwV294BDKT>PHVlC5bndncxGR!n(OOm znsNt@Q&N{TLrmsoKFw0&_M9$&+C24`sIXGWgQaz=kY;S{?w`z^Q0JXXBKFLj0w0U6P*+jPKyZHX9F#b0D1$&(- zrm8PJd?+SrVf^JlfTM^qGDK&-p2Kdfg?f>^%>1n8bu&byH(huaocL>l@f%c*QkX2i znl}VZ4R1en4S&Bcqw?$=Zi7ohqB$Jw9x`aM#>pHc0x z0$!q7iFu zZ`tryM70qBI6JWWTF9EjgG@>6SRzsd}3h+4D8d~@CR07P$LJ}MFsYi-*O%XVvD@yT|rJ+Mk zDllJ7$n0V&A!0flbOf)HE6P_afPWZmbhpliqJuw=-h+r;WGk|ntkWN(8tKlYpq5Ow z(@%s>IN8nHRaYb*^d;M(D$zGCv5C|uqmsDjwy4g=Lz>*OhO3z=)VD}C<65;`89Ye} 
zSCxrv#ILzIpEx1KdLPlM&%Cctf@FqTKvNPXC&`*H9=l=D3r!GLM?UV zOxa(8ZsB`&+76S-_xuj?G#wXBfDY@Z_tMpXJS7^mp z@YX&u0jYw2A+Z+bD#6sgVK5ZgdPSJV3>{K^4~%HV?rn~4D)*2H!67Y>0aOmzup`{D zzDp3c9yEbGCY$U<8biJ_gB*`jluz1ShUd!QUIQJ$*1;MXCMApJ^m*Fiv88RZ zFopLViw}{$Tyhh_{MLGIE2~sZ)t0VvoW%=8qKZ>h=adTe3QM$&$PO2lfqH@brt!9j ziePM8$!CgE9iz6B<6_wyTQj?qYa;eC^{x_0wuwV~W+^fZmFco-o%wsKSnjXFEx02V zF5C2t)T6Gw$Kf^_c;Ei3G~uC8SM-xyycmXyC2hAVi-IfXqhu$$-C=*|X?R0~hu z8`J6TdgflslhrmDZq1f?GXF7*ALeMmOEpRDg(s*H`4>_NAr`2uqF;k;JQ+8>A|_6ZNsNLECC%NNEb1Y1dP zbIEmNpK)#XagtL4R6BC{C5T(+=yA-(Z|Ap}U-AfZM#gwVpus3(gPn}Q$CExObJ5AC z)ff9Yk?wZ}dZ-^)?cbb9Fw#EjqQ8jxF4G3=L?Ra zg_)0QDMV1y^A^>HRI$x?Op@t;oj&H@1xt4SZ9(kifQ zb59B*`M99Td7@aZ3UWvj1rD0sE)d=BsBuW*KwkCds7ay(7*01_+L}b~7)VHI>F_!{ zyxg-&nCO?v#KOUec0{OOKy+sjWA;8rTE|Lv6I9H?CI?H(mUm8VXGwU$49LGpz&{nQp2}dinE1@lZ1iox6{ghN&v^GZv9J${7WaXj)<0S4g_uiJ&JCZ zr8-hsu`U%N;+9N^@&Q0^kVPB3)wY(rr}p7{p0qFHb3NUUHJb672+wRZs`gd1UjKPX z4o6zljKKA+Kkj?H>Ew63o%QjyBk&1!P22;MkD>sM0=z_s-G{mTixJCT9@_|*(p^bz zJ8?ZZ&;pzV+7#6Mn`_U-)k8Pjg?a;|Oe^us^PoPY$Va~yi8|?+&=y$f+lABT<*pZr zP}D{~Pq1Qyni+@|aP;ixO~mbEW9#c0OU#YbDZIaw=_&$K%Ep2f%hO^&P67hApZe`x zv8b`Mz@?M_7-)b!lkQKk)JXXUuT|B8kJlvqRmRpxtQDgvrHMXC1B$M@Y%Me!BSx3P z#2Eawl$HleZhhTS6Txm>lN_+I`>eV$&v9fOg)%zVn3O5mI*lAl>QcHuW6!Kixmq`X zBCZ*Ck6OYtDiK!N47>jxI&O2a9x7M|i^IagRr-fmrmikEQGgw%J7bO|)*$2FW95O4 zeBs>KR)izRG1gRVL;F*sr8A}aRHO0gc$$j&ds8CIO1=Gwq1%_~E)CWNn9pCtBE}+`Jelk4{>S)M)`Ll=!~gnn1yq^EX(+y*ik@3Ou0qU`IgYi3*doM+5&dU!cho$pZ zn%lhKeZkS72P?Cf68<#kll_6OAO26bIbueZx**j6o;I0cS^XiL`y+>{cD}gd%lux} z)3N>MaE24WBZ}s0ApfdM;5J_Ny}rfUyxfkC``Awo2#sgLnGPewK};dORuT?@I6(5~ z?kE)Qh$L&fwJXzK){iYx!l5$Tt|^D~MkGZPA}(o6f7w~O2G6Vvzdo*a;iXzk$B66$ zwF#;wM7A+(;uFG4+UAY(2`*3XXx|V$K8AYu#ECJYSl@S=uZW$ksfC$~qrrbQj4??z-)uz0QL}>k^?fPnJTPw% zGz)~?B4}u0CzOf@l^um}HZzbaIwPmb<)< zi_3@E9lc)Qe2_`*Z^HH;1CXOceL=CHpHS{HySy3T%<^NrWQ}G0i4e1xm_K3(+~oi$ zoHl9wzb?Z4j#90DtURtjtgvi7uw8DzHYmtPb;?%8vb9n@bszT=1qr)V_>R%s!92_` zfnHQPANx z<#hIjIMm#*(v*!OXtF+w8kLu`o?VZ5k7{`vw{Yc^qYclpUGIM_PBN1+c{#Vxv&E*@ 
zxg=W2W~JuV{IuRYw3>LSI1)a!thID@R=bU+cU@DbR^_SXY`MC7HOsCN z!dO4OKV7(E_Z8T#8MA1H`99?Z!r0)qKW_#|29X3#Jb+5+>qUidbeP1NJ@)(qi2S-X zao|f0_tl(O+$R|Qwd$H{_ig|~I1fbp_$NkI!0E;Y z6JrnU{1Ra6^on{9gUUB0mwzP3S%B#h0fjo>JvV~#+X0P~JV=IG=yHG$O+p5O3NUgG zEQ}z6BTp^Fie)Sg<){Z&I8NwPR(=mO4joTLHkJ>|Tnk23E(Bo`FSbPc05lF2-+)X? z6vV3*m~IBHTy*^E!<0nA(tCOJW2G4DsH7)BxLV8kICn5lu6@U*R`w)o9;Ro$i8=Q^V%uH8n3q=+Yf;SFRZu z!+F&PKcH#8cG?aSK_Tl@K9P#8o+jry@gdexz&d(Q=47<7nw@e@FFfIRNL9^)1i@;A z28+$Z#rjv-wj#heI|<&J_DiJ*s}xd-f!{J8jfqOHE`TiHHZVIA8CjkNQ_u;Ery^^t zl1I75&u^`1_q)crO+JT4rx|z2ToSC>)Or@-D zy3S>jW*sNIZR-EBsfyaJ+Jq4BQE4?SePtD2+jY8*%FsSLZ9MY>+wk?}}}AFAw)vr{ml)8LUG-y9>^t!{~|sgpxYc0Gnkg`&~R z-pilJZjr@y5$>B=VMdZ73svct%##v%wdX~9fz6i3Q-zOKJ9wso+h?VME7}SjL=!NUG{J?M&i!>ma`eoEa@IX`5G>B1(7;%}M*%-# zfhJ(W{y;>MRz!Ic8=S}VaBKqh;~7KdnGEHxcL$kA-6E~=!hrN*zw9N+_=odt<$_H_8dbo;0=42wcAETPCVGUr~v(`Uai zb{=D!Qc!dOEU6v)2eHSZq%5iqK?B(JlCq%T6av$Cb4Rko6onlG&?CqaX7Y_C_cOC3 zYZ;_oI(}=>_07}Oep&Ws7x7-R)cc8zfe!SYxJYP``pi$FDS)4Fvw5HH=FiU6xfVqIM!hJ;Rx8c0cB7~aPtNH(Nmm5Vh{ibAoU#J6 zImRCr?(iyu_4W_6AWo3*vxTPUw@vPwy@E0`(>1Qi=%>5eSIrp^`` zK*Y?fK_6F1W>-7UsB)RPC4>>Ps9)f+^MqM}8AUm@tZ->j%&h1M8s*s!LX5&WxQcAh z8mciQej@RPm?660%>{_D+7er>%zX_{s|$Z+;G7_sfNfBgY(zLB4Ey}J9F>zX#K0f6 z?dVNIeEh?EIShmP6>M+d|0wMM85Sa4diw1hrg|ITJ}JDg@o8y>(rF9mXk5M z2@D|NA)-7>wD&wF;S_$KS=eE84`BGw3g0?6wGxu8ys4rwI?9U=*^VF22t3%mbGeOh z`!O-OpF7#Vceu~F`${bW0nYVU9ecmk31V{tF%iv&5hWofC>I~cqAt@u6|R+|HLMMX zVxuSlMFOK_EQ86#E8&KwxIr8S9tj_goWtLv4f@!&h8;Ov41{J~496vp9vX=(LK#j! 
zAwi*21RAV-LD>9Cw3bV_9X(X3)Kr0-UaB*7Y>t82EQ%!)(&(XuAYtTsYy-dz+w=$ir)VJpe!_$ z6SGpX^i(af3{o=VlFPC);|J8#(=_8#vdxDe|Cok+ANhYwbE*FO`Su2m1~w+&9<_9~ z-|tTU_ACGN`~CNW5WYYBn^B#SwZ(t4%3aPp z;o)|L6Rk569KGxFLUPx@!6OOa+5OjQLK5w&nAmwxkC5rZ|m&HT8G%GVZxB_@ME z>>{rnXUqyiJrT(8GMj_ap#yN_!9-lO5e8mR3cJiK3NE{_UM&=*vIU`YkiL$1%kf+1 z4=jk@7EEj`u(jy$HnzE33ZVW_J4bj}K;vT?T91YlO(|Y0FU4r+VdbmQ97%(J5 zkK*Bed8+C}FcZ@HIgdCMioV%A<*4pw_n}l*{Cr4}a(lq|injK#O?$tyvyE`S%(1`H z_wwRvk#13ElkZvij2MFGOj`fhy?nC^8`Zyo%yVcUAfEr8x&J#A{|moUBAV_^f$hpaUuyQeY3da^ zS9iRgf87YBwfe}>BO+T&Fl%rfpZh#+AM?Dq-k$Bq`vG6G_b4z%Kbd&v>qFjow*mBl z-OylnqOpLg}or7_VNwRg2za3VBK6FUfFX{|TD z`Wt0Vm2H$vdlRWYQJqDmM?JUbVqL*ZQY|5&sY*?!&%P8qhA~5+Af<{MaGo(dl&C5t zE%t!J0 zh6jqANt4ABdPxSTrVV}fLsRQal*)l&_*rFq(Ez}ClEH6LHv{J#v?+H-BZ2)Wy{K@9 z+ovXHq~DiDvm>O~r$LJo!cOuwL+Oa--6;UFE2q@g3N8Qkw5E>ytz^(&($!O47+i~$ zKM+tkAd-RbmP{s_rh+ugTD;lriL~`Xwkad#;_aM?nQ7L_muEFI}U_4$phjvYgleK~`Fo`;GiC07&Hq1F<%p;9Q;tv5b?*QnR%8DYJH3P>Svmv47Y>*LPZJy8_{9H`g6kQpyZU{oJ`m%&p~D=K#KpfoJ@ zn-3cqmHsdtN!f?~w+(t+I`*7GQA#EQC^lUA9(i6=i1PqSAc|ha91I%X&nXzjYaM{8$s&wEx@aVkQ6M{E2 zfzId#&r(XwUNtPcq4Ngze^+XaJA1EK-%&C9j>^9(secqe{}z>hR5CFNveMsVA)m#S zk)_%SidkY-XmMWlVnQ(mNJ>)ooszQ#vaK;!rPmGKXV7am^_F!Lz>;~{VrIO$;!#30XRhE1QqO_~#+Ux;B_D{Nk=grn z8Y0oR^4RqtcYM)7a%@B(XdbZCOqnX#fD{BQTeLvRHd(irHKq=4*jq34`6@VAQR8WG z^%)@5CXnD_T#f%@-l${>y$tfb>2LPmc{~5A82|16mH)R?&r#KKLs7xpN-D`=&Cm^R zvMA6#Ahr<3X>Q7|-qfTY)}32HkAz$_mibYV!I)u>bmjK`qwBe(>za^0Kt*HnFbSdO z1>+ryKCNxmm^)*$XfiDOF2|{-v3KKB?&!(S_Y=Ht@|ir^hLd978xuI&N{k>?(*f8H z=ClxVJK_%_z1TH0eUwm2J+2To7FK4o+n_na)&#VLn1m;!+CX+~WC+qg1?PA~KdOlC zW)C@pw75_xoe=w7i|r9KGIvQ$+3K?L{7TGHwrQM{dCp=Z*D}3kX7E-@sZnup!BImw z*T#a=+WcTwL78exTgBn|iNE3#EsOorO z*kt)gDzHiPt07fmisA2LWN?AymkdqTgr?=loT7z@d`wnlr6oN}@o|&JX!yPzC*Y8d zu6kWlTzE1)ckyBn+0Y^HMN+GA$wUO_LN6W>mxCo!0?oiQvT`z$jbSEu&{UHRU0E8# z%B^wOc@S!yhMT49Y)ww(Xta^8pmPCe@eI5C*ed96)AX9<>))nKx0(sci8gwob_1}4 z0DIL&vsJ1_s%<@y%U*-eX z5rN&(zef-5G~?@r79oZGW1d!WaTqQn0F6RIOa9tJ=0(kdd{d1{<*tHT#cCvl*i>YY 
zH+L7jq8xZNcTUBqj(S)ztTU!TM!RQ}In*n&Gn<>(60G7}4%WQL!o>hbJqNDSGwl#H z`4k+twp0cj%PsS+NKaxslAEu9!#U3xT1|_KB6`h=PI0SW`P9GTa7caD1}vKEglV8# zjKZR`pluCW19c2fM&ZG)c3T3Um;ir3y(tSCJ7Agl6|b524dy5El{^EQBG?E61H0XY z`bqg!;zhGhyMFl&(o=JWEJ8n~z)xI}A@C0d2hQGvw7nGv)?POU@(kS1m=%`|+^ika zXl8zjS?xqW$WlO?Ewa;vF~XbybHBor$f<%I&*t$F5fynwZlTGj|IjZtVfGa7l&tK} zW>I<69w(cZLu)QIVG|M2xzW@S+70NinQzk&Y0+3WT*cC)rx~04O-^<{JohU_&HL5XdUKW!uFy|i$FB|EMu0eUyW;gsf`XfIc!Z0V zeK&*hPL}f_cX=@iv>K%S5kL;cl_$v?n(Q9f_cChk8Lq$glT|=e+T*8O4H2n<=NGmn z+2*h+v;kBvF>}&0RDS>)B{1!_*XuE8A$Y=G8w^qGMtfudDBsD5>T5SB;Qo}fSkkiV ze^K^M(UthkwrD!&*tTsu>Dacdj_q`~V%r_twr$(Ct&_dKeeXE?fA&4&yASJWJ*}~- zel=@W)tusynfC_YqH4ll>4Eg`Xjs5F7Tj>tTLz<0N3)X<1px_d2yUY>X~y>>93*$) z5PuNMQLf9Bu?AAGO~a_|J2akO1M*@VYN^VxvP0F$2>;Zb9;d5Yfd8P%oFCCoZE$ z4#N$^J8rxYjUE_6{T%Y>MmWfHgScpuGv59#4u6fpTF%~KB^Ae`t1TD_^Ud#DhL+Dm zbY^VAM#MrAmFj{3-BpVSWph2b_Y6gCnCAombVa|1S@DU)2r9W<> zT5L8BB^er3zxKt1v(y&OYk!^aoQisqU zH(g@_o)D~BufUXcPt!Ydom)e|aW{XiMnes2z&rE?og>7|G+tp7&^;q?Qz5S5^yd$i z8lWr4g5nctBHtigX%0%XzIAB8U|T6&JsC4&^hZBw^*aIcuNO47de?|pGXJ4t}BB`L^d8tD`H`i zqrP8?#J@8T#;{^B!KO6J=@OWKhAerih(phML`(Rg7N1XWf1TN>=Z3Do{l_!d~DND&)O)D>ta20}@Lt77qSnVsA7>)uZAaT9bsB>u&aUQl+7GiY2|dAEg@%Al3i316y;&IhQL^8fw_nwS>f60M_-m+!5)S_6EPM7Y)(Nq^8gL7(3 zOiot`6Wy6%vw~a_H?1hLVzIT^i1;HedHgW9-P#)}Y6vF%C=P70X0Tk^z9Te@kPILI z_(gk!k+0%CG)%!WnBjjw*kAKs_lf#=5HXC00s-}oM-Q1aXYLj)(1d!_a7 z*Gg4Fe6F$*ujVjI|79Z5+Pr`us%zW@ln++2l+0hsngv<{mJ%?OfSo_3HJXOCys{Ug z00*YR-(fv<=&%Q!j%b-_ppA$JsTm^_L4x`$k{VpfLI(FMCap%LFAyq;#ns5bR7V+x zO!o;c5y~DyBPqdVQX)8G^G&jWkBy2|oWTw>)?5u}SAsI$RjT#)lTV&Rf8;>u*qXnb z8F%Xb=7#$m)83z%`E;49)t3fHInhtc#kx4wSLLms!*~Z$V?bTyUGiS&m>1P(952(H zuHdv=;o*{;5#X-uAyon`hP}d#U{uDlV?W?_5UjJvf%11hKwe&(&9_~{W)*y1nR5f_ z!N(R74nNK`y8>B!0Bt_Vr!;nc3W>~RiKtGSBkNlsR#-t^&;$W#)f9tTlZz>n*+Fjz z3zXZ;jf(sTM(oDzJt4FJS*8c&;PLTW(IQDFs_5QPy+7yhi1syPCarvqrHFcf&yTy)^O<1EBx;Ir`5W{TIM>{8w&PB>ro4;YD<5LF^TjTb0!zAP|QijA+1Vg>{Afv^% zmrkc4o6rvBI;Q8rj4*=AZacy*n8B{&G3VJc)so4$XUoie0)vr;qzPZVbb<#Fc=j+8CGBWe$n|3K& 
z_@%?{l|TzKSlUEO{U{{%Fz_pVDxs7i9H#bnbCw7@4DR=}r_qV!Zo~CvD4ZI*+j3kO zW6_=|S`)(*gM0Z;;}nj`73OigF4p6_NPZQ-Od~e$c_);;4-7sR>+2u$6m$Gf%T{aq zle>e3(*Rt(TPD}03n5)!Ca8Pu!V}m6v0o1;5<1h$*|7z|^(3$Y&;KHKTT}hV056wuF0Xo@mK-52~r=6^SI1NC%c~CC?n>yX6wPTgiWYVz!Sx^atLby9YNn1Rk{g?|pJaxD4|9cUf|V1_I*w zzxK)hRh9%zOl=*$?XUjly5z8?jPMy%vEN)f%T*|WO|bp5NWv@B(K3D6LMl!-6dQg0 zXNE&O>Oyf%K@`ngCvbGPR>HRg5!1IV$_}m@3dWB7x3t&KFyOJn9pxRXCAzFr&%37wXG;z^xaO$ekR=LJG ztIHpY8F5xBP{mtQidqNRoz= z@){+N3(VO5bD+VrmS^YjG@+JO{EOIW)9=F4v_$Ed8rZtHvjpiEp{r^c4F6Ic#ChlC zJX^DtSK+v(YdCW)^EFcs=XP7S>Y!4=xgmv>{S$~@h=xW-G4FF9?I@zYN$e5oF9g$# zb!eVU#J+NjLyX;yb)%SY)xJdvGhsnE*JEkuOVo^k5PyS=o#vq!KD46UTW_%R=Y&0G zFj6bV{`Y6)YoKgqnir2&+sl+i6foAn-**Zd1{_;Zb7Ki=u394C5J{l^H@XN`_6XTKY%X1AgQM6KycJ+= zYO=&t#5oSKB^pYhNdzPgH~aEGW2=ec1O#s-KG z71}LOg@4UEFtp3GY1PBemXpNs6UK-ax*)#$J^pC_me;Z$Je(OqLoh|ZrW*mAMBFn< zHttjwC&fkVfMnQeen8`Rvy^$pNRFVaiEN4Pih*Y3@jo!T0nsClN)pdrr9AYLcZxZ| zJ5Wlj+4q~($hbtuY zVQ7hl>4-+@6g1i`1a)rvtp-;b0>^`Dloy(#{z~ytgv=j4q^Kl}wD>K_Y!l~ zp(_&7sh`vfO(1*MO!B%<6E_bx1)&s+Ae`O)a|X=J9y~XDa@UB`m)`tSG4AUhoM=5& znWoHlA-(z@3n0=l{E)R-p8sB9XkV zZ#D8wietfHL?J5X0%&fGg@MH~(rNS2`GHS4xTo7L$>TPme+Is~!|79=^}QbPF>m%J zFMkGzSndiPO|E~hrhCeo@&Ea{M(ieIgRWMf)E}qeTxT8Q#g-!Lu*x$v8W^M^>?-g= zwMJ$dThI|~M06rG$Sv@C@tWR>_YgaG&!BAbkGggVQa#KdtDB)lMLNVLN|51C@F^y8 zCRvMB^{GO@j=cHfmy}_pCGbP%xb{pNN>? 
z?7tBz$1^zVaP|uaatYaIN+#xEN4jBzwZ|YI_)p(4CUAz1ZEbDk>J~Y|63SZaak~#0 zoYKruYsWHoOlC1(MhTnsdUOwQfz5p6-D0}4;DO$B;7#M{3lSE^jnTT;ns`>!G%i*F?@pR1JO{QTuD0U+~SlZxcc8~>IB{)@8p`P&+nDxNj`*gh|u?yrv$phpQcW)Us)bi`kT%qLj(fi{dWRZ%Es2!=3mI~UxiW0$-v3vUl?#g{p6eF zMEUAqo5-L0Ar(s{VlR9g=j7+lt!gP!UN2ICMokAZ5(Agd>})#gkA2w|5+<%-CuEP# zqgcM}u@3(QIC^Gx<2dbLj?cFSws_f3e%f4jeR?4M^M3cx1f+Qr6ydQ>n)kz1s##2w zk}UyQc+Z5G-d-1}{WzjkLXgS-2P7auWSJ%pSnD|Uivj5u!xk0 z_^-N9r9o;(rFDt~q1PvE#iJZ_f>J3gcP$)SOqhE~pD2|$=GvpL^d!r z6u=sp-CrMoF7;)}Zd7XO4XihC4ji?>V&(t^?@3Q&t9Mx=qex6C9d%{FE6dvU6%d94 zIE;hJ1J)cCqjv?F``7I*6bc#X)JW2b4f$L^>j{*$R`%5VHFi*+Q$2;nyieduE}qdS{L8y8F08yLs?w}{>8>$3236T-VMh@B zq-nujsb_1aUv_7g#)*rf9h%sFj*^mIcImRV*k~Vmw;%;YH(&ylYpy!&UjUVqqtfG` zox3esju?`unJJA_zKXRJP)rA3nXc$m^{S&-p|v|-0x9LHJm;XIww7C#R$?00l&Yyj z=e}gKUOpsImwW?N)+E(awoF@HyP^EhL+GlNB#k?R<2>95hz!h9sF@U20DHSB3~WMa zk90+858r@-+vWwkawJ)8ougd(i#1m3GLN{iSTylYz$brAsP%=&m$mQQrH$g%3-^VR zE%B`Vi&m8f3T~&myTEK28BDWCVzfWir1I?03;pX))|kY5ClO^+bae z*7E?g=3g7EiisYOrE+lA)2?Ln6q2*HLNpZEWMB|O-JI_oaHZB%CvYB(%=tU= zE*OY%QY58fW#RG5=gm0NR#iMB=EuNF@)%oZJ}nmm=tsJ?eGjia{e{yuU0l3{d^D@)kVDt=1PE)&tf_hHC%0MB znL|CRCPC}SeuVTdf>-QV70`0(EHizc21s^sU>y%hW0t!0&y<7}Wi-wGy>m%(-jsDj zP?mF|>p_K>liZ6ZP(w5(|9Ga%>tLgb$|doDDfkdW>Z z`)>V2XC?NJT26mL^@ zf+IKr27TfM!UbZ@?zRddC7#6ss1sw%CXJ4FWC+t3lHZupzM77m^=9 z&(a?-LxIq}*nvv)y?27lZ{j zifdl9hyJudyP2LpU$-kXctshbJDKS{WfulP5Dk~xU4Le4c#h^(YjJit4#R8_khheS z|8(>2ibaHES4+J|DBM7I#QF5u-*EdN{n=Kt@4Zt?@Tv{JZA{`4 zU#kYOv{#A&gGPwT+$Ud}AXlK3K7hYzo$(fBSFjrP{QQ zeaKg--L&jh$9N}`pu{Bs>?eDFPaWY4|9|foN%}i;3%;@4{dc+iw>m}{3rELqH21G! z`8@;w-zsJ1H(N3%|1B@#ioLOjib)j`EiJqPQVSbPSPVHCj6t5J&(NcWzBrzCiDt{4 zdlPAUKldz%6x5II1H_+jv)(xVL+a;P+-1hv_pM>gMRr%04@k;DTokASSKKhU1Qms| zrWh3a!b(J3n0>-tipg{a?UaKsP7?+|@A+1WPDiQIW1Sf@qDU~M_P65_s}7(gjTn0X zucyEm)o;f8UyshMy&>^SC3I|C6jR*R_GFwGranWZe*I>K+0k}pBuET&M~ z;Odo*ZcT?ZpduHyrf8E%IBFtv;JQ!N_m>!sV6ly$_1D{(&nO~w)G~Y`7sD3#hQk%^ zp}ucDF_$!6DAz*PM8yE(&~;%|=+h(Rn-=1Wykas_-@d&z#=S}rDf`4w(rVlcF&lF! 
z=1)M3YVz7orwk^BXhslJ8jR);sh^knJW(Qmm(QdSgIAIdlN4Te5KJisifjr?eB{FjAX1a0AB>d?qY4Wx>BZ8&}5K0fA+d{l8 z?^s&l8#j7pR&ijD?0b%;lL9l$P_mi2^*_OL+b}4kuLR$GAf85sOo02?Y#90}CCDiS zZ%rbCw>=H~CBO=C_JVV=xgDe%b4FaEFtuS7Q1##y686r%F6I)s-~2(}PWK|Z8M+Gu zl$y~5@#0Ka%$M<&Cv%L`a8X^@tY&T7<0|(6dNT=EsRe0%kp1Qyq!^43VAKYnr*A5~ zsI%lK1ewqO;0TpLrT9v}!@vJK{QoVa_+N4FYT#h?Y8rS1S&-G+m$FNMP?(8N`MZP zels(*?kK{{^g9DOzkuZXJ2;SrOQsp9T$hwRB1(phw1c7`!Q!by?Q#YsSM#I12RhU{$Q+{xj83axHcftEc$mNJ8_T7A-BQc*k(sZ+~NsO~xAA zxnbb%dam_fZlHvW7fKXrB~F&jS<4FD2FqY?VG?ix*r~MDXCE^WQ|W|WM;gsIA4lQP zJ2hAK@CF*3*VqPr2eeg6GzWFlICi8S>nO>5HvWzyZTE)hlkdC_>pBej*>o0EOHR|) z$?};&I4+_?wvL*g#PJ9)!bc#9BJu1(*RdNEn>#Oxta(VWeM40ola<0aOe2kSS~{^P zDJBd}0L-P#O-CzX*%+$#v;(x%<*SPgAje=F{Zh-@ucd2DA(yC|N_|ocs*|-!H%wEw z@Q!>siv2W;C^^j^59OAX03&}&D*W4EjCvfi(ygcL#~t8XGa#|NPO+*M@Y-)ctFA@I z-p7npT1#5zOLo>7q?aZpCZ=iecn3QYklP;gF0bq@>oyBq94f6C=;Csw3PkZ|5q=(c zfs`aw?II0e(h=|7o&T+hq&m$; zBrE09Twxd9BJ2P+QPN}*OdZ-JZV7%av@OM7v!!NL8R;%WFq*?{9T3{ct@2EKgc8h) zMxoM$SaF#p<`65BwIDfmXG6+OiK0e)`I=!A3E`+K@61f}0e z!2a*FOaDrOe>U`q%K!QN`&=&0C~)CaL3R4VY(NDt{Xz(Xpqru5=r#uQN1L$Je1*dkdqQ*=lofQaN%lO!<5z9ZlHgxt|`THd>2 zsWfU$9=p;yLyJyM^t zS2w9w?Bpto`@H^xJpZDKR1@~^30Il6oFGfk5%g6w*C+VM)+%R@gfIwNprOV5{F^M2 zO?n3DEzpT+EoSV-%OdvZvNF+pDd-ZVZ&d8 zKeIyrrfPN=EcFRCPEDCVflX#3-)Ik_HCkL(ejmY8vzcf-MTA{oHk!R2*36`O68$7J zf}zJC+bbQk--9Xm!u#lgLvx8TXx2J258E5^*IZ(FXMpq$2LUUvhWQPs((z1+2{Op% z?J}9k5^N=z;7ja~zi8a_-exIqWUBJwohe#4QJ`|FF*$C{lM18z^#hX6!5B8KAkLUX ziP=oti-gpV(BsLD{0(3*dw}4JxK23Y7M{BeFPucw!sHpY&l%Ws4pSm`+~V7;bZ%Dx zeI)MK=4vC&5#;2MT7fS?^ch9?2;%<8Jlu-IB&N~gg8t;6S-#C@!NU{`p7M8@2iGc& zg|JPg%@gCoCQ&s6JvDU&`X2S<57f(k8nJ1wvBu{8r?;q3_kpZZ${?|( z+^)UvR33sjSd)aT!UPkA;ylO6{aE3MQa{g%Mcf$1KONcjO@&g5zPHWtzM1rYC{_K> zgQNcs<{&X{OA=cEWw5JGqpr0O>x*Tfak2PE9?FuWtz^DDNI}rwAaT0(bdo-<+SJ6A z&}S%boGMWIS0L}=S>|-#kRX;e^sUsotry(MjE|3_9duvfc|nwF#NHuM-w7ZU!5ei8 z6Mkf>2)WunY2eU@C-Uj-A zG(z0Tz2YoBk>zCz_9-)4a>T46$(~kF+Y{#sA9MWH%5z#zNoz)sdXq7ZR_+`RZ%0(q zC7&GyS_|BGHNFl8Xa%@>iWh%Gr?=J5<(!OEjauj5jyrA-QXBjn0OAhJJ9+v=!LK`` 
z@g(`^*84Q4jcDL`OA&ZV60djgwG`|bcD*i50O}Q{9_noRg|~?dj%VtKOnyRs$Uzqg z191aWoR^rDX#@iSq0n z?9Sg$WSRPqSeI<}&n1T3!6%Wj@5iw5`*`Btni~G=&;J+4`7g#OQTa>u`{4ZZ(c@s$ zK0y;ySOGD-UTjREKbru{QaS>HjN<2)R%Nn-TZiQ(Twe4p@-saNa3~p{?^V9Nixz@a zykPv~<@lu6-Ng9i$Lrk(xi2Tri3q=RW`BJYOPC;S0Yly%77c727Yj-d1vF!Fuk{Xh z)lMbA69y7*5ufET>P*gXQrxsW+ zz)*MbHZv*eJPEXYE<6g6_M7N%#%mR{#awV3i^PafNv(zyI)&bH?F}2s8_rR(6%!V4SOWlup`TKAb@ee>!9JKPM=&8g#BeYRH9FpFybxBXQI2|g}FGJfJ+ zY-*2hB?o{TVL;Wt_ek;AP5PBqfDR4@Z->_182W z{P@Mc27j6jE*9xG{R$>6_;i=y{qf(c`5w9fa*`rEzX6t!KJ(p1H|>J1pC-2zqWENF zmm=Z5B4u{cY2XYl(PfrInB*~WGWik3@1oRhiMOS|D;acnf-Bs(QCm#wR;@Vf!hOPJ zgjhDCfDj$HcyVLJ=AaTbQ{@vIv14LWWF$=i-BDoC11}V;2V8A`S>_x)vIq44-VB-v z*w-d}$G+Ql?En8j!~ZkCpQ$|cA0|+rrY>tiCeWxkRGPoarxlGU2?7%k#F693RHT24 z-?JsiXlT2PTqZqNb&sSc>$d;O4V@|b6VKSWQb~bUaWn1Cf0+K%`Q&Wc<>mQ>*iEGB zbZ;aYOotBZ{vH3y<0A*L0QVM|#rf*LIsGx(O*-7)r@yyBIzJnBFSKBUSl1e|8lxU* zzFL+YDVVkIuzFWeJ8AbgN&w(4-7zbiaMn{5!JQXu)SELk*CNL+Fro|2v|YO)1l15t zs(0^&EB6DPMyaqvY>=KL>)tEpsn;N5Q#yJj<9}ImL((SqErWN3Q=;tBO~ExTCs9hB z2E$7eN#5wX4<3m^5pdjm#5o>s#eS_Q^P)tm$@SawTqF*1dj_i#)3};JslbLKHXl_N z)Fxzf>FN)EK&Rz&*|6&%Hs-^f{V|+_vL1S;-1K-l$5xiC@}%uDuwHYhmsV?YcOUlk zOYkG5v2+`+UWqpn0aaaqrD3lYdh0*!L`3FAsNKu=Q!vJu?Yc8n|CoYyDo_`r0mPoo z8>XCo$W4>l(==h?2~PoRR*kEe)&IH{1sM41mO#-36`02m#nTX{r*r`Q5rZ2-sE|nA zhnn5T#s#v`52T5|?GNS`%HgS2;R(*|^egNPDzzH_z^W)-Q98~$#YAe)cEZ%vge965AS_am#DK#pjPRr-!^za8>`kksCAUj(Xr*1NW5~e zpypt_eJpD&4_bl_y?G%>^L}=>xAaV>KR6;^aBytqpiHe%!j;&MzI_>Sx7O%F%D*8s zSN}cS^<{iiK)=Ji`FpO#^zY!_|D)qeRNAtgmH)m;qC|mq^j(|hL`7uBz+ULUj37gj zksdbnU+LSVo35riSX_4z{UX=%n&}7s0{WuZYoSfwAP`8aKN9P@%e=~1`~1ASL-z%# zw>DO&ixr}c9%4InGc*_y42bdEk)ZdG7-mTu0bD@_vGAr*NcFoMW;@r?@LUhRI zCUJgHb`O?M3!w)|CPu~ej%fddw20lod?Ufp8Dmt0PbnA0J%KE^2~AIcnKP()025V> zG>noSM3$5Btmc$GZoyP^v1@Poz0FD(6YSTH@aD0}BXva?LphAiSz9f&Y(aDAzBnUh z?d2m``~{z;{}kZJ>a^wYI?ry(V9hIoh;|EFc0*-#*`$T0DRQ1;WsqInG;YPS+I4{g zJGpKk%%Sdc5xBa$Q^_I~(F97eqDO7AN3EN0u)PNBAb+n+ zWBTxQx^;O9o0`=g+Zrt_{lP!sgWZHW?8bLYS$;1a@&7w9rD9|Ge;Gb?sEjFoF9-6v 
z#!2)t{DMHZ2@0W*fCx;62d#;jouz`R5Y(t{BT=$N4yr^^o$ON8d{PQ=!O zX17^CrdM~7D-;ZrC!||<+FEOxI_WI3CA<35va%4v>gc zEX-@h8esj=a4szW7x{0g$hwoWRQG$yK{@3mqd-jYiVofJE!Wok1* znV7Gm&Ssq#hFuvj1sRyHg(6PFA5U*Q8Rx>-blOs=lb`qa{zFy&n4xY;sd$fE+<3EI z##W$P9M{B3c3Si9gw^jlPU-JqD~Cye;wr=XkV7BSv#6}DrsXWFJ3eUNrc%7{=^sP> zrp)BWKA9<}^R9g!0q7yWlh;gr_TEOD|#BmGq<@IV;ueg+D2}cjpp+dPf&Q(36sFU&K8}hA85U61faW&{ zlB`9HUl-WWCG|<1XANN3JVAkRYvr5U4q6;!G*MTdSUt*Mi=z_y3B1A9j-@aK{lNvx zK%p23>M&=KTCgR!Ee8c?DAO2_R?B zkaqr6^BSP!8dHXxj%N1l+V$_%vzHjqvu7p@%Nl6;>y*S}M!B=pz=aqUV#`;h%M0rU zHfcog>kv3UZAEB*g7Er@t6CF8kHDmKTjO@rejA^ULqn!`LwrEwOVmHx^;g|5PHm#B zZ+jjWgjJ!043F+&#_;D*mz%Q60=L9Ove|$gU&~As5^uz@2-BfQ!bW)Khn}G+Wyjw- z19qI#oB(RSNydn0t~;tAmK!P-d{b-@@E5|cdgOS#!>%#Rj6ynkMvaW@37E>@hJP^8 z2zk8VXx|>#R^JCcWdBCy{0nPmYFOxN55#^-rlqobe0#L6)bi?E?SPymF*a5oDDeSd zO0gx?#KMoOd&G(2O@*W)HgX6y_aa6iMCl^~`{@UR`nMQE`>n_{_aY5nA}vqU8mt8H z`oa=g0SyiLd~BxAj2~l$zRSDHxvDs;I4>+M$W`HbJ|g&P+$!U7-PHX4RAcR0szJ*( ze-417=bO2q{492SWrqDK+L3#ChUHtz*@MP)e^%@>_&#Yk^1|tv@j4%3T)diEX zATx4K*hcO`sY$jk#jN5WD<=C3nvuVsRh||qDHnc~;Kf59zr0;c7VkVSUPD%NnnJC_ zl3F^#f_rDu8l}l8qcAz0FFa)EAt32IUy_JLIhU_J^l~FRH&6-ivSpG2PRqzDdMWft>Zc(c)#tb%wgmWN%>IOPm zZi-noqS!^Ftb81pRcQi`X#UhWK70hy4tGW1mz|+vI8c*h@ zfFGJtW3r>qV>1Z0r|L>7I3un^gcep$AAWfZHRvB|E*kktY$qQP_$YG60C@X~tTQjB3%@`uz!qxtxF+LE!+=nrS^07hn` zEgAp!h|r03h7B!$#OZW#ACD+M;-5J!W+{h|6I;5cNnE(Y863%1(oH}_FTW})8zYb$7czP zg~Szk1+_NTm6SJ0MS_|oSz%e(S~P-&SFp;!k?uFayytV$8HPwuyELSXOs^27XvK-D zOx-Dl!P|28DK6iX>p#Yb%3`A&CG0X2S43FjN%IB}q(!hC$fG}yl1y9W&W&I@KTg6@ zK^kpH8=yFuP+vI^+59|3%Zqnb5lTDAykf z9S#X`3N(X^SpdMyWQGOQRjhiwlj!0W-yD<3aEj^&X%=?`6lCy~?`&WSWt z?U~EKFcCG_RJ(Qp7j=$I%H8t)Z@6VjA#>1f@EYiS8MRHZphp zMA_5`znM=pzUpBPO)pXGYpQ6gkine{6u_o!P@Q+NKJ}k!_X7u|qfpAyIJb$_#3@wJ z<1SE2Edkfk9C!0t%}8Yio09^F`YGzpaJHGk*-ffsn85@)%4@`;Fv^8q(-Wk7r=Q8p zT&hD`5(f?M{gfzGbbwh8(}G#|#fDuk7v1W)5H9wkorE0ZZjL0Q1=NRGY>zwgfm81DdoaVwNH;or{{eSyybt)m<=zXoA^RALYG-2t zouH|L*BLvmm9cdMmn+KGopyR@4*=&0&4g|FLoreZOhRmh=)R0bg~ 
zT2(8V_q7~42-zvb)+y959OAv!V$u(O3)%Es0M@CRFmG{5sovIq4%8Ahjk#*5w{+)+ zMWQoJI_r$HxL5km1#6(e@{lK3Udc~n0@g`g$s?VrnQJ$!oPnb?IHh-1qA`Rz$)Ai< z6w$-MJW-gKNvOhL+XMbE7&mFt`x1KY>k4(!KbbpZ`>`K@1J<(#vVbjx@Z@(6Q}MF# zMnbr-f55(cTa^q4+#)=s+ThMaV~E`B8V=|W_fZWDwiso8tNMTNse)RNBGi=gVwgg% zbOg8>mbRN%7^Um-7oj4=6`$|(K7!+t^90a{$18Z>}<#!bm%ZEFQ{X(yBZMc>lCz0f1I2w9Sq zuGh<9<=AO&g6BZte6hn>Qmvv;Rt)*cJfTr2=~EnGD8P$v3R|&1RCl&7)b+`=QGapi zPbLg_pxm`+HZurtFZ;wZ=`Vk*do~$wB zxoW&=j0OTbQ=Q%S8XJ%~qoa3Ea|au5o}_(P;=!y-AjFrERh%8la!z6Fn@lR?^E~H12D?8#ht=1F;7@o4$Q8GDj;sSC%Jfn01xgL&%F2 zwG1|5ikb^qHv&9hT8w83+yv&BQXOQyMVJSBL(Ky~p)gU3#%|blG?IR9rP^zUbs7rOA0X52Ao=GRt@C&zlyjNLv-} z9?*x{y(`509qhCV*B47f2hLrGl^<@SuRGR!KwHei?!CM10Tq*YDIoBNyRuO*>3FU? zHjipIE#B~y3FSfOsMfj~F9PNr*H?0oHyYB^G(YyNh{SxcE(Y-`x5jFMKb~HO*m+R% zrq|ic4fzJ#USpTm;X7K+E%xsT_3VHKe?*uc4-FsILUH;kL>_okY(w`VU*8+l>o>Jm ziU#?2^`>arnsl#)*R&nf_%>A+qwl%o{l(u)M?DK1^mf260_oteV3#E_>6Y4!_hhVD zM8AI6MM2V*^_M^sQ0dmHu11fy^kOqXqzpr?K$`}BKWG`=Es(9&S@K@)ZjA{lj3ea7_MBP zk(|hBFRjHVMN!sNUkrB;(cTP)T97M$0Dtc&UXSec<+q?y>5=)}S~{Z@ua;1xt@=T5 zI7{`Z=z_X*no8s>mY;>BvEXK%b`a6(DTS6t&b!vf_z#HM{Uoy_5fiB(zpkF{})ruka$iX*~pq1ZxD?q68dIo zIZSVls9kFGsTwvr4{T_LidcWtt$u{kJlW7moRaH6+A5hW&;;2O#$oKyEN8kx`LmG)Wfq4ykh+q{I3|RfVpkR&QH_x;t41Uw z`P+tft^E2B$domKT@|nNW`EHwyj>&}K;eDpe z1bNOh=fvIfk`&B61+S8ND<(KC%>y&?>opCnY*r5M+!UrWKxv0_QvTlJc>X#AaI^xo zaRXL}t5Ej_Z$y*|w*$6D+A?Lw-CO-$itm^{2Ct82-<0IW)0KMNvJHgBrdsIR0v~=H z?n6^}l{D``Me90`^o|q!olsF?UX3YSq^6Vu>Ijm>>PaZI8G@<^NGw{Cx&%|PwYrfw zR!gX_%AR=L3BFsf8LxI|K^J}deh0ZdV?$3r--FEX`#INxsOG6_=!v)DI>0q|BxT)z z-G6kzA01M?rba+G_mwNMQD1mbVbNTWmBi*{s_v_Ft9m2Avg!^78(QFu&n6mbRJ2bA zv!b;%yo{g*9l2)>tsZJOOp}U~8VUH`}$ z8p_}t*XIOehezolNa-a2x0BS})Y9}&*TPgua{Ewn-=wVrmJUeU39EKx+%w%=ixQWK zDLpwaNJs65#6o7Ln7~~X+p_o2BR1g~VCfxLzxA{HlWAI6^H;`juI=&r1jQrUv_q0Z z1Ja-tjdktrrP>GOC*#p?*xfQU5MqjMsBe!9lh(u8)w$e@Z|>aUHI5o;MGw*|Myiz3 z-f0;pHg~Q#%*Kx8MxH%AluVXjG2C$)WL-K63@Q`#y9_k_+}eR(x4~dp7oV-ek0H>I zgy8p#i4GN{>#v=pFYUQT(g&b$OeTy-X_#FDgNF8XyfGY6R!>inYn8IR2RDa&O!(6< 
znXs{W!bkP|s_YI*Yx%4stI`=ZO45IK6rBs`g7sP40ic}GZ58s?Mc$&i`kq_tfci>N zIHrC0H+Qpam1bNa=(`SRKjixBTtm&e`j9porEci!zdlg1RI0Jw#b(_Tb@RQK1Zxr_ z%7SUeH6=TrXt3J@js`4iDD0=IoHhK~I7^W8^Rcp~Yaf>2wVe|Hh1bUpX9ATD#moByY57-f2Ef1TP^lBi&p5_s7WGG9|0T}dlfxOx zXvScJO1Cnq`c`~{Dp;{;l<-KkCDE+pmexJkd}zCgE{eF=)K``-qC~IT6GcRog_)!X z?fK^F8UDz$(zFUrwuR$qro5>qqn>+Z%<5>;_*3pZ8QM|yv9CAtrAx;($>4l^_$_-L z*&?(77!-=zvnCVW&kUcZMb6;2!83si518Y%R*A3JZ8Is|kUCMu`!vxDgaWjs7^0j( ziTaS4HhQ)ldR=r)_7vYFUr%THE}cPF{0H45FJ5MQW^+W>P+eEX2kLp3zzFe*-pFVA zdDZRybv?H|>`9f$AKVjFWJ=wegO7hOOIYCtd?Vj{EYLT*^gl35|HQ`R=ti+ADm{jyQE7K@kdjuqJhWVSks>b^ zxha88-h3s;%3_5b1TqFCPTxVjvuB5U>v=HyZ$?JSk+&I%)M7KE*wOg<)1-Iy)8-K! z^XpIt|0ibmk9RtMmlUd7#Ap3Q!q9N4atQy)TmrhrFhfx1DAN`^vq@Q_SRl|V z#lU<~n67$mT)NvHh`%als+G-)x1`Y%4Bp*6Un5Ri9h=_Db zA-AdP!f>f0m@~>7X#uBM?diI@)Egjuz@jXKvm zJo+==juc9_<;CqeRaU9_Mz@;3e=E4=6TK+c`|uu#pIqhSyNm`G(X)&)B`8q0RBv#> z`gGlw(Q=1Xmf55VHj%C#^1lpc>LY8kfA@|rlC1EA<1#`iuyNO z(=;irt{_&K=i4)^x%;U(Xv<)+o=dczC5H3W~+e|f~{*ucxj@{Yi-cw^MqYr3fN zF5D+~!wd$#al?UfMnz(@K#wn`_5na@rRr8XqN@&M&FGEC@`+OEv}sI1hw>Up0qAWf zL#e4~&oM;TVfjRE+10B_gFlLEP9?Q-dARr3xi6nQqnw>k-S;~b z;!0s2VS4}W8b&pGuK=7im+t(`nz@FnT#VD|!)eQNp-W6)@>aA+j~K*H{$G`y2|QHY z|Hmy+CR@#jWY4~)lr1qBJB_RfHJFfP<}pK5(#ZZGSqcpyS&}01LnTWk5fzmXMGHkJ zTP6L^B+uj;lmB_W<~4=${+v0>z31M!-_O@o-O9GyW)j_mjx}!0@br_LE-7SIuPP84 z;5=O(U*g_um0tyG|61N@d9lEuOeiRd+#NY^{nd5;-CVlw&Ap7J?qwM^?E29wvS}2d zbzar4Fz&RSR(-|s!Z6+za&Z zY#D<5q_JUktIzvL0)yq_kLWG6DO{ri=?c!y!f(Dk%G{8)k`Gym%j#!OgXVDD3;$&v@qy#ISJfp=Vm>pls@9-mapVQChAHHd-x+OGx)(*Yr zC1qDUTZ6mM(b_hi!TuFF2k#8uI2;kD70AQ&di$L*4P*Y-@p`jdm%_c3f)XhYD^6M8&#Y$ZpzQMcR|6nsH>b=*R_Von!$BTRj7yGCXokoAQ z&ANvx0-Epw`QIEPgI(^cS2f(Y85yV@ygI{ewyv5Frng)e}KCZF7JbR(&W618_dcEh(#+^zZFY;o<815<5sOHQdeax9_!PyM&;{P zkBa5xymca0#)c#tke@3KNEM8a_mT&1gm;p&&JlMGH(cL(b)BckgMQ^9&vRwj!~3@l zY?L5}=Jzr080OGKb|y`ee(+`flQg|!lo6>=H)X4`$Gz~hLmu2a%kYW_Uu8x09Pa0J zKZ`E$BKJ=2GPj_3l*TEcZ*uYRr<*J^#5pILTT;k_cgto1ZL-%slyc16J~OH-(RgDA z%;EjEnoUkZ&acS{Q8`{i6T5^nywgqQI5bDIymoa7CSZG|WWVk>GM9)zy*bNih|QIm 
z%0+(Nnc*a_xo;$=!HQYaapLms>J1ToyjtFByY`C2H1wT#178#4+|{H0BBqtCdd$L% z_3Hc60j@{t9~MjM@LBalR&6@>B;9?r<7J~F+WXyYu*y3?px*=8MAK@EA+jRX8{CG?GI-< z54?Dc9CAh>QTAvyOEm0^+x;r2BWX|{3$Y7)L5l*qVE*y0`7J>l2wCmW zL1?|a`pJ-l{fb_N;R(Z9UMiSj6pQjOvQ^%DvhIJF!+Th7jO2~1f1N+(-TyCFYQZYw z4)>7caf^Ki_KJ^Zx2JUb z&$3zJy!*+rCV4%jqwyuNY3j1ZEiltS0xTzd+=itTb;IPYpaf?8Y+RSdVdpacB(bVQ zC(JupLfFp8y43%PMj2}T|VS@%LVp>hv4Y!RPMF?pp8U_$xCJ)S zQx!69>bphNTIb9yn*_yfj{N%bY)t{L1cs8<8|!f$;UQ*}IN=2<6lA;x^(`8t?;+ST zh)z4qeYYgZkIy{$4x28O-pugO&gauRh3;lti9)9Pvw+^)0!h~%m&8Q!AKX%urEMnl z?yEz?g#ODn$UM`+Q#$Q!6|zsq_`dLO5YK-6bJM6ya>}H+vnW^h?o$z;V&wvuM$dR& zeEq;uUUh$XR`TWeC$$c&Jjau2it3#%J-y}Qm>nW*s?En?R&6w@sDXMEr#8~$=b(gk zwDC3)NtAP;M2BW_lL^5ShpK$D%@|BnD{=!Tq)o(5@z3i7Z){} zGr}Exom_qDO{kAVkZ*MbLNHE666Kina#D{&>Jy%~w7yX$oj;cYCd^p9zy z8*+wgSEcj$4{WxKmCF(5o7U4jqwEvO&dm1H#7z}%VXAbW&W24v-tS6N3}qrm1OnE)fUkoE8yMMn9S$?IswS88tQWm4#Oid#ckgr6 zRtHm!mfNl-`d>O*1~d7%;~n+{Rph6BBy^95zqI{K((E!iFQ+h*C3EsbxNo_aRm5gj zKYug($r*Q#W9`p%Bf{bi6;IY0v`pB^^qu)gbg9QHQ7 zWBj(a1YSu)~2RK8Pi#C>{DMlrqFb9e_RehEHyI{n?e3vL_}L>kYJC z_ly$$)zFi*SFyNrnOt(B*7E$??s67EO%DgoZL2XNk8iVx~X_)o++4oaK1M|ou73vA0K^503j@uuVmLcHH4ya-kOIDfM%5%(E z+Xpt~#7y2!KB&)PoyCA+$~DXqxPxxALy!g-O?<9+9KTk4Pgq4AIdUkl`1<1#j^cJg zgU3`0hkHj_jxV>`Y~%LAZl^3o0}`Sm@iw7kwff{M%VwtN)|~!p{AsfA6vB5UolF~d zHWS%*uBDt<9y!9v2Xe|au&1j&iR1HXCdyCjxSgG*L{wmTD4(NQ=mFjpa~xooc6kju z`~+d{j7$h-;HAB04H!Zscu^hZffL#9!p$)9>sRI|Yovm)g@F>ZnosF2EgkU3ln0bR zTA}|+E(tt)!SG)-bEJi_0m{l+(cAz^pi}`9=~n?y&;2eG;d9{M6nj>BHGn(KA2n|O zt}$=FPq!j`p&kQ8>cirSzkU0c08%8{^Qyqi-w2LoO8)^E7;;I1;HQ6B$u0nNaX2CY zSmfi)F`m94zL8>#zu;8|{aBui@RzRKBlP1&mfFxEC@%cjl?NBs`cr^nm){>;$g?rhKr$AO&6qV_Wbn^}5tfFBry^e1`%du2~o zs$~dN;S_#%iwwA_QvmMjh%Qo?0?rR~6liyN5Xmej8(*V9ym*T`xAhHih-v$7U}8=dfXi2i*aAB!xM(Xekg*ix@r|ymDw*{*s0?dlVys2e)z62u1 z+k3esbJE=-P5S$&KdFp+2H7_2e=}OKDrf( z9-207?6$@f4m4B+9E*e((Y89!q?zH|mz_vM>kp*HGXldO0Hg#!EtFhRuOm$u8e~a9 z5(roy7m$Kh+zjW6@zw{&20u?1f2uP&boD}$#Zy)4o&T;vyBoqFiF2t;*g=|1=)PxB z8eM3Mp=l_obbc?I^xyLz?4Y1YDWPa+nm;O<$Cn;@ane616`J9OO2r=rZr{I_Kizyc 
zP#^^WCdIEp*()rRT+*YZK>V@^Zs=ht32x>Kwe zab)@ZEffz;VM4{XA6e421^h~`ji5r%)B{wZu#hD}f3$y@L0JV9f3g{-RK!A?vBUA}${YF(vO4)@`6f1 z-A|}e#LN{)(eXloDnX4Vs7eH|<@{r#LodP@Nz--$Dg_Par%DCpu2>2jUnqy~|J?eZ zBG4FVsz_A+ibdwv>mLp>P!(t}E>$JGaK$R~;fb{O3($y1ssQQo|5M;^JqC?7qe|hg zu0ZOqeFcp?qVn&Qu7FQJ4hcFi&|nR!*j)MF#b}QO^lN%5)4p*D^H+B){n8%VPUzi! zDihoGcP71a6!ab`l^hK&*dYrVYzJ0)#}xVrp!e;lI!+x+bfCN0KXwUAPU9@#l7@0& QuEJmfE|#`Dqx|px0L@K;Y5)KL literal 0 HcmV?d00001 diff --git a/lib/sdk/server/contract-tests/gradle/wrapper/gradle-wrapper.properties b/lib/sdk/server/contract-tests/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..070cb70 --- /dev/null +++ b/lib/sdk/server/contract-tests/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/lib/sdk/server/contract-tests/gradlew b/lib/sdk/server/contract-tests/gradlew new file mode 100755 index 0000000..1b6c787 --- /dev/null +++ b/lib/sdk/server/contract-tests/gradlew @@ -0,0 +1,234 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. 
+# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. 
+while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +APP_NAME="Gradle" +APP_BASE_NAME=${0##*/} + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! 
"$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. 
+ +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/lib/sdk/server/contract-tests/gradlew.bat b/lib/sdk/server/contract-tests/gradlew.bat new file mode 100644 index 0000000..ac1b06f --- /dev/null +++ b/lib/sdk/server/contract-tests/gradlew.bat @@ -0,0 +1,89 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/lib/sdk/server/contract-tests/service/build.gradle b/lib/sdk/server/contract-tests/service/build.gradle new file mode 100644 index 0000000..64de63e --- /dev/null +++ b/lib/sdk/server/contract-tests/service/build.gradle @@ -0,0 +1,48 @@ + +plugins { + id "java" + id "application" +} + +repositories { + mavenCentral() + mavenLocal() + maven { url "https://oss.sonatype.org/content/groups/public/" } +} + +configurations.all { + // check for updates every build for dependencies with: 'changing: true' + resolutionStrategy.cacheChangingModulesFor 0, 'seconds' +} + +allprojects { + sourceCompatibility = 1.8 + targetCompatibility = 1.8 +} + +archivesBaseName = "java-sdk-test-service" + +application { + mainClassName = "sdktest.TestService" +} + +ext.versions = [ + "gson": "2.7", + "logback": "1.1.3", + "okhttp": "4.5.0", + "testHelpers": "2.0.1", + "launchdarklyJavaSdkCommon": project(":sdk").versions["launchdarklyJavaSdkCommon"] +] + +configurations { + deps.extendsFrom(implementation) +} + +dependencies { + implementation project(":sdk") + implementation "com.launchdarkly:launchdarkly-java-sdk-common:${versions.launchdarklyJavaSdkCommon}" + implementation "ch.qos.logback:logback-classic:${versions.logback}" + implementation "com.google.code.gson:gson:${versions.gson}" + implementation "com.squareup.okhttp3:okhttp:${versions.okhttp}" + implementation "com.launchdarkly:test-helpers:${versions.testHelpers}" +} diff --git a/lib/sdk/server/contract-tests/service/settings.gradle b/lib/sdk/server/contract-tests/service/settings.gradle new file mode 100644 index 0000000..e69de29 diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackRepresentation.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackRepresentation.java new file mode 100644 index 0000000..70bb31f --- /dev/null +++ 
b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackRepresentation.java @@ -0,0 +1,17 @@ +package sdktest; + +import java.util.Map; + +public abstract class BigSegmentCallbackRepresentation { + public static class BigSegmentStoreGetMetadataResponse { + Long lastUpToDate; + } + + public static class BigSegmentStoreGetMembershipParams { + String contextHash; + } + + public static class BigSegmentStoreGetMembershipResponse { + Map values; + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackService.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackService.java new file mode 100644 index 0000000..977ebbc --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentCallbackService.java @@ -0,0 +1,57 @@ +package sdktest; + +import java.net.URI; + +import okhttp3.MediaType; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; + +public class BigSegmentCallbackService { + private final URI baseUri; + + public BigSegmentCallbackService(URI baseUri) { + this.baseUri = baseUri; + } + + public void close() { + try { + Request request = new Request.Builder().url(baseUri.toURL()).method("DELETE", null).build(); + Response response = TestService.client.newCall(request).execute(); + assertOk(response, ""); + } catch (Exception e) { + throw new RuntimeException(e); // all errors are unexpected here + } + } + + public T post(String path, Object params, Class responseClass) { + try { + String uri = baseUri.toString() + path; + RequestBody body = RequestBody.create( + TestService.gson.toJson(params == null ? "{}" : params), + MediaType.parse("application/json")); + Request request = new Request.Builder().url(uri). 
+ method("POST", body).build(); + Response response = TestService.client.newCall(request).execute(); + assertOk(response, path); + if (responseClass == null) { + return null; + } + return TestService.gson.fromJson(response.body().string(), responseClass); + } catch (Exception e) { + throw new RuntimeException(e); // all errors are unexpected here + } + } + + private void assertOk(Response response, String path) { + if (!response.isSuccessful()) { + String body = ""; + if (response.body() != null) { + try { + body = ": " + response.body().string(); + } catch (Exception e) {} + } + throw new RuntimeException("HTTP error " + response.code() + " from callback to " + baseUri + path + body); + } + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentStoreFixture.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentStoreFixture.java new file mode 100644 index 0000000..dbdb4d4 --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/BigSegmentStoreFixture.java @@ -0,0 +1,52 @@ +package sdktest; + +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.Membership; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.StoreMetadata; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; + +import java.io.IOException; + +import sdktest.BigSegmentCallbackRepresentation.BigSegmentStoreGetMembershipParams; +import sdktest.BigSegmentCallbackRepresentation.BigSegmentStoreGetMembershipResponse; +import sdktest.BigSegmentCallbackRepresentation.BigSegmentStoreGetMetadataResponse; + +public class BigSegmentStoreFixture implements BigSegmentStore, ComponentConfigurer { + private final BigSegmentCallbackService service; + + public BigSegmentStoreFixture(BigSegmentCallbackService service) { + this.service = service; + } + + @Override + public 
void close() throws IOException { + service.close(); + } + + @Override + public Membership getMembership(String contextHash) { + BigSegmentStoreGetMembershipParams params = new BigSegmentStoreGetMembershipParams(); + params.contextHash = contextHash; + BigSegmentStoreGetMembershipResponse resp = + service.post("/getMembership", params, BigSegmentStoreGetMembershipResponse.class); + return new Membership() { + @Override + public Boolean checkMembership(String segmentRef) { + return resp.values == null ? null : resp.values.get(segmentRef); + } + }; + } + + @Override + public StoreMetadata getMetadata() { + BigSegmentStoreGetMetadataResponse resp = + service.post("/getMetadata", null, BigSegmentStoreGetMetadataResponse.class); + return new StoreMetadata(resp.lastUpToDate); + } + + @Override + public BigSegmentStore build(ClientContext context) { + return this; + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/HookCallbackService.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/HookCallbackService.java new file mode 100644 index 0000000..8ee5b5c --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/HookCallbackService.java @@ -0,0 +1,42 @@ +package sdktest; + +import okhttp3.MediaType; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; + +import java.net.URI; + +public class HookCallbackService { + private final URI serviceUri; + + public HookCallbackService(URI serviceUri) { + this.serviceUri = serviceUri; + } + + public void post(Object params) { + try { + RequestBody body = RequestBody.create( + TestService.gson.toJson(params == null ? "{}" : params), + MediaType.parse("application/json")); + Request request = new Request.Builder().url(serviceUri.toString()). 
+ method("POST", body).build(); + Response response = TestService.client.newCall(request).execute(); + assertOk(response); + } catch (Exception e) { + throw new RuntimeException(e); // all errors are unexpected here + } + } + + private void assertOk(Response response) { + if (!response.isSuccessful()) { + String body = ""; + if (response.body() != null) { + try { + body = ": " + response.body().string(); + } catch (Exception e) {} + } + throw new RuntimeException("HTTP error " + response.code() + " from callback to " + serviceUri + body); + } + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/MigrationCallbackService.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/MigrationCallbackService.java new file mode 100644 index 0000000..35d5448 --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/MigrationCallbackService.java @@ -0,0 +1,33 @@ +package sdktest; + +import okhttp3.MediaType; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; + +import java.net.URL; + +public class MigrationCallbackService { + private final URL serviceUrl; + + public MigrationCallbackService(URL serviceUrl) { + this.serviceUrl = serviceUrl; + } + + public String post(String payload) { + try { + RequestBody body = RequestBody.create( + payload != null ? payload : "", + MediaType.parse("application/text")); + Request request = new Request.Builder().url(serviceUrl). 
+ method("POST", body).build(); + Response response = TestService.client.newCall(request).execute(); + if(!response.isSuccessful()) { + throw new RuntimeException("Non success status code."); + } + return response.body().string(); + } catch (Exception e) { + throw new RuntimeException(e); // all errors are unexpected here + } + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/Representations.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/Representations.java new file mode 100644 index 0000000..5447cfe --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/Representations.java @@ -0,0 +1,223 @@ +package sdktest; + +import com.google.gson.annotations.SerializedName; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +public abstract class Representations { + public static class Status { + String name; + String[] capabilities; + String clientVersion; + } + + public static class CreateInstanceParams { + SdkConfigParams configuration; + String tag; + } + + public static class SdkConfigParams { + String credential; + Long startWaitTimeMs; + boolean initCanFail; + SdkConfigStreamParams streaming; + SdkConfigEventParams events; + SdkConfigBigSegmentsParams bigSegments; + SdkConfigTagParams tags; + SdkConfigServiceEndpointParams serviceEndpoints; + SdkConfigHookParams hooks; + } + + public static class SdkConfigStreamParams { + URI baseUri; + long initialRetryDelayMs; + String filter; + } + + public static class SdkConfigEventParams { + URI baseUri; + boolean allAttributesPrivate; + int capacity; + boolean enableDiagnostics; + String[] globalPrivateAttributes; + Long flushIntervalMs; + } + + public static class SdkConfigBigSegmentsParams { + URI callbackUri; + Integer userCacheSize; + Long userCacheTimeMs; + 
Long statusPollIntervalMs; + Long staleAfterMs; + } + + public static class SdkConfigTagParams { + String applicationId; + String applicationVersion; + } + + public static class SdkConfigServiceEndpointParams { + String streaming; + String polling; + String events; + } + + public static class SdkConfigHookParams { + List hooks; + } + + public static class HookConfig { + String name; + URI callbackUri; + HookData data; + HookErrors errors; + } + + public static class HookData { + Map beforeEvaluation; + Map afterEvaluation; + } + + public static class HookErrors { + String beforeEvaluation; + String afterEvaluation; + } + + public static class CommandParams { + String command; + EvaluateFlagParams evaluate; + EvaluateAllFlagsParams evaluateAll; + IdentifyEventParams identifyEvent; + CustomEventParams customEvent; + ContextBuildParams contextBuild; + ContextConvertParams contextConvert; + SecureModeHashParams secureModeHash; + + MigrationVariationParams migrationVariation; + + MigrationOperationParams migrationOperation; + } + + public static class EvaluateFlagParams { + String flagKey; + LDContext context; + String valueType; + LDValue value; + LDValue defaultValue; + boolean detail; + } + + public static class EvaluateFlagResponse { + LDValue value; + Integer variationIndex; + EvaluationReason reason; + } + + public static class EvaluateAllFlagsParams { + LDContext context; + boolean clientSideOnly; + boolean detailsOnlyForTrackedFlags; + boolean withReasons; + } + + public static class EvaluateAllFlagsResponse { + LDValue state; + } + + public static class EvaluationHookCallbackParams { + EvaluationSeriesContextParam evaluationSeriesContext; + Map evaluationSeriesData; + EvaluationDetail evaluationDetail; + String stage; + } + + public static class EvaluationSeriesContextParam { + String flagKey; + LDContext context; + LDValue defaultValue; + String method; + } + + public static class IdentifyEventParams { + LDContext context; + } + + public static class 
CustomEventParams { + String eventKey; + LDContext context; + LDValue data; + boolean omitNullData; + Double metricValue; + } + + public static class GetBigSegmentsStoreStatusResponse { + boolean available; + boolean stale; + } + + public static class ContextBuildParams { + ContextBuildSingleParams single; + ContextBuildSingleParams[] multi; + } + + public static class ContextBuildSingleParams { + public String kind; + public String key; + public String name; + public Boolean anonymous; + @SerializedName("private") public String[] privateAttrs; + public Map custom; + } + + public static class ContextBuildResponse { + String output; + String error; + } + + public static class ContextConvertParams { + String input; + } + + public static class SecureModeHashParams { + LDContext context; + } + + public static class SecureModeHashResponse { + String result; + } + + public static class MigrationVariationParams { + String key; + LDContext context; + String defaultStage; + } + + public static class MigrationVariationResponse { + String result; + } + + public static class MigrationOperationParams { + String operation; + LDContext context; + String key; + String defaultStage; + String payload; + String readExecutionOrder; + boolean trackConsistency; + boolean trackLatency; + boolean trackErrors; + String oldEndpoint; + String newEndpoint; + } + + public static class MigrationOperationResponse { + String result; + String error; + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/SdkClientEntity.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/SdkClientEntity.java new file mode 100644 index 0000000..c7f43b0 --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/SdkClientEntity.java @@ -0,0 +1,469 @@ +package sdktest; + +import com.launchdarkly.sdk.ContextBuilder; +import com.launchdarkly.sdk.ContextMultiBuilder; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import 
com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.json.JsonSerialization; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.FeatureFlagsState; +import com.launchdarkly.sdk.server.FlagsStateOption; +import com.launchdarkly.sdk.server.LDClient; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.migrations.Migration; +import com.launchdarkly.sdk.server.migrations.MigrationBuilder; +import com.launchdarkly.sdk.server.migrations.MigrationExecution; +import com.launchdarkly.sdk.server.migrations.MigrationMethodResult; +import com.launchdarkly.sdk.server.migrations.MigrationSerialOrder; +import com.launchdarkly.sdk.server.MigrationStage; +import com.launchdarkly.sdk.server.MigrationVariation; +import com.launchdarkly.sdk.server.integrations.Hook; +import com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder; +import com.launchdarkly.sdk.server.integrations.BigSegmentsConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.EventProcessorBuilder; +import com.launchdarkly.sdk.server.integrations.HooksConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder; +import com.launchdarkly.sdk.server.integrations.StreamingDataSourceBuilder; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; + +import org.jetbrains.annotations.NotNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.URL; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import sdktest.Representations.CommandParams; +import sdktest.Representations.ContextBuildParams; +import sdktest.Representations.ContextBuildResponse; +import sdktest.Representations.ContextBuildSingleParams; +import sdktest.Representations.ContextConvertParams; +import sdktest.Representations.CreateInstanceParams; +import 
sdktest.Representations.CustomEventParams; +import sdktest.Representations.EvaluateAllFlagsParams; +import sdktest.Representations.EvaluateAllFlagsResponse; +import sdktest.Representations.EvaluateFlagParams; +import sdktest.Representations.EvaluateFlagResponse; +import sdktest.Representations.GetBigSegmentsStoreStatusResponse; +import sdktest.Representations.IdentifyEventParams; +import sdktest.Representations.HookConfig; +import sdktest.Representations.SdkConfigHookParams; +import sdktest.Representations.SdkConfigParams; +import sdktest.Representations.SecureModeHashParams; +import sdktest.Representations.SecureModeHashResponse; + +public class SdkClientEntity { + private final LDClient client; + final Logger logger; + + public SdkClientEntity(TestService owner, CreateInstanceParams params) { + this.logger = LoggerFactory.getLogger(params.tag); + logger.info("Starting SDK client"); + + LDConfig config = buildSdkConfig(params.configuration, params.tag); + this.client = new LDClient(params.configuration.credential, config); + if (!client.isInitialized() && !params.configuration.initCanFail) { + throw new RuntimeException("client initialization failed or timed out"); + } + } + + public Object doCommand(CommandParams params) throws TestService.BadRequestException { + logger.info("Test harness sent command: {}", TestService.gson.toJson(params)); + switch (params.command) { + case "evaluate": + return doEvaluateFlag(params.evaluate); + case "evaluateAll": + return doEvaluateAll(params.evaluateAll); + case "identifyEvent": + doIdentifyEvent(params.identifyEvent); + return null; + case "customEvent": + doCustomEvent(params.customEvent); + return null; + case "flushEvents": + client.flush(); + return null; + case "getBigSegmentStoreStatus": + BigSegmentStoreStatusProvider.Status status = client.getBigSegmentStoreStatusProvider().getStatus(); + GetBigSegmentsStoreStatusResponse resp = new GetBigSegmentsStoreStatusResponse(); + resp.available = status.isAvailable(); + 
resp.stale = status.isStale(); + return resp; + case "contextBuild": + return doContextBuild(params.contextBuild); + case "contextConvert": + return doContextConvert(params.contextConvert); + case "secureModeHash": + return doSecureModeHash(params.secureModeHash); + case "migrationVariation": + Representations.MigrationVariationParams mvp = params.migrationVariation; + if(!MigrationStage.isStage(mvp.defaultStage)) { + logger.error("The default state for a migration variation was not valid. Received: {}", mvp.defaultStage); + } + MigrationVariation variation = client.migrationVariation(mvp.key, mvp.context, + MigrationStage.of(mvp.defaultStage, MigrationStage.OFF)); + Representations.MigrationVariationResponse res = new Representations.MigrationVariationResponse(); + res.result = variation.getStage().toString(); + return res; + case "migrationOperation": + return doMigrationOperation(params.migrationOperation); + default: + throw new TestService.BadRequestException("unknown command: " + params.command); + } + } + + private EvaluateFlagResponse doEvaluateFlag(EvaluateFlagParams params) { + EvaluateFlagResponse resp = new EvaluateFlagResponse(); + if (params.detail) { + EvaluationDetail genericResult; + switch (params.valueType) { + case "bool": + EvaluationDetail boolResult = + client.boolVariationDetail(params.flagKey, params.context, params.defaultValue.booleanValue()); + resp.value = LDValue.of(boolResult.getValue()); + genericResult = boolResult; + break; + case "int": + EvaluationDetail intResult = + client.intVariationDetail(params.flagKey, params.context, params.defaultValue.intValue()); + resp.value = LDValue.of(intResult.getValue()); + genericResult = intResult; + break; + case "double": + EvaluationDetail doubleResult = + client.doubleVariationDetail(params.flagKey, params.context, params.defaultValue.doubleValue()); + resp.value = LDValue.of(doubleResult.getValue()); + genericResult = doubleResult; + break; + case "string": + EvaluationDetail stringResult 
= + client.stringVariationDetail(params.flagKey, params.context, params.defaultValue.stringValue()); + resp.value = LDValue.of(stringResult.getValue()); + genericResult = stringResult; + break; + default: + EvaluationDetail anyResult = + client.jsonValueVariationDetail(params.flagKey, params.context, params.defaultValue); + resp.value = anyResult.getValue(); + genericResult = anyResult; + break; + } + resp.variationIndex = genericResult.getVariationIndex() == EvaluationDetail.NO_VARIATION ? + null : Integer.valueOf(genericResult.getVariationIndex()); + resp.reason = genericResult.getReason(); + } else { + switch (params.valueType) { + case "bool": + resp.value = LDValue.of( + client.boolVariation(params.flagKey, params.context, params.defaultValue.booleanValue())); + break; + case "int": + resp.value = LDValue.of( + client.intVariation(params.flagKey, params.context, params.defaultValue.intValue())); + break; + case "double": + resp.value = LDValue.of( + client.doubleVariation(params.flagKey, params.context, params.defaultValue.doubleValue())); + break; + case "string": + resp.value = LDValue.of( + client.stringVariation(params.flagKey, params.context, params.defaultValue.stringValue())); + break; + default: + resp.value = + client.jsonValueVariation(params.flagKey, params.context, params.defaultValue); + break; + } + } + return resp; + } + + private EvaluateAllFlagsResponse doEvaluateAll(EvaluateAllFlagsParams params) { + List options = new ArrayList<>(); + if (params.clientSideOnly) { + options.add(FlagsStateOption.CLIENT_SIDE_ONLY); + } + if (params.detailsOnlyForTrackedFlags) { + options.add(FlagsStateOption.DETAILS_ONLY_FOR_TRACKED_FLAGS); + } + if (params.withReasons) { + options.add(FlagsStateOption.WITH_REASONS); + } + FeatureFlagsState state = client.allFlagsState(params.context, options.toArray(new FlagsStateOption[0])); + EvaluateAllFlagsResponse resp = new EvaluateAllFlagsResponse(); + resp.state = LDValue.parse(JsonSerialization.serialize(state)); + 
return resp; + } + + private void doIdentifyEvent(IdentifyEventParams params) { + client.identify(params.context); + } + + private void doCustomEvent(CustomEventParams params) { + if ((params.data == null || params.data.isNull()) && params.omitNullData && params.metricValue == null) { + client.track(params.eventKey, params.context); + } else if (params.metricValue == null) { + client.trackData(params.eventKey, params.context, params.data); + } else { + client.trackMetric(params.eventKey, params.context, params.data, params.metricValue.doubleValue()); + } + } + + private ContextBuildResponse doContextBuild(ContextBuildParams params) { + LDContext c; + if (params.multi == null) { + c = doContextBuildSingle(params.single); + } else { + ContextMultiBuilder b = LDContext.multiBuilder(); + for (ContextBuildSingleParams s : params.multi) { + b.add(doContextBuildSingle(s)); + } + c = b.build(); + } + ContextBuildResponse resp = new ContextBuildResponse(); + if (c.isValid()) { + resp.output = JsonSerialization.serialize(c); + } else { + resp.error = c.getError(); + } + return resp; + } + + private LDContext doContextBuildSingle(ContextBuildSingleParams params) { + ContextBuilder b = LDContext.builder(params.key) + .kind(params.kind) + .name(params.name); + if (params.anonymous != null) { + b.anonymous(params.anonymous.booleanValue()); + } + if (params.custom != null) { + for (Map.Entry kv : params.custom.entrySet()) { + b.set(kv.getKey(), kv.getValue()); + } + } + if (params.privateAttrs != null) { + b.privateAttributes(params.privateAttrs); + } + return b.build(); + } + + private ContextBuildResponse doContextConvert(ContextConvertParams params) { + ContextBuildResponse resp = new ContextBuildResponse(); + try { + LDContext c = JsonSerialization.deserialize(params.input, LDContext.class); + resp.output = JsonSerialization.serialize(c); + } catch (Exception e) { + resp.error = e.getMessage(); + } + return resp; + } + + private SecureModeHashResponse 
doSecureModeHash(SecureModeHashParams params) { + SecureModeHashResponse resp = new SecureModeHashResponse(); + resp.result = client.secureModeHash(params.context); + return resp; + } + + private Representations.MigrationOperationResponse doMigrationOperation(Representations.MigrationOperationParams params) { + MigrationCallbackService oldService; + MigrationCallbackService newService; + try { + oldService = new MigrationCallbackService(new URL(params.oldEndpoint)); + newService = new MigrationCallbackService(new URL(params.newEndpoint)); + } catch (Exception e) { + return null; + } + MigrationBuilder migrationBuilder = new MigrationBuilder<>(client); + migrationBuilder.readExecution(getExecution(params.readExecutionOrder)); + migrationBuilder.trackErrors(params.trackErrors); + migrationBuilder.trackLatency(params.trackLatency); + migrationBuilder.write( + (payload) -> getMigrationMethodResult(payload, oldService), + payload -> getMigrationMethodResult(payload, newService)); + if (params.trackConsistency) { + migrationBuilder.read( + (payload -> getMigrationMethodResult(payload, oldService)), + (payload) -> getMigrationMethodResult(payload, newService), + (a, b) -> a.equals(b)); + } else { + migrationBuilder.read((payload -> getMigrationMethodResult(payload, oldService)), + (payload) -> getMigrationMethodResult(payload, newService)); + } + Optional> opt = migrationBuilder.build(); + if (!opt.isPresent()) { + return null; + } + Migration migration = opt.get(); + + switch (params.operation) { + case "read": { + Migration.MigrationResult res = migration.read( + params.key, + params.context, + MigrationStage.of(params.defaultStage, MigrationStage.OFF), + params.payload); + Representations.MigrationOperationResponse response = new Representations.MigrationOperationResponse(); + if (res.isSuccess()) { + response.result = res.getResult().orElse(null); + } else { + response.error = res.getException().map(ex -> ex.getMessage()).orElse(null); + } + return response; + } + 
case "write": { + Migration.MigrationWriteResult res = migration.write( + params.key, + params.context, + MigrationStage.of(params.defaultStage, MigrationStage.OFF), + params.payload); + Representations.MigrationOperationResponse response = new Representations.MigrationOperationResponse(); + if (res.getAuthoritative().isSuccess()) { + response.result = res.getAuthoritative().getResult().orElse(null); + } else { + response.error = res.getAuthoritative() + .getException().map(ex -> ex.getMessage()).orElse(null); + } + return response; + } + default: + return null; + } + } + + @NotNull + private static MigrationMethodResult getMigrationMethodResult(String payload, MigrationCallbackService oldService) { + String response = oldService.post(payload); + return MigrationMethodResult.Success(response); + } + + private MigrationExecution getExecution(String execution) { + switch (execution) { + case "serial": + return MigrationExecution.Serial(MigrationSerialOrder.FIXED); + case "random": + return MigrationExecution.Serial(MigrationSerialOrder.RANDOM); + case "concurrent": + return MigrationExecution.Parallel(); + default: + throw new RuntimeException("Invalid execution mode"); + } + } + + public void close() { + try { + client.close(); + } catch (Exception e) { + logger.error("Unexpected error from LDClient.close(): {}", e); + } + logger.info("Test ended"); + } + + private LDConfig buildSdkConfig(SdkConfigParams params, String tag) { + LDConfig.Builder builder = new LDConfig.Builder(); + + builder.logging(Components.logging().baseLoggerName(tag + ".sdk")); + + if (params.startWaitTimeMs != null) { + builder.startWait(Duration.ofMillis(params.startWaitTimeMs.longValue())); + } + + ServiceEndpointsBuilder endpoints = Components.serviceEndpoints(); + + if (params.streaming != null) { + endpoints.streaming(params.streaming.baseUri); + StreamingDataSourceBuilder dataSource = Components.streamingDataSource(); + if (params.streaming.initialRetryDelayMs > 0) { + 
dataSource.initialReconnectDelay(Duration.ofMillis(params.streaming.initialRetryDelayMs)); + } + dataSource.payloadFilter(params.streaming.filter); + builder.dataSource(dataSource); + } + + if (params.events == null) { + builder.events(Components.noEvents()); + } else { + endpoints.events(params.events.baseUri); + EventProcessorBuilder eb = Components.sendEvents() + .allAttributesPrivate(params.events.allAttributesPrivate); + if (params.events.capacity > 0) { + eb.capacity(params.events.capacity); + } + if (params.events.flushIntervalMs != null) { + eb.flushInterval(Duration.ofMillis(params.events.flushIntervalMs.longValue())); + } + if (params.events.globalPrivateAttributes != null) { + eb.privateAttributes(params.events.globalPrivateAttributes); + } + builder.events(eb); + builder.diagnosticOptOut(!params.events.enableDiagnostics); + } + + if (params.bigSegments != null) { + BigSegmentsConfigurationBuilder bsb = Components.bigSegments( + new BigSegmentStoreFixture(new BigSegmentCallbackService(params.bigSegments.callbackUri))); + if (params.bigSegments.staleAfterMs != null) { + bsb.staleAfter(Duration.ofMillis(params.bigSegments.staleAfterMs)); + } + if (params.bigSegments.statusPollIntervalMs != null) { + bsb.statusPollInterval(Duration.ofMillis(params.bigSegments.statusPollIntervalMs)); + } + if (params.bigSegments.userCacheSize != null) { + bsb.userCacheSize(params.bigSegments.userCacheSize); + } + if (params.bigSegments.userCacheTimeMs != null) { + bsb.userCacheTime(Duration.ofMillis(params.bigSegments.userCacheTimeMs)); + } + builder.bigSegments(bsb); + } + + if (params.tags != null) { + ApplicationInfoBuilder ab = Components.applicationInfo(); + if (params.tags.applicationId != null) { + ab.applicationId(params.tags.applicationId); + } + if (params.tags.applicationVersion != null) { + ab.applicationVersion(params.tags.applicationVersion); + } + builder.applicationInfo(ab); + } + + if (params.serviceEndpoints != null) { + if 
(params.serviceEndpoints.streaming != null) { + endpoints.streaming(params.serviceEndpoints.streaming); + } + if (params.serviceEndpoints.polling != null) { + endpoints.polling(params.serviceEndpoints.polling); + } + if (params.serviceEndpoints.events != null) { + endpoints.events(params.serviceEndpoints.events); + } + } + builder.serviceEndpoints(endpoints); + + if (params.hooks != null && params.hooks.hooks != null) { + List hookList = new ArrayList<>(); + for (HookConfig hookConfig : params.hooks.hooks) { + + HookCallbackService callbackService = new HookCallbackService(hookConfig.callbackUri); + TestHook testHook = new TestHook( + hookConfig.name, + callbackService, + hookConfig.data != null ? hookConfig.data.beforeEvaluation : Collections.emptyMap(), + hookConfig.data != null ? hookConfig.data.afterEvaluation : Collections.emptyMap(), + hookConfig.errors != null ? hookConfig.errors.beforeEvaluation : null, + hookConfig.errors != null ? hookConfig.errors.afterEvaluation : null + ); + hookList.add(testHook); + } + builder.hooks(Components.hooks().setHooks(hookList)); + } + + return builder.build(); + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestHook.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestHook.java new file mode 100644 index 0000000..ac979d0 --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestHook.java @@ -0,0 +1,89 @@ +package sdktest; + +import com.launchdarkly.sdk.server.integrations.Hook; +import com.launchdarkly.sdk.server.integrations.HookMetadata; +import com.launchdarkly.sdk.server.integrations.EvaluationSeriesContext; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDValue; + +import sdktest.Representations.EvaluationHookCallbackParams; +import sdktest.Representations.EvaluationSeriesContextParam; + +import java.util.Collections; +import java.util.Map; +import java.util.HashMap; + +class TestHook extends Hook { + + private 
HookCallbackService callbackService; + private Map beforeEvaluationData; + private Map afterEvaluationData; + private String beforeEvaluationError; + private String afterEvaluationError; + + TestHook(String name, HookCallbackService callbackService, Map beforeEvaluationData, Map afterEvaluationData, String beforeEvaluationError, String afterEvaluationError) { + super(name); + this.callbackService = callbackService; + this.beforeEvaluationData = beforeEvaluationData; + this.afterEvaluationData = afterEvaluationData; + this.beforeEvaluationError = beforeEvaluationError; + this.afterEvaluationError = afterEvaluationError; + } + + @Override + public Map beforeEvaluation(EvaluationSeriesContext seriesContext, Map data) { + + if (this.beforeEvaluationError != null) { + throw new RuntimeException(this.beforeEvaluationError); + } + + EvaluationHookCallbackParams params = new EvaluationHookCallbackParams(); + + EvaluationSeriesContextParam seriesContextParam = new EvaluationSeriesContextParam(); + seriesContextParam.flagKey = seriesContext.flagKey; + seriesContextParam.context = seriesContext.context; + seriesContextParam.defaultValue = seriesContext.defaultValue; + seriesContextParam.method = seriesContext.method; + params.evaluationSeriesContext = seriesContextParam; + + params.evaluationSeriesData = data; + params.stage = "beforeEvaluation"; + callbackService.post(params); + + HashMap newData = new HashMap<>(data); + if (beforeEvaluationData != null) { + newData.putAll(beforeEvaluationData); + } + + return Collections.unmodifiableMap(newData); + } + + @Override + public Map afterEvaluation(EvaluationSeriesContext seriesContext, Map data, EvaluationDetail evaluationDetail) { + + if (this.afterEvaluationError != null) { + throw new RuntimeException(this.afterEvaluationError); + } + + EvaluationHookCallbackParams params = new EvaluationHookCallbackParams(); + + EvaluationSeriesContextParam seriesContextParam = new EvaluationSeriesContextParam(); + seriesContextParam.flagKey 
= seriesContext.flagKey; + seriesContextParam.context = seriesContext.context; + seriesContextParam.defaultValue = seriesContext.defaultValue; + seriesContextParam.method = seriesContext.method; + params.evaluationSeriesContext = seriesContextParam; + + params.evaluationSeriesData = data; + params.evaluationDetail = evaluationDetail; + params.stage = "afterEvaluation"; + callbackService.post(params); + + HashMap newData = new HashMap<>(data); + if (afterEvaluationData != null) { + newData.putAll(afterEvaluationData); + } + + return Collections.unmodifiableMap(newData); + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestService.java b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestService.java new file mode 100644 index 0000000..c08ba26 --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/java/sdktest/TestService.java @@ -0,0 +1,158 @@ +package sdktest; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.launchdarkly.sdk.server.LDClient; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.testhelpers.httptest.Handlers; +import com.launchdarkly.testhelpers.httptest.HttpServer; +import com.launchdarkly.testhelpers.httptest.RequestContext; +import com.launchdarkly.testhelpers.httptest.SimpleRouter; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Pattern; + +import okhttp3.OkHttpClient; + +import sdktest.Representations.CommandParams; +import sdktest.Representations.CreateInstanceParams; +import sdktest.Representations.Status; + +public class TestService { + private static final int PORT = 8000; + private static final String[] CAPABILITIES = new String[]{ + "server-side", + "strongly-typed", + "all-flags-client-side-only", + "all-flags-details-only-for-tracked-flags", + "all-flags-with-reasons", + "big-segments", + "context-type", + "service-endpoints", + 
"tags", + "filtering", + "migrations", + "event-sampling", + "inline-context", + "anonymous-redaction", + "evaluation-hooks" + }; + + static final Gson gson = new GsonBuilder().serializeNulls().create(); + + static final OkHttpClient client = new OkHttpClient(); + + private final Map clients = new ConcurrentHashMap(); + private final AtomicInteger clientCounter = new AtomicInteger(0); + private final String clientVersion; + + private TestService() { + LDClient dummyClient = new LDClient("", new LDConfig.Builder().offline(true).build()); + clientVersion = dummyClient.version(); + try { + dummyClient.close(); + } catch (Exception e) {} + } + + @SuppressWarnings("serial") + public static class BadRequestException extends Exception { + public BadRequestException(String message) { + super(message); + } + } + + public static void main(String[] args) throws Exception { + TestService service = new TestService(); + + SimpleRouter router = new SimpleRouter() + .add("GET", "/", ctx -> service.writeJson(diableKeepAlive(ctx), service.getStatus())) + .add("DELETE", "/", ctx -> service.forceQuit()) + .add("POST", "/", ctx -> service.postCreateClient(diableKeepAlive(ctx))) + .addRegex("POST", Pattern.compile("/clients/(.*)"), ctx -> service.postClientCommand(diableKeepAlive(ctx))) + .addRegex("DELETE", Pattern.compile("/clients/(.*)"), ctx -> service.deleteClient(diableKeepAlive(ctx))); + + HttpServer server = HttpServer.start(PORT, router); + server.getRecorder().setEnabled(false); // don't accumulate a request log + + System.out.println("Listening on port " + PORT); + + // need to explicitly sleep because HttpServer now starts as a daemon thread + while (true) { + Thread.sleep(1000); + } + } + + private static RequestContext diableKeepAlive(RequestContext ctx) { + ctx.addHeader("Connection", "close"); + return ctx; + } + + private Status getStatus() { + Status rep = new Status(); + rep.capabilities = CAPABILITIES; + rep.clientVersion = clientVersion; + return rep; + } + + 
private void forceQuit() { + System.out.println("Test harness has told us to quit"); + System.exit(0); + } + + private void postCreateClient(RequestContext ctx) { + CreateInstanceParams params = readJson(ctx, CreateInstanceParams.class); + + String clientId = String.valueOf(clientCounter.incrementAndGet()); + SdkClientEntity client = new SdkClientEntity(this, params); + + clients.put(clientId, client); + + ctx.addHeader("Location", "/clients/" + clientId); + } + + private void postClientCommand(RequestContext ctx) { + CommandParams params = readJson(ctx, CommandParams.class); + + String clientId = ctx.getPathParam(0); + SdkClientEntity client = clients.get(clientId); + if (client == null) { + ctx.setStatus(404); + } else { + try { + Object resp = client.doCommand(params); + ctx.setStatus(202); + if (resp != null) { + String json = gson.toJson(resp); + client.logger.info("Sending response: {}", json); + writeJson(ctx, resp); + } + } catch (BadRequestException e) { + ctx.setStatus(400); + } catch (Exception e) { + client.logger.error("Unexpected exception: {}", e); + ctx.setStatus(500); + } + } + } + + private void deleteClient(RequestContext ctx) { + String clientId = ctx.getPathParam(0); + SdkClientEntity client = clients.get(clientId); + if (client == null) { + ctx.setStatus(404); + } else { + client.close(); + } + } + + private T readJson(RequestContext ctx, Class paramsClass) { + return gson.fromJson(ctx.getRequest().getBody(), paramsClass); + } + + private void writeJson(RequestContext ctx, Object data) { + String json = gson.toJson(data); + Handlers.bodyJson(json).apply(ctx); + } +} diff --git a/lib/sdk/server/contract-tests/service/src/main/resources/logback.xml b/lib/sdk/server/contract-tests/service/src/main/resources/logback.xml new file mode 100644 index 0000000..3a604ca --- /dev/null +++ b/lib/sdk/server/contract-tests/service/src/main/resources/logback.xml @@ -0,0 +1,20 @@ + + + + + + %d{yyyy-MM-dd HH:mm:ss} [%logger] %level: %msg%n + + + + + + + + + + 
+ + + + diff --git a/lib/sdk/server/contract-tests/settings.gradle b/lib/sdk/server/contract-tests/settings.gradle new file mode 100644 index 0000000..5c2bf5d --- /dev/null +++ b/lib/sdk/server/contract-tests/settings.gradle @@ -0,0 +1,3 @@ +include ":service" +include ":sdk" +project(":sdk").projectDir = new File("..") diff --git a/lib/sdk/server/gradle.properties b/lib/sdk/server/gradle.properties new file mode 100644 index 0000000..7802165 --- /dev/null +++ b/lib/sdk/server/gradle.properties @@ -0,0 +1,11 @@ +#x-release-please-start-version +version=7.4.1 +#x-release-please-end + +# The following empty ossrh properties are used by LaunchDarkly's internal integration testing framework +# and should not be needed for typical development purposes (including by third-party developers). +ossrhUsername= +ossrhPassword= + +# See https://github.com/gradle/gradle/issues/11308 regarding the following property +systemProp.org.gradle.internal.publish.checksums.insecure=true diff --git a/lib/sdk/server/gradle.properties.example b/lib/sdk/server/gradle.properties.example new file mode 100644 index 0000000..058697d --- /dev/null +++ b/lib/sdk/server/gradle.properties.example @@ -0,0 +1,8 @@ +# To release a version of this SDK, copy this file to ~/.gradle/gradle.properties and fill in the values. 
+githubUser = YOUR_GITHUB_USERNAME +githubPassword = YOUR_GITHUB_PASSWORD +signing.keyId = 5669D902 +signing.password = SIGNING_PASSWORD +signing.secretKeyRingFile = SECRET_RING_FILE +ossrhUsername = launchdarkly +ossrhPassword = OSSHR_PASSWORD diff --git a/lib/sdk/server/gradle/wrapper/gradle-wrapper.jar b/lib/sdk/server/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..7454180f2ae8848c63b8b4dea2cb829da983f2fa GIT binary patch literal 59536 zcma&NbC71ylI~qywr$(CZQJHswz}-9F59+k+g;UV+cs{`J?GrGXYR~=-ydruB3JCa zB64N^cILAcWk5iofq)<(fq;O7{th4@;QxID0)qN`mJ?GIqLY#rX8-|G{5M0pdVW5^ zzXk$-2kQTAC?_N@B`&6-N-rmVFE=$QD?>*=4<|!MJu@}isLc4AW#{m2if&A5T5g&~ ziuMQeS*U5sL6J698wOd)K@oK@1{peP5&Esut<#VH^u)gp`9H4)`uE!2$>RTctN+^u z=ASkePDZA-X8)rp%D;p*~P?*a_=*Kwc<^>QSH|^<0>o37lt^+Mj1;4YvJ(JR-Y+?%Nu}JAYj5 z_Qc5%Ao#F?q32i?ZaN2OSNhWL;2oDEw_({7ZbgUjna!Fqn3NzLM@-EWFPZVmc>(fZ z0&bF-Ch#p9C{YJT9Rcr3+Y_uR^At1^BxZ#eo>$PLJF3=;t_$2|t+_6gg5(j{TmjYU zK12c&lE?Eh+2u2&6Gf*IdKS&6?rYbSEKBN!rv{YCm|Rt=UlPcW9j`0o6{66#y5t9C zruFA2iKd=H%jHf%ypOkxLnO8#H}#Zt{8p!oi6)7#NqoF({t6|J^?1e*oxqng9Q2Cc zg%5Vu!em)}Yuj?kaP!D?b?(C*w!1;>R=j90+RTkyEXz+9CufZ$C^umX^+4|JYaO<5 zmIM3#dv`DGM;@F6;(t!WngZSYzHx?9&$xEF70D1BvfVj<%+b#)vz)2iLCrTeYzUcL z(OBnNoG6Le%M+@2oo)&jdOg=iCszzv59e zDRCeaX8l1hC=8LbBt|k5?CXgep=3r9BXx1uR8!p%Z|0+4Xro=xi0G!e{c4U~1j6!) 
zH6adq0}#l{%*1U(Cb%4AJ}VLWKBPi0MoKFaQH6x?^hQ!6em@993xdtS%_dmevzeNl z(o?YlOI=jl(`L9^ z0O+H9k$_@`6L13eTT8ci-V0ljDMD|0ifUw|Q-Hep$xYj0hTO@0%IS^TD4b4n6EKDG z??uM;MEx`s98KYN(K0>c!C3HZdZ{+_53DO%9k5W%pr6yJusQAv_;IA}925Y%;+!tY z%2k!YQmLLOr{rF~!s<3-WEUs)`ix_mSU|cNRBIWxOox_Yb7Z=~Q45ZNe*u|m^|)d* zog=i>`=bTe!|;8F+#H>EjIMcgWcG2ORD`w0WD;YZAy5#s{65~qfI6o$+Ty&-hyMyJ z3Ra~t>R!p=5ZpxA;QkDAoPi4sYOP6>LT+}{xp}tk+<0k^CKCFdNYG(Es>p0gqD)jP zWOeX5G;9(m@?GOG7g;e74i_|SmE?`B2i;sLYwRWKLy0RLW!Hx`=!LH3&k=FuCsM=9M4|GqzA)anEHfxkB z?2iK-u(DC_T1};KaUT@3nP~LEcENT^UgPvp!QC@Dw&PVAhaEYrPey{nkcn(ro|r7XUz z%#(=$7D8uP_uU-oPHhd>>^adbCSQetgSG`e$U|7mr!`|bU0aHl_cmL)na-5x1#OsVE#m*+k84Y^+UMeSAa zbrVZHU=mFwXEaGHtXQq`2ZtjfS!B2H{5A<3(nb-6ARVV8kEmOkx6D2x7~-6hl;*-*}2Xz;J#a8Wn;_B5=m zl3dY;%krf?i-Ok^Pal-}4F`{F@TYPTwTEhxpZK5WCpfD^UmM_iYPe}wpE!Djai6_{ z*pGO=WB47#Xjb7!n2Ma)s^yeR*1rTxp`Mt4sfA+`HwZf%!7ZqGosPkw69`Ix5Ku6G z@Pa;pjzV&dn{M=QDx89t?p?d9gna*}jBly*#1!6}5K<*xDPJ{wv4& zM$17DFd~L*Te3A%yD;Dp9UGWTjRxAvMu!j^Tbc}2v~q^59d4bz zvu#!IJCy(BcWTc`;v$9tH;J%oiSJ_i7s;2`JXZF+qd4C)vY!hyCtl)sJIC{ebI*0> z@x>;EzyBv>AI-~{D6l6{ST=em*U( z(r$nuXY-#CCi^8Z2#v#UXOt`dbYN1z5jzNF2 z411?w)whZrfA20;nl&C1Gi+gk<`JSm+{|*2o<< zqM#@z_D`Cn|0H^9$|Tah)0M_X4c37|KQ*PmoT@%xHc3L1ZY6(p(sNXHa&49Frzto& zR`c~ClHpE~4Z=uKa5S(-?M8EJ$zt0&fJk~p$M#fGN1-y$7!37hld`Uw>Urri(DxLa;=#rK0g4J)pXMC zxzraOVw1+kNWpi#P=6(qxf`zSdUC?D$i`8ZI@F>k6k zz21?d+dw7b&i*>Kv5L(LH-?J%@WnqT7j#qZ9B>|Zl+=> z^U-pV@1y_ptHo4hl^cPRWewbLQ#g6XYQ@EkiP z;(=SU!yhjHp%1&MsU`FV1Z_#K1&(|5n(7IHbx&gG28HNT)*~-BQi372@|->2Aw5It z0CBpUcMA*QvsPy)#lr!lIdCi@1k4V2m!NH)%Px(vu-r(Q)HYc!p zJ^$|)j^E#q#QOgcb^pd74^JUi7fUmMiNP_o*lvx*q%_odv49Dsv$NV;6J z9GOXKomA{2Pb{w}&+yHtH?IkJJu~}Z?{Uk++2mB8zyvh*xhHKE``99>y#TdD z&(MH^^JHf;g(Tbb^&8P*;_i*2&fS$7${3WJtV7K&&(MBV2~)2KB3%cWg#1!VE~k#C z!;A;?p$s{ihyojEZz+$I1)L}&G~ml=udD9qh>Tu(ylv)?YcJT3ihapi!zgPtWb*CP zlLLJSRCj-^w?@;RU9aL2zDZY1`I3d<&OMuW=c3$o0#STpv_p3b9Wtbql>w^bBi~u4 z3D8KyF?YE?=HcKk!xcp@Cigvzy=lnFgc^9c%(^F22BWYNAYRSho@~*~S)4%AhEttv 
zvq>7X!!EWKG?mOd9&n>vvH1p4VzE?HCuxT-u+F&mnsfDI^}*-d00-KAauEaXqg3k@ zy#)MGX!X;&3&0s}F3q40ZmVM$(H3CLfpdL?hB6nVqMxX)q=1b}o_PG%r~hZ4gUfSp zOH4qlEOW4OMUc)_m)fMR_rl^pCfXc{$fQbI*E&mV77}kRF z&{<06AJyJ!e863o-V>FA1a9Eemx6>^F$~9ppt()ZbPGfg_NdRXBWoZnDy2;#ODgf! zgl?iOcF7Meo|{AF>KDwTgYrJLb$L2%%BEtO>T$C?|9bAB&}s;gI?lY#^tttY&hfr# zKhC+&b-rpg_?~uVK%S@mQleU#_xCsvIPK*<`E0fHE1&!J7!xD#IB|SSPW6-PyuqGn3^M^Rz%WT{e?OI^svARX&SAdU77V(C~ zM$H{Kg59op{<|8ry9ecfP%=kFm(-!W&?U0@<%z*+!*<e0XesMxRFu9QnGqun6R_%T+B%&9Dtk?*d$Q zb~>84jEAPi@&F@3wAa^Lzc(AJz5gsfZ7J53;@D<;Klpl?sK&u@gie`~vTsbOE~Cd4 z%kr56mI|#b(Jk&;p6plVwmNB0H@0SmgdmjIn5Ne@)}7Vty(yb2t3ev@22AE^s!KaN zyQ>j+F3w=wnx7w@FVCRe+`vUH)3gW%_72fxzqX!S&!dchdkRiHbXW1FMrIIBwjsai8`CB2r4mAbwp%rrO>3B$Zw;9=%fXI9B{d(UzVap7u z6piC-FQ)>}VOEuPpuqznpY`hN4dGa_1Xz9rVg(;H$5Te^F0dDv*gz9JS<|>>U0J^# z6)(4ICh+N_Q`Ft0hF|3fSHs*?a=XC;e`sJaU9&d>X4l?1W=|fr!5ShD|nv$GK;j46@BV6+{oRbWfqOBRb!ir88XD*SbC(LF}I1h#6@dvK%Toe%@ zhDyG$93H8Eu&gCYddP58iF3oQH*zLbNI;rN@E{T9%A8!=v#JLxKyUe}e}BJpB{~uN zqgxRgo0*-@-iaHPV8bTOH(rS(huwK1Xg0u+e!`(Irzu@Bld&s5&bWgVc@m7;JgELd zimVs`>vQ}B_1(2#rv#N9O`fJpVfPc7V2nv34PC);Dzbb;p!6pqHzvy?2pD&1NE)?A zt(t-ucqy@wn9`^MN5apa7K|L=9>ISC>xoc#>{@e}m#YAAa1*8-RUMKwbm|;5p>T`Z zNf*ph@tnF{gmDa3uwwN(g=`Rh)4!&)^oOy@VJaK4lMT&5#YbXkl`q?<*XtsqD z9PRK6bqb)fJw0g-^a@nu`^?71k|m3RPRjt;pIkCo1{*pdqbVs-Yl>4E>3fZx3Sv44grW=*qdSoiZ9?X0wWyO4`yDHh2E!9I!ZFi zVL8|VtW38}BOJHW(Ax#KL_KQzarbuE{(%TA)AY)@tY4%A%P%SqIU~8~-Lp3qY;U-} z`h_Gel7;K1h}7$_5ZZT0&%$Lxxr-<89V&&TCsu}LL#!xpQ1O31jaa{U34~^le*Y%L za?7$>Jk^k^pS^_M&cDs}NgXlR>16AHkSK-4TRaJSh#h&p!-!vQY%f+bmn6x`4fwTp z$727L^y`~!exvmE^W&#@uY!NxJi`g!i#(++!)?iJ(1)2Wk;RN zFK&O4eTkP$Xn~4bB|q8y(btx$R#D`O@epi4ofcETrx!IM(kWNEe42Qh(8*KqfP(c0 zouBl6>Fc_zM+V;F3znbo{x#%!?mH3`_ANJ?y7ppxS@glg#S9^MXu|FM&ynpz3o&Qh z2ujAHLF3($pH}0jXQsa#?t--TnF1P73b?4`KeJ9^qK-USHE)4!IYgMn-7z|=ALF5SNGkrtPG@Y~niUQV2?g$vzJN3nZ{7;HZHzWAeQ;5P|@Tl3YHpyznGG4-f4=XflwSJY+58-+wf?~Fg@1p1wkzuu-RF3j2JX37SQUc? 
zQ4v%`V8z9ZVZVqS8h|@@RpD?n0W<=hk=3Cf8R?d^9YK&e9ZybFY%jdnA)PeHvtBe- zhMLD+SSteHBq*q)d6x{)s1UrsO!byyLS$58WK;sqip$Mk{l)Y(_6hEIBsIjCr5t>( z7CdKUrJTrW%qZ#1z^n*Lb8#VdfzPw~OIL76aC+Rhr<~;4Tl!sw?Rj6hXj4XWa#6Tp z@)kJ~qOV)^Rh*-?aG>ic2*NlC2M7&LUzc9RT6WM%Cpe78`iAowe!>(T0jo&ivn8-7 zs{Qa@cGy$rE-3AY0V(l8wjI^uB8Lchj@?L}fYal^>T9z;8juH@?rG&g-t+R2dVDBe zq!K%{e-rT5jX19`(bP23LUN4+_zh2KD~EAYzhpEO3MUG8@}uBHH@4J zd`>_(K4q&>*k82(dDuC)X6JuPrBBubOg7qZ{?x!r@{%0);*`h*^F|%o?&1wX?Wr4b z1~&cy#PUuES{C#xJ84!z<1tp9sfrR(i%Tu^jnXy;4`Xk;AQCdFC@?V%|; zySdC7qS|uQRcH}EFZH%mMB~7gi}a0utE}ZE_}8PQH8f;H%PN41Cb9R%w5Oi5el^fd z$n{3SqLCnrF##x?4sa^r!O$7NX!}&}V;0ZGQ&K&i%6$3C_dR%I7%gdQ;KT6YZiQrW zk%q<74oVBV>@}CvJ4Wj!d^?#Zwq(b$E1ze4$99DuNg?6t9H}k_|D7KWD7i0-g*EO7 z;5{hSIYE4DMOK3H%|f5Edx+S0VI0Yw!tsaRS2&Il2)ea^8R5TG72BrJue|f_{2UHa z@w;^c|K3da#$TB0P3;MPlF7RuQeXT$ zS<<|C0OF(k)>fr&wOB=gP8!Qm>F41u;3esv7_0l%QHt(~+n; zf!G6%hp;Gfa9L9=AceiZs~tK+Tf*Wof=4!u{nIO90jH@iS0l+#%8=~%ASzFv7zqSB^?!@N7)kp0t&tCGLmzXSRMRyxCmCYUD2!B`? zhs$4%KO~m=VFk3Buv9osha{v+mAEq=ik3RdK@;WWTV_g&-$U4IM{1IhGX{pAu%Z&H zFfwCpUsX%RKg);B@7OUzZ{Hn{q6Vv!3#8fAg!P$IEx<0vAx;GU%}0{VIsmFBPq_mb zpe^BChDK>sc-WLKl<6 zwbW|e&d&dv9Wu0goueyu>(JyPx1mz0v4E?cJjFuKF71Q1)AL8jHO$!fYT3(;U3Re* zPPOe%*O+@JYt1bW`!W_1!mN&=w3G9ru1XsmwfS~BJ))PhD(+_J_^N6j)sx5VwbWK| zwRyC?W<`pOCY)b#AS?rluxuuGf-AJ=D!M36l{ua?@SJ5>e!IBr3CXIxWw5xUZ@Xrw z_R@%?{>d%Ld4p}nEsiA@v*nc6Ah!MUs?GA7e5Q5lPpp0@`%5xY$C;{%rz24$;vR#* zBP=a{)K#CwIY%p} zXVdxTQ^HS@O&~eIftU+Qt^~(DGxrdi3k}DdT^I7Iy5SMOp$QuD8s;+93YQ!OY{eB24%xY7ml@|M7I(Nb@K_-?F;2?et|CKkuZK_>+>Lvg!>JE~wN`BI|_h6$qi!P)+K-1Hh(1;a`os z55)4Q{oJiA(lQM#;w#Ta%T0jDNXIPM_bgESMCDEg6rM33anEr}=|Fn6)|jBP6Y}u{ zv9@%7*#RI9;fv;Yii5CI+KrRdr0DKh=L>)eO4q$1zmcSmglsV`*N(x=&Wx`*v!!hn6X-l0 zP_m;X??O(skcj+oS$cIdKhfT%ABAzz3w^la-Ucw?yBPEC+=Pe_vU8nd-HV5YX6X8r zZih&j^eLU=%*;VzhUyoLF;#8QsEfmByk+Y~caBqSvQaaWf2a{JKB9B>V&r?l^rXaC z8)6AdR@Qy_BxQrE2Fk?ewD!SwLuMj@&d_n5RZFf7=>O>hzVE*seW3U?_p|R^CfoY`?|#x9)-*yjv#lo&zP=uI`M?J zbzC<^3x7GfXA4{FZ72{PE*-mNHyy59Q;kYG@BB~NhTd6pm2Oj=_ 
zizmD?MKVRkT^KmXuhsk?eRQllPo2Ubk=uCKiZ&u3Xjj~<(!M94c)Tez@9M1Gfs5JV z->@II)CDJOXTtPrQudNjE}Eltbjq>6KiwAwqvAKd^|g!exgLG3;wP+#mZYr`cy3#39e653d=jrR-ulW|h#ddHu(m9mFoW~2yE zz5?dB%6vF}+`-&-W8vy^OCxm3_{02royjvmwjlp+eQDzFVEUiyO#gLv%QdDSI#3W* z?3!lL8clTaNo-DVJw@ynq?q!%6hTQi35&^>P85G$TqNt78%9_sSJt2RThO|JzM$iL zg|wjxdMC2|Icc5rX*qPL(coL!u>-xxz-rFiC!6hD1IR%|HSRsV3>Kq~&vJ=s3M5y8SG%YBQ|{^l#LGlg!D?E>2yR*eV%9m$_J6VGQ~AIh&P$_aFbh zULr0Z$QE!QpkP=aAeR4ny<#3Fwyw@rZf4?Ewq`;mCVv}xaz+3ni+}a=k~P+yaWt^L z@w67!DqVf7D%7XtXX5xBW;Co|HvQ8WR1k?r2cZD%U;2$bsM%u8{JUJ5Z0k= zZJARv^vFkmWx15CB=rb=D4${+#DVqy5$C%bf`!T0+epLJLnh1jwCdb*zuCL}eEFvE z{rO1%gxg>1!W(I!owu*mJZ0@6FM(?C+d*CeceZRW_4id*D9p5nzMY&{mWqrJomjIZ z97ZNnZ3_%Hx8dn;H>p8m7F#^2;T%yZ3H;a&N7tm=Lvs&lgJLW{V1@h&6Vy~!+Ffbb zv(n3+v)_D$}dqd!2>Y2B)#<+o}LH#%ogGi2-?xRIH)1!SD)u-L65B&bsJTC=LiaF+YOCif2dUX6uAA|#+vNR z>U+KQekVGon)Yi<93(d!(yw1h3&X0N(PxN2{%vn}cnV?rYw z$N^}_o!XUB!mckL`yO1rnUaI4wrOeQ(+&k?2mi47hzxSD`N#-byqd1IhEoh!PGq>t z_MRy{5B0eKY>;Ao3z$RUU7U+i?iX^&r739F)itdrTpAi-NN0=?^m%?{A9Ly2pVv>Lqs6moTP?T2-AHqFD-o_ znVr|7OAS#AEH}h8SRPQ@NGG47dO}l=t07__+iK8nHw^(AHx&Wb<%jPc$$jl6_p(b$ z)!pi(0fQodCHfM)KMEMUR&UID>}m^(!{C^U7sBDOA)$VThRCI0_+2=( zV8mMq0R(#z;C|7$m>$>`tX+T|xGt(+Y48@ZYu#z;0pCgYgmMVbFb!$?%yhZqP_nhn zy4<#3P1oQ#2b51NU1mGnHP$cf0j-YOgAA}A$QoL6JVLcmExs(kU{4z;PBHJD%_=0F z>+sQV`mzijSIT7xn%PiDKHOujX;n|M&qr1T@rOxTdxtZ!&u&3HHFLYD5$RLQ=heur zb>+AFokUVQeJy-#LP*^)spt{mb@Mqe=A~-4p0b+Bt|pZ+@CY+%x}9f}izU5;4&QFE zO1bhg&A4uC1)Zb67kuowWY4xbo&J=%yoXlFB)&$d*-}kjBu|w!^zbD1YPc0-#XTJr z)pm2RDy%J3jlqSMq|o%xGS$bPwn4AqitC6&e?pqWcjWPt{3I{>CBy;hg0Umh#c;hU3RhCUX=8aR>rmd` z7Orw(5tcM{|-^J?ZAA9KP|)X6n9$-kvr#j5YDecTM6n z&07(nD^qb8hpF0B^z^pQ*%5ePYkv&FabrlI61ntiVp!!C8y^}|<2xgAd#FY=8b*y( zuQOuvy2`Ii^`VBNJB&R!0{hABYX55ooCAJSSevl4RPqEGb)iy_0H}v@vFwFzD%>#I>)3PsouQ+_Kkbqy*kKdHdfkN7NBcq%V{x^fSxgXpg7$bF& zj!6AQbDY(1u#1_A#1UO9AxiZaCVN2F0wGXdY*g@x$ByvUA?ePdide0dmr#}udE%K| z3*k}Vv2Ew2u1FXBaVA6aerI36R&rzEZeDDCl5!t0J=ug6kuNZzH>3i_VN`%BsaVB3 
zQYw|Xub_SGf{)F{$ZX5`Jc!X!;eybjP+o$I{Z^Hsj@D=E{MnnL+TbC@HEU2DjG{3-LDGIbq()U87x4eS;JXnSh;lRlJ z>EL3D>wHt-+wTjQF$fGyDO$>d+(fq@bPpLBS~xA~R=3JPbS{tzN(u~m#Po!?H;IYv zE;?8%^vle|%#oux(Lj!YzBKv+Fd}*Ur-dCBoX*t{KeNM*n~ZPYJ4NNKkI^MFbz9!v z4(Bvm*Kc!-$%VFEewYJKz-CQN{`2}KX4*CeJEs+Q(!kI%hN1!1P6iOq?ovz}X0IOi z)YfWpwW@pK08^69#wSyCZkX9?uZD?C^@rw^Y?gLS_xmFKkooyx$*^5#cPqntNTtSG zlP>XLMj2!VF^0k#ole7`-c~*~+_T5ls?x4)ah(j8vo_ zwb%S8qoaZqY0-$ZI+ViIA_1~~rAH7K_+yFS{0rT@eQtTAdz#8E5VpwnW!zJ_^{Utv zlW5Iar3V5t&H4D6A=>?mq;G92;1cg9a2sf;gY9pJDVKn$DYdQlvfXq}zz8#LyPGq@ z+`YUMD;^-6w&r-82JL7mA8&M~Pj@aK!m{0+^v<|t%APYf7`}jGEhdYLqsHW-Le9TL z_hZZ1gbrz7$f9^fAzVIP30^KIz!!#+DRLL+qMszvI_BpOSmjtl$hh;&UeM{ER@INV zcI}VbiVTPoN|iSna@=7XkP&-4#06C};8ajbxJ4Gcq8(vWv4*&X8bM^T$mBk75Q92j z1v&%a;OSKc8EIrodmIiw$lOES2hzGDcjjB`kEDfJe{r}yE6`eZL zEB`9u>Cl0IsQ+t}`-cx}{6jqcANucqIB>Qmga_&<+80E2Q|VHHQ$YlAt{6`Qu`HA3 z03s0-sSlwbvgi&_R8s={6<~M^pGvBNjKOa>tWenzS8s zR>L7R5aZ=mSU{f?ib4Grx$AeFvtO5N|D>9#)ChH#Fny2maHWHOf2G=#<9Myot#+4u zWVa6d^Vseq_0=#AYS(-m$Lp;*8nC_6jXIjEM`omUmtH@QDs3|G)i4j*#_?#UYVZvJ z?YjT-?!4Q{BNun;dKBWLEw2C-VeAz`%?A>p;)PL}TAZn5j~HK>v1W&anteARlE+~+ zj>c(F;?qO3pXBb|#OZdQnm<4xWmn~;DR5SDMxt0UK_F^&eD|KZ=O;tO3vy4@4h^;2 zUL~-z`-P1aOe?|ZC1BgVsL)2^J-&vIFI%q@40w0{jjEfeVl)i9(~bt2z#2Vm)p`V_ z1;6$Ae7=YXk#=Qkd24Y23t&GvRxaOoad~NbJ+6pxqzJ>FY#Td7@`N5xp!n(c!=RE& z&<<@^a$_Ys8jqz4|5Nk#FY$~|FPC0`*a5HH!|Gssa9=~66&xG9)|=pOOJ2KE5|YrR zw!w6K2aC=J$t?L-;}5hn6mHd%hC;p8P|Dgh6D>hGnXPgi;6r+eA=?f72y9(Cf_ho{ zH6#)uD&R=73^$$NE;5piWX2bzR67fQ)`b=85o0eOLGI4c-Tb@-KNi2pz=Ke@SDcPn za$AxXib84`!Sf;Z3B@TSo`Dz7GM5Kf(@PR>Ghzi=BBxK8wRp>YQoXm+iL>H*Jo9M3 z6w&E?BC8AFTFT&Tv8zf+m9<&S&%dIaZ)Aoqkak_$r-2{$d~0g2oLETx9Y`eOAf14QXEQw3tJne;fdzl@wV#TFXSLXM2428F-Q}t+n2g%vPRMUzYPvzQ9f# zu(liiJem9P*?0%V@RwA7F53r~|I!Ty)<*AsMX3J{_4&}{6pT%Tpw>)^|DJ)>gpS~1rNEh z0$D?uO8mG?H;2BwM5a*26^7YO$XjUm40XmBsb63MoR;bJh63J;OngS5sSI+o2HA;W zdZV#8pDpC9Oez&L8loZO)MClRz!_!WD&QRtQxnazhT%Vj6Wl4G11nUk8*vSeVab@N#oJ}`KyJv+8Mo@T1-pqZ1t|?cnaVOd;1(h9 
z!$DrN=jcGsVYE-0-n?oCJ^4x)F}E;UaD-LZUIzcD?W^ficqJWM%QLy6QikrM1aKZC zi{?;oKwq^Vsr|&`i{jIphA8S6G4)$KGvpULjH%9u(Dq247;R#l&I0{IhcC|oBF*Al zvLo7Xte=C{aIt*otJD}BUq)|_pdR>{zBMT< z(^1RpZv*l*m*OV^8>9&asGBo8h*_4q*)-eCv*|Pq=XNGrZE)^(SF7^{QE_~4VDB(o zVcPA_!G+2CAtLbl+`=Q~9iW`4ZRLku!uB?;tWqVjB0lEOf}2RD7dJ=BExy=<9wkb- z9&7{XFA%n#JsHYN8t5d~=T~5DcW4$B%3M+nNvC2`0!#@sckqlzo5;hhGi(D9=*A4` z5ynobawSPRtWn&CDLEs3Xf`(8^zDP=NdF~F^s&={l7(aw&EG}KWpMjtmz7j_VLO;@ zM2NVLDxZ@GIv7*gzl1 zjq78tv*8#WSY`}Su0&C;2F$Ze(q>F(@Wm^Gw!)(j;dk9Ad{STaxn)IV9FZhm*n+U} zi;4y*3v%A`_c7a__DJ8D1b@dl0Std3F||4Wtvi)fCcBRh!X9$1x!_VzUh>*S5s!oq z;qd{J_r79EL2wIeiGAqFstWtkfIJpjVh%zFo*=55B9Zq~y0=^iqHWfQl@O!Ak;(o*m!pZqe9 z%U2oDOhR)BvW8&F70L;2TpkzIutIvNQaTjjs5V#8mV4!NQ}zN=i`i@WI1z0eN-iCS z;vL-Wxc^Vc_qK<5RPh(}*8dLT{~GzE{w2o$2kMFaEl&q zP{V=>&3kW7tWaK-Exy{~`v4J0U#OZBk{a9{&)&QG18L@6=bsZ1zC_d{{pKZ-Ey>I> z;8H0t4bwyQqgu4hmO`3|4K{R*5>qnQ&gOfdy?z`XD%e5+pTDzUt3`k^u~SaL&XMe= z9*h#kT(*Q9jO#w2Hd|Mr-%DV8i_1{J1MU~XJ3!WUplhXDYBpJH><0OU`**nIvPIof z|N8@I=wA)sf45SAvx||f?Z5uB$kz1qL3Ky_{%RPdP5iN-D2!p5scq}buuC00C@jom zhfGKm3|f?Z0iQ|K$Z~!`8{nmAS1r+fp6r#YDOS8V*;K&Gs7Lc&f^$RC66O|)28oh`NHy&vq zJh+hAw8+ybTB0@VhWN^0iiTnLsCWbS_y`^gs!LX!Lw{yE``!UVzrV24tP8o;I6-65 z1MUiHw^{bB15tmrVT*7-#sj6cs~z`wk52YQJ*TG{SE;KTm#Hf#a~|<(|ImHH17nNM z`Ub{+J3dMD!)mzC8b(2tZtokKW5pAwHa?NFiso~# z1*iaNh4lQ4TS)|@G)H4dZV@l*Vd;Rw;-;odDhW2&lJ%m@jz+Panv7LQm~2Js6rOW3 z0_&2cW^b^MYW3)@o;neZ<{B4c#m48dAl$GCc=$>ErDe|?y@z`$uq3xd(%aAsX)D%l z>y*SQ%My`yDP*zof|3@_w#cjaW_YW4BdA;#Glg1RQcJGY*CJ9`H{@|D+*e~*457kd z73p<%fB^PV!Ybw@)Dr%(ZJbX}xmCStCYv#K3O32ej{$9IzM^I{6FJ8!(=azt7RWf4 z7ib0UOPqN40X!wOnFOoddd8`!_IN~9O)#HRTyjfc#&MCZ zZAMzOVB=;qwt8gV?{Y2?b=iSZG~RF~uyx18K)IDFLl})G1v@$(s{O4@RJ%OTJyF+Cpcx4jmy|F3euCnMK!P2WTDu5j z{{gD$=M*pH!GGzL%P)V2*ROm>!$Y=z|D`!_yY6e7SU$~a5q8?hZGgaYqaiLnkK%?0 zs#oI%;zOxF@g*@(V4p!$7dS1rOr6GVs6uYCTt2h)eB4?(&w8{#o)s#%gN@BBosRUe z)@P@8_Zm89pr~)b>e{tbPC~&_MR--iB{=)y;INU5#)@Gix-YpgP<-c2Ms{9zuCX|3 z!p(?VaXww&(w&uBHzoT%!A2=3HAP>SDxcljrego7rY|%hxy3XlODWffO_%g|l+7Y_ 
zqV(xbu)s4lV=l7M;f>vJl{`6qBm>#ZeMA}kXb97Z)?R97EkoI?x6Lp0yu1Z>PS?2{ z0QQ(8D)|lc9CO3B~e(pQM&5(1y&y=e>C^X$`)_&XuaI!IgDTVqt31wX#n+@!a_A0ZQkA zCJ2@M_4Gb5MfCrm5UPggeyh)8 zO9?`B0J#rkoCx(R0I!ko_2?iO@|oRf1;3r+i)w-2&j?=;NVIdPFsB)`|IC0zk6r9c zRrkfxWsiJ(#8QndNJj@{@WP2Ackr|r1VxV{7S&rSU(^)-M8gV>@UzOLXu9K<{6e{T zXJ6b92r$!|lwjhmgqkdswY&}c)KW4A)-ac%sU;2^fvq7gfUW4Bw$b!i@duy1CAxSn z(pyh$^Z=&O-q<{bZUP+$U}=*#M9uVc>CQVgDs4swy5&8RAHZ~$)hrTF4W zPsSa~qYv_0mJnF89RnnJTH`3}w4?~epFl=D(35$ zWa07ON$`OMBOHgCmfO(9RFc<)?$x)N}Jd2A(<*Ll7+4jrRt9w zwGxExUXd9VB#I|DwfxvJ;HZ8Q{37^wDhaZ%O!oO(HpcqfLH%#a#!~;Jl7F5>EX_=8 z{()l2NqPz>La3qJR;_v+wlK>GsHl;uRA8%j`A|yH@k5r%55S9{*Cp%uw6t`qc1!*T za2OeqtQj7sAp#Q~=5Fs&aCR9v>5V+s&RdNvo&H~6FJOjvaj--2sYYBvMq;55%z8^o z|BJDA4vzfow#DO#ZQHh;Oq_{r+qP{R9ox2TOgwQiv7Ow!zjN+A@BN;0tA2lUb#+zO z(^b89eV)D7UVE+h{mcNc6&GtpOqDn_?VAQ)Vob$hlFwW%xh>D#wml{t&Ofmm_d_+; zKDxzdr}`n2Rw`DtyIjrG)eD0vut$}dJAZ0AohZ+ZQdWXn_Z@dI_y=7t3q8x#pDI-K z2VVc&EGq445Rq-j0=U=Zx`oBaBjsefY;%)Co>J3v4l8V(T8H?49_@;K6q#r~Wwppc z4XW0(4k}cP=5ex>-Xt3oATZ~bBWKv)aw|I|Lx=9C1s~&b77idz({&q3T(Y(KbWO?+ zmcZ6?WeUsGk6>km*~234YC+2e6Zxdl~<_g2J|IE`GH%n<%PRv-50; zH{tnVts*S5*_RxFT9eM0z-pksIb^drUq4>QSww=u;UFCv2AhOuXE*V4z?MM`|ABOC4P;OfhS(M{1|c%QZ=!%rQTDFx`+}?Kdx$&FU?Y<$x;j7z=(;Lyz+?EE>ov!8vvMtSzG!nMie zsBa9t8as#2nH}n8xzN%W%U$#MHNXmDUVr@GX{?(=yI=4vks|V)!-W5jHsU|h_&+kY zS_8^kd3jlYqOoiI`ZqBVY!(UfnAGny!FowZWY_@YR0z!nG7m{{)4OS$q&YDyw6vC$ zm4!$h>*|!2LbMbxS+VM6&DIrL*X4DeMO!@#EzMVfr)e4Tagn~AQHIU8?e61TuhcKD zr!F4(kEebk(Wdk-?4oXM(rJwanS>Jc%<>R(siF+>+5*CqJLecP_we33iTFTXr6W^G z7M?LPC-qFHK;E!fxCP)`8rkxZyFk{EV;G-|kwf4b$c1k0atD?85+|4V%YATWMG|?K zLyLrws36p%Qz6{}>7b>)$pe>mR+=IWuGrX{3ZPZXF3plvuv5Huax86}KX*lbPVr}L z{C#lDjdDeHr~?l|)Vp_}T|%$qF&q#U;ClHEPVuS+Jg~NjC1RP=17=aQKGOcJ6B3mp z8?4*-fAD~}sX*=E6!}^u8)+m2j<&FSW%pYr_d|p_{28DZ#Cz0@NF=gC-o$MY?8Ca8 zr5Y8DSR^*urS~rhpX^05r30Ik#2>*dIOGxRm0#0YX@YQ%Mg5b6dXlS!4{7O_kdaW8PFSdj1=ryI-=5$fiieGK{LZ+SX(1b=MNL!q#lN zv98?fqqTUH8r8C7v(cx#BQ5P9W>- zmW93;eH6T`vuJ~rqtIBg%A6>q>gnWb3X!r0wh_q;211+Om&?nvYzL1hhtjB zK_7G3!n7PL>d!kj){HQE 
zE8(%J%dWLh1_k%gVXTZt zEdT09XSKAx27Ncaq|(vzL3gm83q>6CAw<$fTnMU05*xAe&rDfCiu`u^1)CD<>sx0i z*hr^N_TeN89G(nunZoLBf^81#pmM}>JgD@Nn1l*lN#a=B=9pN%tmvYFjFIoKe_(GF z-26x{(KXdfsQL7Uv6UtDuYwV`;8V3w>oT_I<`Ccz3QqK9tYT5ZQzbop{=I=!pMOCb zCU68`n?^DT%^&m>A%+-~#lvF!7`L7a{z<3JqIlk1$<||_J}vW1U9Y&eX<}l8##6i( zZcTT@2`9(Mecptm@{3A_Y(X`w9K0EwtPq~O!16bq{7c0f7#(3wn-^)h zxV&M~iiF!{-6A@>o;$RzQ5A50kxXYj!tcgme=Qjrbje~;5X2xryU;vH|6bE(8z^<7 zQ>BG7_c*JG8~K7Oe68i#0~C$v?-t@~@r3t2inUnLT(c=URpA9kA8uq9PKU(Ps(LVH zqgcqW>Gm?6oV#AldDPKVRcEyQIdTT`Qa1j~vS{<;SwyTdr&3*t?J)y=M7q*CzucZ&B0M=joT zBbj@*SY;o2^_h*>R0e({!QHF0=)0hOj^B^d*m>SnRrwq>MolNSgl^~r8GR#mDWGYEIJA8B<|{{j?-7p zVnV$zancW3&JVDtVpIlI|5djKq0(w$KxEFzEiiL=h5Jw~4Le23@s(mYyXWL9SX6Ot zmb)sZaly_P%BeX_9 zw&{yBef8tFm+%=--m*J|o~+Xg3N+$IH)t)=fqD+|fEk4AAZ&!wcN5=mi~Vvo^i`}> z#_3ahR}Ju)(Px7kev#JGcSwPXJ2id9%Qd2A#Uc@t8~egZ8;iC{e! z%=CGJOD1}j!HW_sgbi_8suYnn4#Ou}%9u)dXd3huFIb!ytlX>Denx@pCS-Nj$`VO&j@(z!kKSP0hE4;YIP#w9ta=3DO$7f*x zc9M4&NK%IrVmZAe=r@skWD`AEWH=g+r|*13Ss$+{c_R!b?>?UaGXlw*8qDmY#xlR= z<0XFbs2t?8i^G~m?b|!Hal^ZjRjt<@a? 
z%({Gn14b4-a|#uY^=@iiKH+k?~~wTj5K1A&hU z2^9-HTC)7zpoWK|$JXaBL6C z#qSNYtY>65T@Zs&-0cHeu|RX(Pxz6vTITdzJdYippF zC-EB+n4}#lM7`2Ry~SO>FxhKboIAF#Z{1wqxaCb{#yEFhLuX;Rx(Lz%T`Xo1+a2M}7D+@wol2)OJs$TwtRNJ={( zD@#zTUEE}#Fz#&(EoD|SV#bayvr&E0vzmb%H?o~46|FAcx?r4$N z&67W3mdip-T1RIxwSm_&(%U|+WvtGBj*}t69XVd&ebn>KOuL(7Y8cV?THd-(+9>G7*Nt%T zcH;`p={`SOjaf7hNd(=37Lz3-51;58JffzIPgGs_7xIOsB5p2t&@v1mKS$2D$*GQ6 zM(IR*j4{nri7NMK9xlDy-hJW6sW|ZiDRaFiayj%;(%51DN!ZCCCXz+0Vm#};70nOx zJ#yA0P3p^1DED;jGdPbQWo0WATN=&2(QybbVdhd=Vq*liDk`c7iZ?*AKEYC#SY&2g z&Q(Ci)MJ{mEat$ZdSwTjf6h~roanYh2?9j$CF@4hjj_f35kTKuGHvIs9}Re@iKMxS-OI*`0S z6s)fOtz}O$T?PLFVSeOjSO26$@u`e<>k(OSP!&YstH3ANh>)mzmKGNOwOawq-MPXe zy4xbeUAl6tamnx))-`Gi2uV5>9n(73yS)Ukma4*7fI8PaEwa)dWHs6QA6>$}7?(L8 ztN8M}?{Tf!Zu22J5?2@95&rQ|F7=FK-hihT-vDp!5JCcWrVogEnp;CHenAZ)+E+K5 z$Cffk5sNwD_?4+ymgcHR(5xgt20Z8M`2*;MzOM#>yhk{r3x=EyM226wb&!+j`W<%* zSc&|`8!>dn9D@!pYow~(DsY_naSx7(Z4i>cu#hA5=;IuI88}7f%)bRkuY2B;+9Uep zpXcvFWkJ!mQai63BgNXG26$5kyhZ2&*3Q_tk)Ii4M>@p~_~q_cE!|^A;_MHB;7s#9 zKzMzK{lIxotjc};k67^Xsl-gS!^*m*m6kn|sbdun`O?dUkJ{0cmI0-_2y=lTAfn*Y zKg*A-2sJq)CCJgY0LF-VQvl&6HIXZyxo2#!O&6fOhbHXC?%1cMc6y^*dOS{f$=137Ds1m01qs`>iUQ49JijsaQ( zksqV9@&?il$|4Ua%4!O15>Zy&%gBY&wgqB>XA3!EldQ%1CRSM(pp#k~-pkcCg4LAT zXE=puHbgsw)!xtc@P4r~Z}nTF=D2~j(6D%gTBw$(`Fc=OOQ0kiW$_RDd=hcO0t97h zb86S5r=>(@VGy1&#S$Kg_H@7G^;8Ue)X5Y+IWUi`o;mpvoV)`fcVk4FpcT|;EG!;? 
zHG^zrVVZOm>1KFaHlaogcWj(v!S)O(Aa|Vo?S|P z5|6b{qkH(USa*Z7-y_Uvty_Z1|B{rTS^qmEMLEYUSk03_Fg&!O3BMo{b^*`3SHvl0 zhnLTe^_vVIdcSHe)SQE}r~2dq)VZJ!aSKR?RS<(9lzkYo&dQ?mubnWmgMM37Nudwo z3Vz@R{=m2gENUE3V4NbIzAA$H1z0pagz94-PTJyX{b$yndsdKptmlKQKaaHj@3=ED zc7L?p@%ui|RegVYutK$64q4pe9+5sv34QUpo)u{1ci?)_7gXQd{PL>b0l(LI#rJmN zGuO+%GO`xneFOOr4EU(Wg}_%bhzUf;d@TU+V*2#}!2OLwg~%D;1FAu=Un>OgjPb3S z7l(riiCwgghC=Lm5hWGf5NdGp#01xQ59`HJcLXbUR3&n%P(+W2q$h2Qd z*6+-QXJ*&Kvk9ht0f0*rO_|FMBALen{j7T1l%=Q>gf#kma zQlg#I9+HB+z*5BMxdesMND`_W;q5|FaEURFk|~&{@qY32N$G$2B=&Po{=!)x5b!#n zxLzblkq{yj05#O7(GRuT39(06FJlalyv<#K4m}+vs>9@q-&31@1(QBv82{}Zkns~K ze{eHC_RDX0#^A*JQTwF`a=IkE6Ze@j#-8Q`tTT?k9`^ZhA~3eCZJ-Jr{~7Cx;H4A3 zcZ+Zj{mzFZbVvQ6U~n>$U2ZotGsERZ@}VKrgGh0xM;Jzt29%TX6_&CWzg+YYMozrM z`nutuS)_0dCM8UVaKRj804J4i%z2BA_8A4OJRQ$N(P9Mfn-gF;4#q788C@9XR0O3< zsoS4wIoyt046d+LnSCJOy@B@Uz*#GGd#+Ln1ek5Dv>(ZtD@tgZlPnZZJGBLr^JK+!$$?A_fA3LOrkoDRH&l7 zcMcD$Hsjko3`-{bn)jPL6E9Ds{WskMrivsUu5apD z?grQO@W7i5+%X&E&p|RBaEZ(sGLR@~(y^BI@lDMot^Ll?!`90KT!JXUhYS`ZgX3jnu@Ja^seA*M5R@f`=`ynQV4rc$uT1mvE?@tz)TN<=&H1%Z?5yjxcpO+6y_R z6EPuPKM5uxKpmZfT(WKjRRNHs@ib)F5WAP7QCADvmCSD#hPz$V10wiD&{NXyEwx5S z6NE`3z!IS^$s7m}PCwQutVQ#~w+V z=+~->DI*bR2j0^@dMr9`p>q^Ny~NrAVxrJtX2DUveic5vM%#N*XO|?YAWwNI$Q)_) zvE|L(L1jP@F%gOGtnlXtIv2&1i8q<)Xfz8O3G^Ea~e*HJsQgBxWL(yuLY+jqUK zRE~`-zklrGog(X}$9@ZVUw!8*=l`6mzYLtsg`AvBYz(cxmAhr^j0~(rzXdiOEeu_p zE$sf2(w(BPAvO5DlaN&uQ$4@p-b?fRs}d7&2UQ4Fh?1Hzu*YVjcndqJLw0#q@fR4u zJCJ}>_7-|QbvOfylj+e^_L`5Ep9gqd>XI3-O?Wp z-gt*P29f$Tx(mtS`0d05nHH=gm~Po_^OxxUwV294BDKT>PHVlC5bndncxGR!n(OOm znsNt@Q&N{TLrmsoKFw0&_M9$&+C24`sIXGWgQaz=kY;S{?w`z^Q0JXXBKFLj0w0U6P*+jPKyZHX9F#b0D1$&(- zrm8PJd?+SrVf^JlfTM^qGDK&-p2Kdfg?f>^%>1n8bu&byH(huaocL>l@f%c*QkX2i znl}VZ4R1en4S&Bcqw?$=Zi7ohqB$Jw9x`aM#>pHc0x z0$!q7iFu zZ`tryM70qBI6JWWTF9EjgG@>6SRzsd}3h+4D8d~@CR07P$LJ}MFsYi-*O%XVvD@yT|rJ+Mk zDllJ7$n0V&A!0flbOf)HE6P_afPWZmbhpliqJuw=-h+r;WGk|ntkWN(8tKlYpq5Ow z(@%s>IN8nHRaYb*^d;M(D$zGCv5C|uqmsDjwy4g=Lz>*OhO3z=)VD}C<65;`89Ye} 
zSCxrv#ILzIpEx1KdLPlM&%Cctf@FqTKvNPXC&`*H9=l=D3r!GLM?UV zOxa(8ZsB`&+76S-_xuj?G#wXBfDY@Z_tMpXJS7^mp z@YX&u0jYw2A+Z+bD#6sgVK5ZgdPSJV3>{K^4~%HV?rn~4D)*2H!67Y>0aOmzup`{D zzDp3c9yEbGCY$U<8biJ_gB*`jluz1ShUd!QUIQJ$*1;MXCMApJ^m*Fiv88RZ zFopLViw}{$Tyhh_{MLGIE2~sZ)t0VvoW%=8qKZ>h=adTe3QM$&$PO2lfqH@brt!9j ziePM8$!CgE9iz6B<6_wyTQj?qYa;eC^{x_0wuwV~W+^fZmFco-o%wsKSnjXFEx02V zF5C2t)T6Gw$Kf^_c;Ei3G~uC8SM-xyycmXyC2hAVi-IfXqhu$$-C=*|X?R0~hu z8`J6TdgflslhrmDZq1f?GXF7*ALeMmOEpRDg(s*H`4>_NAr`2uqF;k;JQ+8>A|_6ZNsNLECC%NNEb1Y1dP zbIEmNpK)#XagtL4R6BC{C5T(+=yA-(Z|Ap}U-AfZM#gwVpus3(gPn}Q$CExObJ5AC z)ff9Yk?wZ}dZ-^)?cbb9Fw#EjqQ8jxF4G3=L?Ra zg_)0QDMV1y^A^>HRI$x?Op@t;oj&H@1xt4SZ9(kifQ zb59B*`M99Td7@aZ3UWvj1rD0sE)d=BsBuW*KwkCds7ay(7*01_+L}b~7)VHI>F_!{ zyxg-&nCO?v#KOUec0{OOKy+sjWA;8rTE|Lv6I9H?CI?H(mUm8VXGwU$49LGpz&{nQp2}dinE1@lZ1iox6{ghN&v^GZv9J${7WaXj)<0S4g_uiJ&JCZ zr8-hsu`U%N;+9N^@&Q0^kVPB3)wY(rr}p7{p0qFHb3NUUHJb672+wRZs`gd1UjKPX z4o6zljKKA+Kkj?H>Ew63o%QjyBk&1!P22;MkD>sM0=z_s-G{mTixJCT9@_|*(p^bz zJ8?ZZ&;pzV+7#6Mn`_U-)k8Pjg?a;|Oe^us^PoPY$Va~yi8|?+&=y$f+lABT<*pZr zP}D{~Pq1Qyni+@|aP;ixO~mbEW9#c0OU#YbDZIaw=_&$K%Ep2f%hO^&P67hApZe`x zv8b`Mz@?M_7-)b!lkQKk)JXXUuT|B8kJlvqRmRpxtQDgvrHMXC1B$M@Y%Me!BSx3P z#2Eawl$HleZhhTS6Txm>lN_+I`>eV$&v9fOg)%zVn3O5mI*lAl>QcHuW6!Kixmq`X zBCZ*Ck6OYtDiK!N47>jxI&O2a9x7M|i^IagRr-fmrmikEQGgw%J7bO|)*$2FW95O4 zeBs>KR)izRG1gRVL;F*sr8A}aRHO0gc$$j&ds8CIO1=Gwq1%_~E)CWNn9pCtBE}+`Jelk4{>S)M)`Ll=!~gnn1yq^EX(+y*ik@3Ou0qU`IgYi3*doM+5&dU!cho$pZ zn%lhKeZkS72P?Cf68<#kll_6OAO26bIbueZx**j6o;I0cS^XiL`y+>{cD}gd%lux} z)3N>MaE24WBZ}s0ApfdM;5J_Ny}rfUyxfkC``Awo2#sgLnGPewK};dORuT?@I6(5~ z?kE)Qh$L&fwJXzK){iYx!l5$Tt|^D~MkGZPA}(o6f7w~O2G6Vvzdo*a;iXzk$B66$ zwF#;wM7A+(;uFG4+UAY(2`*3XXx|V$K8AYu#ECJYSl@S=uZW$ksfC$~qrrbQj4??z-)uz0QL}>k^?fPnJTPw% zGz)~?B4}u0CzOf@l^um}HZzbaIwPmb<)< zi_3@E9lc)Qe2_`*Z^HH;1CXOceL=CHpHS{HySy3T%<^NrWQ}G0i4e1xm_K3(+~oi$ zoHl9wzb?Z4j#90DtURtjtgvi7uw8DzHYmtPb;?%8vb9n@bszT=1qr)V_>R%s!92_` zfnHQPANx z<#hIjIMm#*(v*!OXtF+w8kLu`o?VZ5k7{`vw{Yc^qYclpUGIM_PBN1+c{#Vxv&E*@ 
zxg=W2W~JuV{IuRYw3>LSI1)a!thID@R=bU+cU@DbR^_SXY`MC7HOsCN z!dO4OKV7(E_Z8T#8MA1H`99?Z!r0)qKW_#|29X3#Jb+5+>qUidbeP1NJ@)(qi2S-X zao|f0_tl(O+$R|Qwd$H{_ig|~I1fbp_$NkI!0E;Y z6JrnU{1Ra6^on{9gUUB0mwzP3S%B#h0fjo>JvV~#+X0P~JV=IG=yHG$O+p5O3NUgG zEQ}z6BTp^Fie)Sg<){Z&I8NwPR(=mO4joTLHkJ>|Tnk23E(Bo`FSbPc05lF2-+)X? z6vV3*m~IBHTy*^E!<0nA(tCOJW2G4DsH7)BxLV8kICn5lu6@U*R`w)o9;Ro$i8=Q^V%uH8n3q=+Yf;SFRZu z!+F&PKcH#8cG?aSK_Tl@K9P#8o+jry@gdexz&d(Q=47<7nw@e@FFfIRNL9^)1i@;A z28+$Z#rjv-wj#heI|<&J_DiJ*s}xd-f!{J8jfqOHE`TiHHZVIA8CjkNQ_u;Ery^^t zl1I75&u^`1_q)crO+JT4rx|z2ToSC>)Or@-D zy3S>jW*sNIZR-EBsfyaJ+Jq4BQE4?SePtD2+jY8*%FsSLZ9MY>+wk?}}}AFAw)vr{ml)8LUG-y9>^t!{~|sgpxYc0Gnkg`&~R z-pilJZjr@y5$>B=VMdZ73svct%##v%wdX~9fz6i3Q-zOKJ9wso+h?VME7}SjL=!NUG{J?M&i!>ma`eoEa@IX`5G>B1(7;%}M*%-# zfhJ(W{y;>MRz!Ic8=S}VaBKqh;~7KdnGEHxcL$kA-6E~=!hrN*zw9N+_=odt<$_H_8dbo;0=42wcAETPCVGUr~v(`Uai zb{=D!Qc!dOEU6v)2eHSZq%5iqK?B(JlCq%T6av$Cb4Rko6onlG&?CqaX7Y_C_cOC3 zYZ;_oI(}=>_07}Oep&Ws7x7-R)cc8zfe!SYxJYP``pi$FDS)4Fvw5HH=FiU6xfVqIM!hJ;Rx8c0cB7~aPtNH(Nmm5Vh{ibAoU#J6 zImRCr?(iyu_4W_6AWo3*vxTPUw@vPwy@E0`(>1Qi=%>5eSIrp^`` zK*Y?fK_6F1W>-7UsB)RPC4>>Ps9)f+^MqM}8AUm@tZ->j%&h1M8s*s!LX5&WxQcAh z8mciQej@RPm?660%>{_D+7er>%zX_{s|$Z+;G7_sfNfBgY(zLB4Ey}J9F>zX#K0f6 z?dVNIeEh?EIShmP6>M+d|0wMM85Sa4diw1hrg|ITJ}JDg@o8y>(rF9mXk5M z2@D|NA)-7>wD&wF;S_$KS=eE84`BGw3g0?6wGxu8ys4rwI?9U=*^VF22t3%mbGeOh z`!O-OpF7#Vceu~F`${bW0nYVU9ecmk31V{tF%iv&5hWofC>I~cqAt@u6|R+|HLMMX zVxuSlMFOK_EQ86#E8&KwxIr8S9tj_goWtLv4f@!&h8;Ov41{J~496vp9vX=(LK#j! 
zAwi*21RAV-LD>9Cw3bV_9X(X3)Kr0-UaB*7Y>t82EQ%!)(&(XuAYtTsYy-dz+w=$ir)VJpe!_$ z6SGpX^i(af3{o=VlFPC);|J8#(=_8#vdxDe|Cok+ANhYwbE*FO`Su2m1~w+&9<_9~ z-|tTU_ACGN`~CNW5WYYBn^B#SwZ(t4%3aPp z;o)|L6Rk569KGxFLUPx@!6OOa+5OjQLK5w&nAmwxkC5rZ|m&HT8G%GVZxB_@ME z>>{rnXUqyiJrT(8GMj_ap#yN_!9-lO5e8mR3cJiK3NE{_UM&=*vIU`YkiL$1%kf+1 z4=jk@7EEj`u(jy$HnzE33ZVW_J4bj}K;vT?T91YlO(|Y0FU4r+VdbmQ97%(J5 zkK*Bed8+C}FcZ@HIgdCMioV%A<*4pw_n}l*{Cr4}a(lq|injK#O?$tyvyE`S%(1`H z_wwRvk#13ElkZvij2MFGOj`fhy?nC^8`Zyo%yVcUAfEr8x&J#A{|moUBAV_^f$hpaUuyQeY3da^ zS9iRgf87YBwfe}>BO+T&Fl%rfpZh#+AM?Dq-k$Bq`vG6G_b4z%Kbd&v>qFjow*mBl z-OylnqOpLg}or7_VNwRg2za3VBK6FUfFX{|TD z`Wt0Vm2H$vdlRWYQJqDmM?JUbVqL*ZQY|5&sY*?!&%P8qhA~5+Af<{MaGo(dl&C5t zE%t!J0 zh6jqANt4ABdPxSTrVV}fLsRQal*)l&_*rFq(Ez}ClEH6LHv{J#v?+H-BZ2)Wy{K@9 z+ovXHq~DiDvm>O~r$LJo!cOuwL+Oa--6;UFE2q@g3N8Qkw5E>ytz^(&($!O47+i~$ zKM+tkAd-RbmP{s_rh+ugTD;lriL~`Xwkad#;_aM?nQ7L_muEFI}U_4$phjvYgleK~`Fo`;GiC07&Hq1F<%p;9Q;tv5b?*QnR%8DYJH3P>Svmv47Y>*LPZJy8_{9H`g6kQpyZU{oJ`m%&p~D=K#KpfoJ@ zn-3cqmHsdtN!f?~w+(t+I`*7GQA#EQC^lUA9(i6=i1PqSAc|ha91I%X&nXzjYaM{8$s&wEx@aVkQ6M{E2 zfzId#&r(XwUNtPcq4Ngze^+XaJA1EK-%&C9j>^9(secqe{}z>hR5CFNveMsVA)m#S zk)_%SidkY-XmMWlVnQ(mNJ>)ooszQ#vaK;!rPmGKXV7am^_F!Lz>;~{VrIO$;!#30XRhE1QqO_~#+Ux;B_D{Nk=grn z8Y0oR^4RqtcYM)7a%@B(XdbZCOqnX#fD{BQTeLvRHd(irHKq=4*jq34`6@VAQR8WG z^%)@5CXnD_T#f%@-l${>y$tfb>2LPmc{~5A82|16mH)R?&r#KKLs7xpN-D`=&Cm^R zvMA6#Ahr<3X>Q7|-qfTY)}32HkAz$_mibYV!I)u>bmjK`qwBe(>za^0Kt*HnFbSdO z1>+ryKCNxmm^)*$XfiDOF2|{-v3KKB?&!(S_Y=Ht@|ir^hLd978xuI&N{k>?(*f8H z=ClxVJK_%_z1TH0eUwm2J+2To7FK4o+n_na)&#VLn1m;!+CX+~WC+qg1?PA~KdOlC zW)C@pw75_xoe=w7i|r9KGIvQ$+3K?L{7TGHwrQM{dCp=Z*D}3kX7E-@sZnup!BImw z*T#a=+WcTwL78exTgBn|iNE3#EsOorO z*kt)gDzHiPt07fmisA2LWN?AymkdqTgr?=loT7z@d`wnlr6oN}@o|&JX!yPzC*Y8d zu6kWlTzE1)ckyBn+0Y^HMN+GA$wUO_LN6W>mxCo!0?oiQvT`z$jbSEu&{UHRU0E8# z%B^wOc@S!yhMT49Y)ww(Xta^8pmPCe@eI5C*ed96)AX9<>))nKx0(sci8gwob_1}4 z0DIL&vsJ1_s%<@y%U*-eX z5rN&(zef-5G~?@r79oZGW1d!WaTqQn0F6RIOa9tJ=0(kdd{d1{<*tHT#cCvl*i>YY 
zH+L7jq8xZNcTUBqj(S)ztTU!TM!RQ}In*n&Gn<>(60G7}4%WQL!o>hbJqNDSGwl#H z`4k+twp0cj%PsS+NKaxslAEu9!#U3xT1|_KB6`h=PI0SW`P9GTa7caD1}vKEglV8# zjKZR`pluCW19c2fM&ZG)c3T3Um;ir3y(tSCJ7Agl6|b524dy5El{^EQBG?E61H0XY z`bqg!;zhGhyMFl&(o=JWEJ8n~z)xI}A@C0d2hQGvw7nGv)?POU@(kS1m=%`|+^ika zXl8zjS?xqW$WlO?Ewa;vF~XbybHBor$f<%I&*t$F5fynwZlTGj|IjZtVfGa7l&tK} zW>I<69w(cZLu)QIVG|M2xzW@S+70NinQzk&Y0+3WT*cC)rx~04O-^<{JohU_&HL5XdUKW!uFy|i$FB|EMu0eUyW;gsf`XfIc!Z0V zeK&*hPL}f_cX=@iv>K%S5kL;cl_$v?n(Q9f_cChk8Lq$glT|=e+T*8O4H2n<=NGmn z+2*h+v;kBvF>}&0RDS>)B{1!_*XuE8A$Y=G8w^qGMtfudDBsD5>T5SB;Qo}fSkkiV ze^K^M(UthkwrD!&*tTsu>Dacdj_q`~V%r_twr$(Ct&_dKeeXE?fA&4&yASJWJ*}~- zel=@W)tusynfC_YqH4ll>4Eg`Xjs5F7Tj>tTLz<0N3)X<1px_d2yUY>X~y>>93*$) z5PuNMQLf9Bu?AAGO~a_|J2akO1M*@VYN^VxvP0F$2>;Zb9;d5Yfd8P%oFCCoZE$ z4#N$^J8rxYjUE_6{T%Y>MmWfHgScpuGv59#4u6fpTF%~KB^Ae`t1TD_^Ud#DhL+Dm zbY^VAM#MrAmFj{3-BpVSWph2b_Y6gCnCAombVa|1S@DU)2r9W<> zT5L8BB^er3zxKt1v(y&OYk!^aoQisqU zH(g@_o)D~BufUXcPt!Ydom)e|aW{XiMnes2z&rE?og>7|G+tp7&^;q?Qz5S5^yd$i z8lWr4g5nctBHtigX%0%XzIAB8U|T6&JsC4&^hZBw^*aIcuNO47de?|pGXJ4t}BB`L^d8tD`H`i zqrP8?#J@8T#;{^B!KO6J=@OWKhAerih(phML`(Rg7N1XWf1TN>=Z3Do{l_!d~DND&)O)D>ta20}@Lt77qSnVsA7>)uZAaT9bsB>u&aUQl+7GiY2|dAEg@%Al3i316y;&IhQL^8fw_nwS>f60M_-m+!5)S_6EPM7Y)(Nq^8gL7(3 zOiot`6Wy6%vw~a_H?1hLVzIT^i1;HedHgW9-P#)}Y6vF%C=P70X0Tk^z9Te@kPILI z_(gk!k+0%CG)%!WnBjjw*kAKs_lf#=5HXC00s-}oM-Q1aXYLj)(1d!_a7 z*Gg4Fe6F$*ujVjI|79Z5+Pr`us%zW@ln++2l+0hsngv<{mJ%?OfSo_3HJXOCys{Ug z00*YR-(fv<=&%Q!j%b-_ppA$JsTm^_L4x`$k{VpfLI(FMCap%LFAyq;#ns5bR7V+x zO!o;c5y~DyBPqdVQX)8G^G&jWkBy2|oWTw>)?5u}SAsI$RjT#)lTV&Rf8;>u*qXnb z8F%Xb=7#$m)83z%`E;49)t3fHInhtc#kx4wSLLms!*~Z$V?bTyUGiS&m>1P(952(H zuHdv=;o*{;5#X-uAyon`hP}d#U{uDlV?W?_5UjJvf%11hKwe&(&9_~{W)*y1nR5f_ z!N(R74nNK`y8>B!0Bt_Vr!;nc3W>~RiKtGSBkNlsR#-t^&;$W#)f9tTlZz>n*+Fjz z3zXZ;jf(sTM(oDzJt4FJS*8c&;PLTW(IQDFs_5QPy+7yhi1syPCarvqrHFcf&yTy)^O<1EBx;Ir`5W{TIM>{8w&PB>ro4;YD<5LF^TjTb0!zAP|QijA+1Vg>{Afv^% zmrkc4o6rvBI;Q8rj4*=AZacy*n8B{&G3VJc)so4$XUoie0)vr;qzPZVbb<#Fc=j+8CGBWe$n|3K& 
z_@%?{l|TzKSlUEO{U{{%Fz_pVDxs7i9H#bnbCw7@4DR=}r_qV!Zo~CvD4ZI*+j3kO zW6_=|S`)(*gM0Z;;}nj`73OigF4p6_NPZQ-Od~e$c_);;4-7sR>+2u$6m$Gf%T{aq zle>e3(*Rt(TPD}03n5)!Ca8Pu!V}m6v0o1;5<1h$*|7z|^(3$Y&;KHKTT}hV056wuF0Xo@mK-52~r=6^SI1NC%c~CC?n>yX6wPTgiWYVz!Sx^atLby9YNn1Rk{g?|pJaxD4|9cUf|V1_I*w zzxK)hRh9%zOl=*$?XUjly5z8?jPMy%vEN)f%T*|WO|bp5NWv@B(K3D6LMl!-6dQg0 zXNE&O>Oyf%K@`ngCvbGPR>HRg5!1IV$_}m@3dWB7x3t&KFyOJn9pxRXCAzFr&%37wXG;z^xaO$ekR=LJG ztIHpY8F5xBP{mtQidqNRoz= z@){+N3(VO5bD+VrmS^YjG@+JO{EOIW)9=F4v_$Ed8rZtHvjpiEp{r^c4F6Ic#ChlC zJX^DtSK+v(YdCW)^EFcs=XP7S>Y!4=xgmv>{S$~@h=xW-G4FF9?I@zYN$e5oF9g$# zb!eVU#J+NjLyX;yb)%SY)xJdvGhsnE*JEkuOVo^k5PyS=o#vq!KD46UTW_%R=Y&0G zFj6bV{`Y6)YoKgqnir2&+sl+i6foAn-**Zd1{_;Zb7Ki=u394C5J{l^H@XN`_6XTKY%X1AgQM6KycJ+= zYO=&t#5oSKB^pYhNdzPgH~aEGW2=ec1O#s-KG z71}LOg@4UEFtp3GY1PBemXpNs6UK-ax*)#$J^pC_me;Z$Je(OqLoh|ZrW*mAMBFn< zHttjwC&fkVfMnQeen8`Rvy^$pNRFVaiEN4Pih*Y3@jo!T0nsClN)pdrr9AYLcZxZ| zJ5Wlj+4q~($hbtuY zVQ7hl>4-+@6g1i`1a)rvtp-;b0>^`Dloy(#{z~ytgv=j4q^Kl}wD>K_Y!l~ zp(_&7sh`vfO(1*MO!B%<6E_bx1)&s+Ae`O)a|X=J9y~XDa@UB`m)`tSG4AUhoM=5& znWoHlA-(z@3n0=l{E)R-p8sB9XkV zZ#D8wietfHL?J5X0%&fGg@MH~(rNS2`GHS4xTo7L$>TPme+Is~!|79=^}QbPF>m%J zFMkGzSndiPO|E~hrhCeo@&Ea{M(ieIgRWMf)E}qeTxT8Q#g-!Lu*x$v8W^M^>?-g= zwMJ$dThI|~M06rG$Sv@C@tWR>_YgaG&!BAbkGggVQa#KdtDB)lMLNVLN|51C@F^y8 zCRvMB^{GO@j=cHfmy}_pCGbP%xb{pNN>? 
z?7tBz$1^zVaP|uaatYaIN+#xEN4jBzwZ|YI_)p(4CUAz1ZEbDk>J~Y|63SZaak~#0 zoYKruYsWHoOlC1(MhTnsdUOwQfz5p6-D0}4;DO$B;7#M{3lSE^jnTT;ns`>!G%i*F?@pR1JO{QTuD0U+~SlZxcc8~>IB{)@8p`P&+nDxNj`*gh|u?yrv$phpQcW)Us)bi`kT%qLj(fi{dWRZ%Es2!=3mI~UxiW0$-v3vUl?#g{p6eF zMEUAqo5-L0Ar(s{VlR9g=j7+lt!gP!UN2ICMokAZ5(Agd>})#gkA2w|5+<%-CuEP# zqgcM}u@3(QIC^Gx<2dbLj?cFSws_f3e%f4jeR?4M^M3cx1f+Qr6ydQ>n)kz1s##2w zk}UyQc+Z5G-d-1}{WzjkLXgS-2P7auWSJ%pSnD|Uivj5u!xk0 z_^-N9r9o;(rFDt~q1PvE#iJZ_f>J3gcP$)SOqhE~pD2|$=GvpL^d!r z6u=sp-CrMoF7;)}Zd7XO4XihC4ji?>V&(t^?@3Q&t9Mx=qex6C9d%{FE6dvU6%d94 zIE;hJ1J)cCqjv?F``7I*6bc#X)JW2b4f$L^>j{*$R`%5VHFi*+Q$2;nyieduE}qdS{L8y8F08yLs?w}{>8>$3236T-VMh@B zq-nujsb_1aUv_7g#)*rf9h%sFj*^mIcImRV*k~Vmw;%;YH(&ylYpy!&UjUVqqtfG` zox3esju?`unJJA_zKXRJP)rA3nXc$m^{S&-p|v|-0x9LHJm;XIww7C#R$?00l&Yyj z=e}gKUOpsImwW?N)+E(awoF@HyP^EhL+GlNB#k?R<2>95hz!h9sF@U20DHSB3~WMa zk90+858r@-+vWwkawJ)8ougd(i#1m3GLN{iSTylYz$brAsP%=&m$mQQrH$g%3-^VR zE%B`Vi&m8f3T~&myTEK28BDWCVzfWir1I?03;pX))|kY5ClO^+bae z*7E?g=3g7EiisYOrE+lA)2?Ln6q2*HLNpZEWMB|O-JI_oaHZB%CvYB(%=tU= zE*OY%QY58fW#RG5=gm0NR#iMB=EuNF@)%oZJ}nmm=tsJ?eGjia{e{yuU0l3{d^D@)kVDt=1PE)&tf_hHC%0MB znL|CRCPC}SeuVTdf>-QV70`0(EHizc21s^sU>y%hW0t!0&y<7}Wi-wGy>m%(-jsDj zP?mF|>p_K>liZ6ZP(w5(|9Ga%>tLgb$|doDDfkdW>Z z`)>V2XC?NJT26mL^@ zf+IKr27TfM!UbZ@?zRddC7#6ss1sw%CXJ4FWC+t3lHZupzM77m^=9 z&(a?-LxIq}*nvv)y?27lZ{j zifdl9hyJudyP2LpU$-kXctshbJDKS{WfulP5Dk~xU4Le4c#h^(YjJit4#R8_khheS z|8(>2ibaHES4+J|DBM7I#QF5u-*EdN{n=Kt@4Zt?@Tv{JZA{`4 zU#kYOv{#A&gGPwT+$Ud}AXlK3K7hYzo$(fBSFjrP{QQ zeaKg--L&jh$9N}`pu{Bs>?eDFPaWY4|9|foN%}i;3%;@4{dc+iw>m}{3rELqH21G! z`8@;w-zsJ1H(N3%|1B@#ioLOjib)j`EiJqPQVSbPSPVHCj6t5J&(NcWzBrzCiDt{4 zdlPAUKldz%6x5II1H_+jv)(xVL+a;P+-1hv_pM>gMRr%04@k;DTokASSKKhU1Qms| zrWh3a!b(J3n0>-tipg{a?UaKsP7?+|@A+1WPDiQIW1Sf@qDU~M_P65_s}7(gjTn0X zucyEm)o;f8UyshMy&>^SC3I|C6jR*R_GFwGranWZe*I>K+0k}pBuET&M~ z;Odo*ZcT?ZpduHyrf8E%IBFtv;JQ!N_m>!sV6ly$_1D{(&nO~w)G~Y`7sD3#hQk%^ zp}ucDF_$!6DAz*PM8yE(&~;%|=+h(Rn-=1Wykas_-@d&z#=S}rDf`4w(rVlcF&lF! 
z=1)M3YVz7orwk^BXhslJ8jR);sh^knJW(Qmm(QdSgIAIdlN4Te5KJisifjr?eB{FjAX1a0AB>d?qY4Wx>BZ8&}5K0fA+d{l8 z?^s&l8#j7pR&ijD?0b%;lL9l$P_mi2^*_OL+b}4kuLR$GAf85sOo02?Y#90}CCDiS zZ%rbCw>=H~CBO=C_JVV=xgDe%b4FaEFtuS7Q1##y686r%F6I)s-~2(}PWK|Z8M+Gu zl$y~5@#0Ka%$M<&Cv%L`a8X^@tY&T7<0|(6dNT=EsRe0%kp1Qyq!^43VAKYnr*A5~ zsI%lK1ewqO;0TpLrT9v}!@vJK{QoVa_+N4FYT#h?Y8rS1S&-G+m$FNMP?(8N`MZP zels(*?kK{{^g9DOzkuZXJ2;SrOQsp9T$hwRB1(phw1c7`!Q!by?Q#YsSM#I12RhU{$Q+{xj83axHcftEc$mNJ8_T7A-BQc*k(sZ+~NsO~xAA zxnbb%dam_fZlHvW7fKXrB~F&jS<4FD2FqY?VG?ix*r~MDXCE^WQ|W|WM;gsIA4lQP zJ2hAK@CF*3*VqPr2eeg6GzWFlICi8S>nO>5HvWzyZTE)hlkdC_>pBej*>o0EOHR|) z$?};&I4+_?wvL*g#PJ9)!bc#9BJu1(*RdNEn>#Oxta(VWeM40ola<0aOe2kSS~{^P zDJBd}0L-P#O-CzX*%+$#v;(x%<*SPgAje=F{Zh-@ucd2DA(yC|N_|ocs*|-!H%wEw z@Q!>siv2W;C^^j^59OAX03&}&D*W4EjCvfi(ygcL#~t8XGa#|NPO+*M@Y-)ctFA@I z-p7npT1#5zOLo>7q?aZpCZ=iecn3QYklP;gF0bq@>oyBq94f6C=;Csw3PkZ|5q=(c zfs`aw?II0e(h=|7o&T+hq&m$; zBrE09Twxd9BJ2P+QPN}*OdZ-JZV7%av@OM7v!!NL8R;%WFq*?{9T3{ct@2EKgc8h) zMxoM$SaF#p<`65BwIDfmXG6+OiK0e)`I=!A3E`+K@61f}0e z!2a*FOaDrOe>U`q%K!QN`&=&0C~)CaL3R4VY(NDt{Xz(Xpqru5=r#uQN1L$Je1*dkdqQ*=lofQaN%lO!<5z9ZlHgxt|`THd>2 zsWfU$9=p;yLyJyM^t zS2w9w?Bpto`@H^xJpZDKR1@~^30Il6oFGfk5%g6w*C+VM)+%R@gfIwNprOV5{F^M2 zO?n3DEzpT+EoSV-%OdvZvNF+pDd-ZVZ&d8 zKeIyrrfPN=EcFRCPEDCVflX#3-)Ik_HCkL(ejmY8vzcf-MTA{oHk!R2*36`O68$7J zf}zJC+bbQk--9Xm!u#lgLvx8TXx2J258E5^*IZ(FXMpq$2LUUvhWQPs((z1+2{Op% z?J}9k5^N=z;7ja~zi8a_-exIqWUBJwohe#4QJ`|FF*$C{lM18z^#hX6!5B8KAkLUX ziP=oti-gpV(BsLD{0(3*dw}4JxK23Y7M{BeFPucw!sHpY&l%Ws4pSm`+~V7;bZ%Dx zeI)MK=4vC&5#;2MT7fS?^ch9?2;%<8Jlu-IB&N~gg8t;6S-#C@!NU{`p7M8@2iGc& zg|JPg%@gCoCQ&s6JvDU&`X2S<57f(k8nJ1wvBu{8r?;q3_kpZZ${?|( z+^)UvR33sjSd)aT!UPkA;ylO6{aE3MQa{g%Mcf$1KONcjO@&g5zPHWtzM1rYC{_K> zgQNcs<{&X{OA=cEWw5JGqpr0O>x*Tfak2PE9?FuWtz^DDNI}rwAaT0(bdo-<+SJ6A z&}S%boGMWIS0L}=S>|-#kRX;e^sUsotry(MjE|3_9duvfc|nwF#NHuM-w7ZU!5ei8 z6Mkf>2)WunY2eU@C-Uj-A zG(z0Tz2YoBk>zCz_9-)4a>T46$(~kF+Y{#sA9MWH%5z#zNoz)sdXq7ZR_+`RZ%0(q zC7&GyS_|BGHNFl8Xa%@>iWh%Gr?=J5<(!OEjauj5jyrA-QXBjn0OAhJJ9+v=!LK`` 
z@g(`^*84Q4jcDL`OA&ZV60djgwG`|bcD*i50O}Q{9_noRg|~?dj%VtKOnyRs$Uzqg z191aWoR^rDX#@iSq0n z?9Sg$WSRPqSeI<}&n1T3!6%Wj@5iw5`*`Btni~G=&;J+4`7g#OQTa>u`{4ZZ(c@s$ zK0y;ySOGD-UTjREKbru{QaS>HjN<2)R%Nn-TZiQ(Twe4p@-saNa3~p{?^V9Nixz@a zykPv~<@lu6-Ng9i$Lrk(xi2Tri3q=RW`BJYOPC;S0Yly%77c727Yj-d1vF!Fuk{Xh z)lMbA69y7*5ufET>P*gXQrxsW+ zz)*MbHZv*eJPEXYE<6g6_M7N%#%mR{#awV3i^PafNv(zyI)&bH?F}2s8_rR(6%!V4SOWlup`TKAb@ee>!9JKPM=&8g#BeYRH9FpFybxBXQI2|g}FGJfJ+ zY-*2hB?o{TVL;Wt_ek;AP5PBqfDR4@Z->_182W z{P@Mc27j6jE*9xG{R$>6_;i=y{qf(c`5w9fa*`rEzX6t!KJ(p1H|>J1pC-2zqWENF zmm=Z5B4u{cY2XYl(PfrInB*~WGWik3@1oRhiMOS|D;acnf-Bs(QCm#wR;@Vf!hOPJ zgjhDCfDj$HcyVLJ=AaTbQ{@vIv14LWWF$=i-BDoC11}V;2V8A`S>_x)vIq44-VB-v z*w-d}$G+Ql?En8j!~ZkCpQ$|cA0|+rrY>tiCeWxkRGPoarxlGU2?7%k#F693RHT24 z-?JsiXlT2PTqZqNb&sSc>$d;O4V@|b6VKSWQb~bUaWn1Cf0+K%`Q&Wc<>mQ>*iEGB zbZ;aYOotBZ{vH3y<0A*L0QVM|#rf*LIsGx(O*-7)r@yyBIzJnBFSKBUSl1e|8lxU* zzFL+YDVVkIuzFWeJ8AbgN&w(4-7zbiaMn{5!JQXu)SELk*CNL+Fro|2v|YO)1l15t zs(0^&EB6DPMyaqvY>=KL>)tEpsn;N5Q#yJj<9}ImL((SqErWN3Q=;tBO~ExTCs9hB z2E$7eN#5wX4<3m^5pdjm#5o>s#eS_Q^P)tm$@SawTqF*1dj_i#)3};JslbLKHXl_N z)Fxzf>FN)EK&Rz&*|6&%Hs-^f{V|+_vL1S;-1K-l$5xiC@}%uDuwHYhmsV?YcOUlk zOYkG5v2+`+UWqpn0aaaqrD3lYdh0*!L`3FAsNKu=Q!vJu?Yc8n|CoYyDo_`r0mPoo z8>XCo$W4>l(==h?2~PoRR*kEe)&IH{1sM41mO#-36`02m#nTX{r*r`Q5rZ2-sE|nA zhnn5T#s#v`52T5|?GNS`%HgS2;R(*|^egNPDzzH_z^W)-Q98~$#YAe)cEZ%vge965AS_am#DK#pjPRr-!^za8>`kksCAUj(Xr*1NW5~e zpypt_eJpD&4_bl_y?G%>^L}=>xAaV>KR6;^aBytqpiHe%!j;&MzI_>Sx7O%F%D*8s zSN}cS^<{iiK)=Ji`FpO#^zY!_|D)qeRNAtgmH)m;qC|mq^j(|hL`7uBz+ULUj37gj zksdbnU+LSVo35riSX_4z{UX=%n&}7s0{WuZYoSfwAP`8aKN9P@%e=~1`~1ASL-z%# zw>DO&ixr}c9%4InGc*_y42bdEk)ZdG7-mTu0bD@_vGAr*NcFoMW;@r?@LUhRI zCUJgHb`O?M3!w)|CPu~ej%fddw20lod?Ufp8Dmt0PbnA0J%KE^2~AIcnKP()025V> zG>noSM3$5Btmc$GZoyP^v1@Poz0FD(6YSTH@aD0}BXva?LphAiSz9f&Y(aDAzBnUh z?d2m``~{z;{}kZJ>a^wYI?ry(V9hIoh;|EFc0*-#*`$T0DRQ1;WsqInG;YPS+I4{g zJGpKk%%Sdc5xBa$Q^_I~(F97eqDO7AN3EN0u)PNBAb+n+ zWBTxQx^;O9o0`=g+Zrt_{lP!sgWZHW?8bLYS$;1a@&7w9rD9|Ge;Gb?sEjFoF9-6v 
z#!2)t{DMHZ2@0W*fCx;62d#;jouz`R5Y(t{BT=$N4yr^^o$ON8d{PQ=!O zX17^CrdM~7D-;ZrC!||<+FEOxI_WI3CA<35va%4v>gc zEX-@h8esj=a4szW7x{0g$hwoWRQG$yK{@3mqd-jYiVofJE!Wok1* znV7Gm&Ssq#hFuvj1sRyHg(6PFA5U*Q8Rx>-blOs=lb`qa{zFy&n4xY;sd$fE+<3EI z##W$P9M{B3c3Si9gw^jlPU-JqD~Cye;wr=XkV7BSv#6}DrsXWFJ3eUNrc%7{=^sP> zrp)BWKA9<}^R9g!0q7yWlh;gr_TEOD|#BmGq<@IV;ueg+D2}cjpp+dPf&Q(36sFU&K8}hA85U61faW&{ zlB`9HUl-WWCG|<1XANN3JVAkRYvr5U4q6;!G*MTdSUt*Mi=z_y3B1A9j-@aK{lNvx zK%p23>M&=KTCgR!Ee8c?DAO2_R?B zkaqr6^BSP!8dHXxj%N1l+V$_%vzHjqvu7p@%Nl6;>y*S}M!B=pz=aqUV#`;h%M0rU zHfcog>kv3UZAEB*g7Er@t6CF8kHDmKTjO@rejA^ULqn!`LwrEwOVmHx^;g|5PHm#B zZ+jjWgjJ!043F+&#_;D*mz%Q60=L9Ove|$gU&~As5^uz@2-BfQ!bW)Khn}G+Wyjw- z19qI#oB(RSNydn0t~;tAmK!P-d{b-@@E5|cdgOS#!>%#Rj6ynkMvaW@37E>@hJP^8 z2zk8VXx|>#R^JCcWdBCy{0nPmYFOxN55#^-rlqobe0#L6)bi?E?SPymF*a5oDDeSd zO0gx?#KMoOd&G(2O@*W)HgX6y_aa6iMCl^~`{@UR`nMQE`>n_{_aY5nA}vqU8mt8H z`oa=g0SyiLd~BxAj2~l$zRSDHxvDs;I4>+M$W`HbJ|g&P+$!U7-PHX4RAcR0szJ*( ze-417=bO2q{492SWrqDK+L3#ChUHtz*@MP)e^%@>_&#Yk^1|tv@j4%3T)diEX zATx4K*hcO`sY$jk#jN5WD<=C3nvuVsRh||qDHnc~;Kf59zr0;c7VkVSUPD%NnnJC_ zl3F^#f_rDu8l}l8qcAz0FFa)EAt32IUy_JLIhU_J^l~FRH&6-ivSpG2PRqzDdMWft>Zc(c)#tb%wgmWN%>IOPm zZi-noqS!^Ftb81pRcQi`X#UhWK70hy4tGW1mz|+vI8c*h@ zfFGJtW3r>qV>1Z0r|L>7I3un^gcep$AAWfZHRvB|E*kktY$qQP_$YG60C@X~tTQjB3%@`uz!qxtxF+LE!+=nrS^07hn` zEgAp!h|r03h7B!$#OZW#ACD+M;-5J!W+{h|6I;5cNnE(Y863%1(oH}_FTW})8zYb$7czP zg~Szk1+_NTm6SJ0MS_|oSz%e(S~P-&SFp;!k?uFayytV$8HPwuyELSXOs^27XvK-D zOx-Dl!P|28DK6iX>p#Yb%3`A&CG0X2S43FjN%IB}q(!hC$fG}yl1y9W&W&I@KTg6@ zK^kpH8=yFuP+vI^+59|3%Zqnb5lTDAykf z9S#X`3N(X^SpdMyWQGOQRjhiwlj!0W-yD<3aEj^&X%=?`6lCy~?`&WSWt z?U~EKFcCG_RJ(Qp7j=$I%H8t)Z@6VjA#>1f@EYiS8MRHZphp zMA_5`znM=pzUpBPO)pXGYpQ6gkine{6u_o!P@Q+NKJ}k!_X7u|qfpAyIJb$_#3@wJ z<1SE2Edkfk9C!0t%}8Yio09^F`YGzpaJHGk*-ffsn85@)%4@`;Fv^8q(-Wk7r=Q8p zT&hD`5(f?M{gfzGbbwh8(}G#|#fDuk7v1W)5H9wkorE0ZZjL0Q1=NRGY>zwgfm81DdoaVwNH;or{{eSyybt)m<=zXoA^RALYG-2t zouH|L*BLvmm9cdMmn+KGopyR@4*=&0&4g|FLoreZOhRmh=)R0bg~ 
zT2(8V_q7~42-zvb)+y959OAv!V$u(O3)%Es0M@CRFmG{5sovIq4%8Ahjk#*5w{+)+ zMWQoJI_r$HxL5km1#6(e@{lK3Udc~n0@g`g$s?VrnQJ$!oPnb?IHh-1qA`Rz$)Ai< z6w$-MJW-gKNvOhL+XMbE7&mFt`x1KY>k4(!KbbpZ`>`K@1J<(#vVbjx@Z@(6Q}MF# zMnbr-f55(cTa^q4+#)=s+ThMaV~E`B8V=|W_fZWDwiso8tNMTNse)RNBGi=gVwgg% zbOg8>mbRN%7^Um-7oj4=6`$|(K7!+t^90a{$18Z>}<#!bm%ZEFQ{X(yBZMc>lCz0f1I2w9Sq zuGh<9<=AO&g6BZte6hn>Qmvv;Rt)*cJfTr2=~EnGD8P$v3R|&1RCl&7)b+`=QGapi zPbLg_pxm`+HZurtFZ;wZ=`Vk*do~$wB zxoW&=j0OTbQ=Q%S8XJ%~qoa3Ea|au5o}_(P;=!y-AjFrERh%8la!z6Fn@lR?^E~H12D?8#ht=1F;7@o4$Q8GDj;sSC%Jfn01xgL&%F2 zwG1|5ikb^qHv&9hT8w83+yv&BQXOQyMVJSBL(Ky~p)gU3#%|blG?IR9rP^zUbs7rOA0X52Ao=GRt@C&zlyjNLv-} z9?*x{y(`509qhCV*B47f2hLrGl^<@SuRGR!KwHei?!CM10Tq*YDIoBNyRuO*>3FU? zHjipIE#B~y3FSfOsMfj~F9PNr*H?0oHyYB^G(YyNh{SxcE(Y-`x5jFMKb~HO*m+R% zrq|ic4fzJ#USpTm;X7K+E%xsT_3VHKe?*uc4-FsILUH;kL>_okY(w`VU*8+l>o>Jm ziU#?2^`>arnsl#)*R&nf_%>A+qwl%o{l(u)M?DK1^mf260_oteV3#E_>6Y4!_hhVD zM8AI6MM2V*^_M^sQ0dmHu11fy^kOqXqzpr?K$`}BKWG`=Es(9&S@K@)ZjA{lj3ea7_MBP zk(|hBFRjHVMN!sNUkrB;(cTP)T97M$0Dtc&UXSec<+q?y>5=)}S~{Z@ua;1xt@=T5 zI7{`Z=z_X*no8s>mY;>BvEXK%b`a6(DTS6t&b!vf_z#HM{Uoy_5fiB(zpkF{})ruka$iX*~pq1ZxD?q68dIo zIZSVls9kFGsTwvr4{T_LidcWtt$u{kJlW7moRaH6+A5hW&;;2O#$oKyEN8kx`LmG)Wfq4ykh+q{I3|RfVpkR&QH_x;t41Uw z`P+tft^E2B$domKT@|nNW`EHwyj>&}K;eDpe z1bNOh=fvIfk`&B61+S8ND<(KC%>y&?>opCnY*r5M+!UrWKxv0_QvTlJc>X#AaI^xo zaRXL}t5Ej_Z$y*|w*$6D+A?Lw-CO-$itm^{2Ct82-<0IW)0KMNvJHgBrdsIR0v~=H z?n6^}l{D``Me90`^o|q!olsF?UX3YSq^6Vu>Ijm>>PaZI8G@<^NGw{Cx&%|PwYrfw zR!gX_%AR=L3BFsf8LxI|K^J}deh0ZdV?$3r--FEX`#INxsOG6_=!v)DI>0q|BxT)z z-G6kzA01M?rba+G_mwNMQD1mbVbNTWmBi*{s_v_Ft9m2Avg!^78(QFu&n6mbRJ2bA zv!b;%yo{g*9l2)>tsZJOOp}U~8VUH`}$ z8p_}t*XIOehezolNa-a2x0BS})Y9}&*TPgua{Ewn-=wVrmJUeU39EKx+%w%=ixQWK zDLpwaNJs65#6o7Ln7~~X+p_o2BR1g~VCfxLzxA{HlWAI6^H;`juI=&r1jQrUv_q0Z z1Ja-tjdktrrP>GOC*#p?*xfQU5MqjMsBe!9lh(u8)w$e@Z|>aUHI5o;MGw*|Myiz3 z-f0;pHg~Q#%*Kx8MxH%AluVXjG2C$)WL-K63@Q`#y9_k_+}eR(x4~dp7oV-ek0H>I zgy8p#i4GN{>#v=pFYUQT(g&b$OeTy-X_#FDgNF8XyfGY6R!>inYn8IR2RDa&O!(6< 
znXs{W!bkP|s_YI*Yx%4stI`=ZO45IK6rBs`g7sP40ic}GZ58s?Mc$&i`kq_tfci>N zIHrC0H+Qpam1bNa=(`SRKjixBTtm&e`j9porEci!zdlg1RI0Jw#b(_Tb@RQK1Zxr_ z%7SUeH6=TrXt3J@js`4iDD0=IoHhK~I7^W8^Rcp~Yaf>2wVe|Hh1bUpX9ATD#moByY57-f2Ef1TP^lBi&p5_s7WGG9|0T}dlfxOx zXvScJO1Cnq`c`~{Dp;{;l<-KkCDE+pmexJkd}zCgE{eF=)K``-qC~IT6GcRog_)!X z?fK^F8UDz$(zFUrwuR$qro5>qqn>+Z%<5>;_*3pZ8QM|yv9CAtrAx;($>4l^_$_-L z*&?(77!-=zvnCVW&kUcZMb6;2!83si518Y%R*A3JZ8Is|kUCMu`!vxDgaWjs7^0j( ziTaS4HhQ)ldR=r)_7vYFUr%THE}cPF{0H45FJ5MQW^+W>P+eEX2kLp3zzFe*-pFVA zdDZRybv?H|>`9f$AKVjFWJ=wegO7hOOIYCtd?Vj{EYLT*^gl35|HQ`R=ti+ADm{jyQE7K@kdjuqJhWVSks>b^ zxha88-h3s;%3_5b1TqFCPTxVjvuB5U>v=HyZ$?JSk+&I%)M7KE*wOg<)1-Iy)8-K! z^XpIt|0ibmk9RtMmlUd7#Ap3Q!q9N4atQy)TmrhrFhfx1DAN`^vq@Q_SRl|V z#lU<~n67$mT)NvHh`%als+G-)x1`Y%4Bp*6Un5Ri9h=_Db zA-AdP!f>f0m@~>7X#uBM?diI@)Egjuz@jXKvm zJo+==juc9_<;CqeRaU9_Mz@;3e=E4=6TK+c`|uu#pIqhSyNm`G(X)&)B`8q0RBv#> z`gGlw(Q=1Xmf55VHj%C#^1lpc>LY8kfA@|rlC1EA<1#`iuyNO z(=;irt{_&K=i4)^x%;U(Xv<)+o=dczC5H3W~+e|f~{*ucxj@{Yi-cw^MqYr3fN zF5D+~!wd$#al?UfMnz(@K#wn`_5na@rRr8XqN@&M&FGEC@`+OEv}sI1hw>Up0qAWf zL#e4~&oM;TVfjRE+10B_gFlLEP9?Q-dARr3xi6nQqnw>k-S;~b z;!0s2VS4}W8b&pGuK=7im+t(`nz@FnT#VD|!)eQNp-W6)@>aA+j~K*H{$G`y2|QHY z|Hmy+CR@#jWY4~)lr1qBJB_RfHJFfP<}pK5(#ZZGSqcpyS&}01LnTWk5fzmXMGHkJ zTP6L^B+uj;lmB_W<~4=${+v0>z31M!-_O@o-O9GyW)j_mjx}!0@br_LE-7SIuPP84 z;5=O(U*g_um0tyG|61N@d9lEuOeiRd+#NY^{nd5;-CVlw&Ap7J?qwM^?E29wvS}2d zbzar4Fz&RSR(-|s!Z6+za&Z zY#D<5q_JUktIzvL0)yq_kLWG6DO{ri=?c!y!f(Dk%G{8)k`Gym%j#!OgXVDD3;$&v@qy#ISJfp=Vm>pls@9-mapVQChAHHd-x+OGx)(*Yr zC1qDUTZ6mM(b_hi!TuFF2k#8uI2;kD70AQ&di$L*4P*Y-@p`jdm%_c3f)XhYD^6M8&#Y$ZpzQMcR|6nsH>b=*R_Von!$BTRj7yGCXokoAQ z&ANvx0-Epw`QIEPgI(^cS2f(Y85yV@ygI{ewyv5Frng)e}KCZF7JbR(&W618_dcEh(#+^zZFY;o<815<5sOHQdeax9_!PyM&;{P zkBa5xymca0#)c#tke@3KNEM8a_mT&1gm;p&&JlMGH(cL(b)BckgMQ^9&vRwj!~3@l zY?L5}=Jzr080OGKb|y`ee(+`flQg|!lo6>=H)X4`$Gz~hLmu2a%kYW_Uu8x09Pa0J zKZ`E$BKJ=2GPj_3l*TEcZ*uYRr<*J^#5pILTT;k_cgto1ZL-%slyc16J~OH-(RgDA z%;EjEnoUkZ&acS{Q8`{i6T5^nywgqQI5bDIymoa7CSZG|WWVk>GM9)zy*bNih|QIm 
z%0+(Nnc*a_xo;$=!HQYaapLms>J1ToyjtFByY`C2H1wT#178#4+|{H0BBqtCdd$L% z_3Hc60j@{t9~MjM@LBalR&6@>B;9?r<7J~F+WXyYu*y3?px*=8MAK@EA+jRX8{CG?GI-< z54?Dc9CAh>QTAvyOEm0^+x;r2BWX|{3$Y7)L5l*qVE*y0`7J>l2wCmW zL1?|a`pJ-l{fb_N;R(Z9UMiSj6pQjOvQ^%DvhIJF!+Th7jO2~1f1N+(-TyCFYQZYw z4)>7caf^Ki_KJ^Zx2JUb z&$3zJy!*+rCV4%jqwyuNY3j1ZEiltS0xTzd+=itTb;IPYpaf?8Y+RSdVdpacB(bVQ zC(JupLfFp8y43%PMj2}T|VS@%LVp>hv4Y!RPMF?pp8U_$xCJ)S zQx!69>bphNTIb9yn*_yfj{N%bY)t{L1cs8<8|!f$;UQ*}IN=2<6lA;x^(`8t?;+ST zh)z4qeYYgZkIy{$4x28O-pugO&gauRh3;lti9)9Pvw+^)0!h~%m&8Q!AKX%urEMnl z?yEz?g#ODn$UM`+Q#$Q!6|zsq_`dLO5YK-6bJM6ya>}H+vnW^h?o$z;V&wvuM$dR& zeEq;uUUh$XR`TWeC$$c&Jjau2it3#%J-y}Qm>nW*s?En?R&6w@sDXMEr#8~$=b(gk zwDC3)NtAP;M2BW_lL^5ShpK$D%@|BnD{=!Tq)o(5@z3i7Z){} zGr}Exom_qDO{kAVkZ*MbLNHE666Kina#D{&>Jy%~w7yX$oj;cYCd^p9zy z8*+wgSEcj$4{WxKmCF(5o7U4jqwEvO&dm1H#7z}%VXAbW&W24v-tS6N3}qrm1OnE)fUkoE8yMMn9S$?IswS88tQWm4#Oid#ckgr6 zRtHm!mfNl-`d>O*1~d7%;~n+{Rph6BBy^95zqI{K((E!iFQ+h*C3EsbxNo_aRm5gj zKYug($r*Q#W9`p%Bf{bi6;IY0v`pB^^qu)gbg9QHQ7 zWBj(a1YSu)~2RK8Pi#C>{DMlrqFb9e_RehEHyI{n?e3vL_}L>kYJC z_ly$$)zFi*SFyNrnOt(B*7E$??s67EO%DgoZL2XNk8iVx~X_)o++4oaK1M|ou73vA0K^503j@uuVmLcHH4ya-kOIDfM%5%(E z+Xpt~#7y2!KB&)PoyCA+$~DXqxPxxALy!g-O?<9+9KTk4Pgq4AIdUkl`1<1#j^cJg zgU3`0hkHj_jxV>`Y~%LAZl^3o0}`Sm@iw7kwff{M%VwtN)|~!p{AsfA6vB5UolF~d zHWS%*uBDt<9y!9v2Xe|au&1j&iR1HXCdyCjxSgG*L{wmTD4(NQ=mFjpa~xooc6kju z`~+d{j7$h-;HAB04H!Zscu^hZffL#9!p$)9>sRI|Yovm)g@F>ZnosF2EgkU3ln0bR zTA}|+E(tt)!SG)-bEJi_0m{l+(cAz^pi}`9=~n?y&;2eG;d9{M6nj>BHGn(KA2n|O zt}$=FPq!j`p&kQ8>cirSzkU0c08%8{^Qyqi-w2LoO8)^E7;;I1;HQ6B$u0nNaX2CY zSmfi)F`m94zL8>#zu;8|{aBui@RzRKBlP1&mfFxEC@%cjl?NBs`cr^nm){>;$g?rhKr$AO&6qV_Wbn^}5tfFBry^e1`%du2~o zs$~dN;S_#%iwwA_QvmMjh%Qo?0?rR~6liyN5Xmej8(*V9ym*T`xAhHih-v$7U}8=dfXi2i*aAB!xM(Xekg*ix@r|ymDw*{*s0?dlVys2e)z62u1 z+k3esbJE=-P5S$&KdFp+2H7_2e=}OKDrf( z9-207?6$@f4m4B+9E*e((Y89!q?zH|mz_vM>kp*HGXldO0Hg#!EtFhRuOm$u8e~a9 z5(roy7m$Kh+zjW6@zw{&20u?1f2uP&boD}$#Zy)4o&T;vyBoqFiF2t;*g=|1=)PxB z8eM3Mp=l_obbc?I^xyLz?4Y1YDWPa+nm;O<$Cn;@ane616`J9OO2r=rZr{I_Kizyc 
zP#^^WCdIEp*()rRT+*YZK>V@^Zs=ht32x>Kwe zab)@ZEffz;VM4{XA6e421^h~`ji5r%)B{wZu#hD}f3$y@L0JV9f3g{-RK!A?vBUA}${YF(vO4)@`6f1 z-A|}e#LN{)(eXloDnX4Vs7eH|<@{r#LodP@Nz--$Dg_Par%DCpu2>2jUnqy~|J?eZ zBG4FVsz_A+ibdwv>mLp>P!(t}E>$JGaK$R~;fb{O3($y1ssQQo|5M;^JqC?7qe|hg zu0ZOqeFcp?qVn&Qu7FQJ4hcFi&|nR!*j)MF#b}QO^lN%5)4p*D^H+B){n8%VPUzi! zDihoGcP71a6!ab`l^hK&*dYrVYzJ0)#}xVrp!e;lI!+x+bfCN0KXwUAPU9@#l7@0& QuEJmfE|#`Dqx|px0L@K;Y5)KL literal 0 HcmV?d00001 diff --git a/lib/sdk/server/gradle/wrapper/gradle-wrapper.properties b/lib/sdk/server/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..070cb70 --- /dev/null +++ b/lib/sdk/server/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/lib/sdk/server/gradlew b/lib/sdk/server/gradlew new file mode 100755 index 0000000..1b6c787 --- /dev/null +++ b/lib/sdk/server/gradlew @@ -0,0 +1,234 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. 
+while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +APP_NAME="Gradle" +APP_BASE_NAME=${0##*/} + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! 
"$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. 
+ +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/lib/sdk/server/gradlew.bat b/lib/sdk/server/gradlew.bat new file mode 100644 index 0000000..ac1b06f --- /dev/null +++ b/lib/sdk/server/gradlew.bat @@ -0,0 +1,89 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/lib/sdk/server/packaging-test/Makefile b/lib/sdk/server/packaging-test/Makefile new file mode 100644 index 0000000..bd48043 --- /dev/null +++ b/lib/sdk/server/packaging-test/Makefile @@ -0,0 +1,162 @@ +.PHONY: all clean test-default-jar test-thin-jar test-default-jar-classes test-thin-jar-classes + +# Running "make all" will verify the following things about the SDK jar distributions: +# - Each jar contains the expected set of classes, broadly speaking. +# - A simple Java application is able to instantiate an SDK client, when the SDK jar and any +# non-bundled dependencies are on the classpath. +# - An OSGi bundle is able to instantiate an SDK client, when the SDK and any non-bundled +# dependencies are also installed as OSGi bundles; i.e., the correct packages are being +# exported and imported. This is our only test of this configuration, since our integration +# testing for Java does not use OSGi. + +BASE_DIR:=$(shell pwd) +PROJECT_DIR=$(shell cd .. 
&& pwd) +SDK_VERSION=$(shell grep "version=" $(PROJECT_DIR)/gradle.properties | cut -d '=' -f 2) + +export TEMP_DIR=$(BASE_DIR)/temp + +JAR=$(if $(shell which jar),jar,$(JAVA_HOME)/bin/jar) + +LOCAL_VERSION=99.99.99-SNAPSHOT +MAVEN_LOCAL_REPO=$(HOME)/.m2/repository +TEMP_MAVEN_OUTPUT_DIR=$(MAVEN_LOCAL_REPO)/com/launchdarkly/launchdarkly-java-server-sdk/$(LOCAL_VERSION) +SDK_DEFAULT_JAR=$(TEMP_MAVEN_OUTPUT_DIR)/launchdarkly-java-server-sdk-$(LOCAL_VERSION).jar +SDK_ALL_JAR=$(TEMP_MAVEN_OUTPUT_DIR)/launchdarkly-java-server-sdk-$(LOCAL_VERSION)-all.jar +SDK_THIN_JAR=$(TEMP_MAVEN_OUTPUT_DIR)/launchdarkly-java-server-sdk-$(LOCAL_VERSION)-thin.jar +POM_XML=$(TEMP_MAVEN_OUTPUT_DIR)/launchdarkly-java-server-sdk-$(LOCAL_VERSION).pom + +export TEMP_OUTPUT=$(TEMP_DIR)/test.out + +# Build product of the project in ./test-app; can be run as either a regular app or an OSGi bundle +TEST_APP_JAR=$(TEMP_DIR)/test-app.jar +TEST_APP_SOURCES=$(shell find $(BASE_DIR)/test-app -name *.java) $(BASE_DIR)/test-app/build.gradle + +# Felix OSGi container +export FELIX_DIR=$(TEMP_DIR)/felix +export FELIX_JAR=$(FELIX_DIR)/lib/felix.jar +export FELIX_BASE_BUNDLE_DIR=$(FELIX_DIR)/base-bundles +export TEMP_BUNDLE_DIR=$(FELIX_DIR)/app-bundles + +# Lists of jars to use as a classpath (for the non-OSGi runtime test) or to install as bundles (for +# the OSGi test). Note that we're assuming that all of the SDK's dependencies have built-in support +# for OSGi, which is currently true; if that weren't true, we would have to do something different +# to put them on the system classpath in the OSGi test. 
+RUN_JARS_test-default-jar=$(TEST_APP_JAR) $(SDK_DEFAULT_JAR) \ + $(shell ls $(TEMP_DIR)/dependencies-external/*.jar 2>/dev/null) +RUN_JARS_test-thin-jar=$(TEST_APP_JAR) $(SDK_THIN_JAR) \ + $(shell ls $(TEMP_DIR)/dependencies-internal/*.jar 2>/dev/null) \ + $(shell ls $(TEMP_DIR)/dependencies-external/*.jar 2>/dev/null) + +# grep variants with lookahead aren't universally available +PGREP=$(if $(shell echo a | grep -P . 2>/dev/null),grep -P,$(if $(shell which ggrep),ggrep -P,echo This makefile requires grep -P or ggrep -P && exit 1;)) +classes_prepare=echo " checking $(1)..." && $(JAR) tf $(1) | grep '\.class$$' >$(TEMP_OUTPUT) +classes_should_contain=echo " should contain $(2)" && $(PGREP) "^$(1)/.*\.class$$" $(TEMP_OUTPUT) >/dev/null +classes_should_not_contain=echo " should not contain $(2)" && ! $(PGREP) "^$(1)/.*\.class$$" $(TEMP_OUTPUT) +should_not_have_module_info=echo " should not have module-info.class" && ! grep "module-info\.class$$" $(TEMP_OUTPUT) + +verify_sdk_classes= \ + $(call classes_should_contain,com/launchdarkly/sdk,com.launchdarkly.sdk) && \ + $(call classes_should_contain,com/launchdarkly/sdk/json,com.launchdarkly.sdk.json) && \ + $(foreach subpkg,$(sdk_subpackage_names), \ + $(call classes_should_contain,com/launchdarkly/sdk/$(subpkg),com.launchdarkly.sdk.$(subst /,.,$(subpkg))) && ) true +sdk_subpackage_names= \ + $(shell cd $(PROJECT_DIR)/src/main/java/com/launchdarkly/sdk && find . ! -path . -type d | sed -e 's@^\./@@') + +manifest_should_not_have_classpath= \ + echo " should not have Class-Path in manifest" && \ + ! 
(unzip -q -c $(1) META-INF/MANIFEST.MF | grep 'Class-Path') + +caption=echo "" && echo "$(1)" + +all: test-default-jar test-thin-jar test-pom + +clean: + rm -rf $(TEMP_DIR)/* + rm -rf test-app/build + +# SECONDEXPANSION is needed so we can use "$@" inside a variable in the prerequisite list of the test targets +.SECONDEXPANSION: + +test-default-jar test-thin-jar: $$@-classes $(TEST_APP_JAR) get-sdk-dependencies $$(RUN_JARS_$$@) $(FELIX_DIR) + @$(call caption,$@) + @./run-non-osgi-test.sh $(RUN_JARS_$@) + @./run-osgi-test.sh $(RUN_JARS_$@) + +test-default-jar-classes: $(SDK_DEFAULT_JAR) $(TEMP_DIR) + @$(call caption,$@) + @$(call classes_prepare,$<) + @$(call verify_sdk_classes) + @$(call classes_should_contain,com/launchdarkly/logging,unshaded com.launchdarkly.logging classes) + @$(call classes_should_not_contain,com/launchdarkly/shaded/com/launchdarkly/sdk(?!/internal),shaded non-internal SDK classes) + @$(call classes_should_contain,com/launchdarkly/shaded/com/google/gson,shaded Gson) + @$(call classes_should_not_contain,com/google/gson,unshaded Gson) + @$(call classes_should_not_contain,org/slf4j,unshaded SLF4j) + @$(call classes_should_not_contain,com/launchdarkly/shaded/org/slf4j,shaded SLF4j) + @$(call classes_should_not_contain,com/fasterxml/jackson,unshaded Jackson) + @$(call classes_should_not_contain,com/launchdarkly/shaded/com/fasterxml/jackson,shaded Jackson) + @$(call should_not_have_module_info) + @$(call manifest_should_not_have_classpath,$<) + +test-thin-jar-classes: $(SDK_THIN_JAR) $(TEMP_DIR) + @$(call caption,$@) + @$(call classes_prepare,$<) + @$(call verify_sdk_classes) + @echo " should not contain anything other than SDK classes" + @! grep -v "^com/launchdarkly/sdk" $(TEMP_OUTPUT) + @$(call should_not_have_module_info) + @$(call manifest_should_not_have_classpath,$<) + +test-pom: $(POM_XML) + @$(call caption,$@) + @echo "=== contents of $<" + @cat $< + @echo "===" + @echo " should not have any dependencies" + @! 
grep '' $< || (echo " FAILED" && exit 1) + +$(SDK_DEFAULT_JAR) $(SDK_ALL_JAR) $(SDK_THIN_JAR) $(POM_XML): + cd .. && ./gradlew publishToMavenLocal -P version=$(LOCAL_VERSION) -P LD_SKIP_SIGNING=1 + @# publishToMavenLocal creates not only the jars but also the pom + +$(TEST_APP_JAR): $(SDK_THIN_JAR) $(TEST_APP_SOURCES) | $(TEMP_DIR) + @mkdir -p $(TEMP_DIR)/dependencies-app + @cd test-app && ../../gradlew jar + @cp $(BASE_DIR)/test-app/build/libs/test-app-*.jar $@ + +get-sdk-dependencies: $(TEMP_DIR)/dependencies-all $(TEMP_DIR)/dependencies-external $(TEMP_DIR)/dependencies-internal + +$(TEMP_DIR)/dependencies-all: | $(TEMP_DIR) + @[ -d $@ ] || mkdir -p $@ + @cd .. && ./gradlew exportDependencies + @cp $(TEMP_DIR)/dependencies-app/*.jar $@ + +$(TEMP_DIR)/dependencies-external: $(TEMP_DIR)/dependencies-all + @[ -d $@ ] || mkdir -p $@ + @cp $(TEMP_DIR)/dependencies-all/slf4j*.jar $@ + @cp $(TEMP_DIR)/dependencies-all/gson*.jar $@ + @cp $(TEMP_DIR)/dependencies-all/jackson*.jar $@ + +$(TEMP_DIR)/dependencies-internal: $(TEMP_DIR)/dependencies-all + [ -d $@ ] || mkdir -p $@ + cp $(TEMP_DIR)/dependencies-all/*.jar $@ + rm $@/slf4j*.jar + rm $@/jackson*.jar + +$(FELIX_JAR): $(FELIX_DIR) + +$(FELIX_DIR): + mkdir -p $(FELIX_DIR) + mkdir -p $(FELIX_DIR)/lib + mkdir -p $(FELIX_BASE_BUNDLE_DIR) + cd test-app && ../../gradlew createOsgi + @# createOsgi is a target provided by the osgi-run Gradle plugin; it downloads the Felix container and + @# puts it in build/osgi along with related bundles and a config file. 
+ cp -r test-app/build/osgi/conf $(FELIX_DIR) + echo "felix.shutdown.hook=false" >>$(FELIX_DIR)/conf/config.properties + @# setting felix.shutdown.hook to false allows our test app to use System.exit() + cp test-app/build/osgi/system-libs/org.apache.felix.main-*.jar $(FELIX_JAR) + cp test-app/build/osgi/bundle/* $(FELIX_BASE_BUNDLE_DIR) + cd $(FELIX_BASE_BUNDLE_DIR) && rm -f launchdarkly-*.jar gson-*.jar + +$(TEMP_DIR): + [ -d $@ ] || mkdir -p $@ diff --git a/lib/sdk/server/packaging-test/run-non-osgi-test.sh b/lib/sdk/server/packaging-test/run-non-osgi-test.sh new file mode 100755 index 0000000..2c1f0cd --- /dev/null +++ b/lib/sdk/server/packaging-test/run-non-osgi-test.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +set -e + +function run_test() { + rm -f ${TEMP_OUTPUT} + touch ${TEMP_OUTPUT} + classpath=$(echo "$@" | sed -e 's/ /:/g') + java -classpath "$classpath" testapp.TestApp | tee ${TEMP_OUTPUT} + grep "TestApp: PASS" ${TEMP_OUTPUT} >/dev/null +} + +# It does not make sense to test the "thin" jar without Gson. The SDK uses Gson internally +# and can't work without it; in the default jar and the "all" jar, it has its own embedded +# copy of Gson, but the "thin" jar does not include any third-party dependencies so you must +# provide all of them including Gson. +echo "" +if [[ "$@" =~ $thin_sdk_regex ]]; then + echo " non-OSGi runtime test - without Jackson" + filtered_deps="" + json_jar_regex=".*jackson.*" + for dep in $@; do + if [[ ! "$dep" =~ $json_jar_regex ]]; then + filtered_deps="$filtered_deps $dep" + fi + done + run_test $filtered_deps + grep "skipping LDJackson tests" ${TEMP_OUTPUT} >/dev/null || \ + (echo "FAIL: should have skipped LDJackson tests but did not; test setup was incorrect" && exit 1) +else + echo " non-OSGi runtime test - without Gson or Jackson" + filtered_deps="" + json_jar_regex=".*gson.*|.*jackson.*" + for dep in $@; do + if [[ ! 
"$dep" =~ $json_jar_regex ]]; then + filtered_deps="$filtered_deps $dep" + fi + done + run_test $filtered_deps + grep "skipping LDGson tests" ${TEMP_OUTPUT} >/dev/null || \ + (echo "FAIL: should have skipped LDGson tests but did not; test setup was incorrect" && exit 1) + grep "skipping LDJackson tests" ${TEMP_OUTPUT} >/dev/null || \ + (echo "FAIL: should have skipped LDJackson tests but did not; test setup was incorrect" && exit 1) +fi + +echo "" +echo " non-OSGi runtime test - with Gson and Jackson" +run_test $@ +grep "LDGson tests OK" ${TEMP_OUTPUT} >/dev/null || (echo "FAIL: should have run LDGson tests but did not" && exit 1) +grep "LDJackson tests OK" ${TEMP_OUTPUT} >/dev/null || (echo "FAIL: should have run LDJackson tests but did not" && exit 1) diff --git a/lib/sdk/server/packaging-test/run-osgi-test.sh b/lib/sdk/server/packaging-test/run-osgi-test.sh new file mode 100755 index 0000000..22bc524 --- /dev/null +++ b/lib/sdk/server/packaging-test/run-osgi-test.sh @@ -0,0 +1,57 @@ +#!/bin/bash + +set -e + +# This script uses Felix to run the test application as an OSGi bundle, with or without +# additional bundles to support the optional Gson and Jackson integrations. We are +# verifying that the SDK itself works correctly as an OSGi bundle, and also that its +# imports of other bundles work correctly. +# +# This test is being run in CI using the lowest compatible JDK version. It may not work +# in higher JDK versions due to incompatibilities with the version of Felix we are using. + +JAR_DEPS="$@" + +# We can't test the "thin" jar in OSGi, because some of our third-party dependencies +# aren't available as OSGi bundles. That isn't a plausible use case anyway. 
+thin_sdk_regex=".*launchdarkly-java-server-sdk-[^ ]*-thin\\.jar" +if [[ "${JAR_DEPS}" =~ $thin_sdk_regex ]]; then + exit 0 +fi + +rm -rf ${TEMP_BUNDLE_DIR} +mkdir -p ${TEMP_BUNDLE_DIR} + +function copy_deps() { + if [ -n "${JAR_DEPS}" ]; then + cp ${JAR_DEPS} ${TEMP_BUNDLE_DIR} + fi + cp ${FELIX_BASE_BUNDLE_DIR}/* ${TEMP_BUNDLE_DIR} +} + +function run_test() { + rm -rf ${FELIX_DIR}/felix-cache + rm -f ${TEMP_OUTPUT} + touch ${TEMP_OUTPUT} + cd ${FELIX_DIR} + java -jar ${FELIX_JAR} -b ${TEMP_BUNDLE_DIR} | tee ${TEMP_OUTPUT} + grep "TestApp: PASS" ${TEMP_OUTPUT} >/dev/null +} + +echo "" +echo " OSGi runtime test - without Gson or Jackson" +copy_deps +rm ${TEMP_BUNDLE_DIR}/*gson*.jar ${TEMP_BUNDLE_DIR}/*jackson*.jar +ls ${TEMP_BUNDLE_DIR} +run_test +grep "skipping LDGson tests" ${TEMP_OUTPUT} >/dev/null || \ + (echo "FAIL: should have skipped LDGson tests but did not; test setup was incorrect" && exit 1) +grep "skipping LDJackson tests" ${TEMP_OUTPUT} >/dev/null || \ + (echo "FAIL: should have skipped LDJackson tests but did not; test setup was incorrect" && exit 1) + +echo "" +echo " OSGi runtime test - with Gson and Jackson" +copy_deps +run_test +grep "LDGson tests OK" ${TEMP_OUTPUT} >/dev/null || (echo "FAIL: should have run LDGson tests but did not" && exit 1) +grep "LDJackson tests OK" ${TEMP_OUTPUT} >/dev/null || (echo "FAIL: should have run LDJackson tests but did not" && exit 1) diff --git a/lib/sdk/server/packaging-test/test-app/build.gradle b/lib/sdk/server/packaging-test/test-app/build.gradle new file mode 100644 index 0000000..53b26c0 --- /dev/null +++ b/lib/sdk/server/packaging-test/test-app/build.gradle @@ -0,0 +1,70 @@ + +buildscript { + repositories { + mavenCentral() + } +} + +plugins { + id "java" + id "java-library" + id "biz.aQute.bnd.builder" version "5.0.1" + id "com.athaydes.osgi-run" version "1.6.0" +} + +repositories { + mavenCentral() +} + +allprojects { + group = "com.launchdarkly" + version = "1.0.0" + archivesBaseName = 'test-app-bundle' 
+ sourceCompatibility = 1.8 + targetCompatibility = 1.8 +} + +ext.versions = [ + "gson": "2.8.9", + "jackson": "2.10.0" +] + +dependencies { + // Note, the SDK build must have already been run before this, since we're using its product as a dependency + implementation fileTree(dir: "../../build/libs", include: "launchdarkly-java-server-sdk-*-thin.jar") + implementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" + implementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" + implementation "com.google.code.gson:gson:${versions.gson}" + implementation "org.slf4j:slf4j-api:1.7.22" + implementation "org.osgi:osgi_R4_core:1.0" + osgiRuntime "org.slf4j:slf4j-simple:1.7.22" +} + +task exportDependencies(type: Copy, dependsOn: compileJava) { + into "../temp/dependencies-app" + from configurations.runtimeClasspath.resolvedConfiguration.resolvedArtifacts.collect { it.file } +} + +jar { + bnd( + // This consumer-policy directive completely turns off version checking for the test app's + // OSGi imports, so for instance if the app uses version 2.x of package P, the import will + // just be for p rather than p;version="[2.x,3)". One wouldn't normally do this, but we + // need to be able to run the CI tests for snapshot/beta versions, and bnd does not handle + // those correctly (5.0.0-beta1 will become "[5.0.0,6)" which will not work because the + // beta is semantically *before* 5.0.0). 
+ '-consumer-policy': '', + 'Bundle-Activator': 'testapp.TestAppOsgiEntryPoint', + 'Import-Package': 'com.launchdarkly.sdk,com.launchdarkly.sdk.json' + + ',com.launchdarkly.sdk.server,org.slf4j' + + ',org.osgi.framework' + + ',com.google.gson;resolution:=optional' + + ',com.fasterxml.jackson.*;resolution:=optional' + ) + + finalizedBy(exportDependencies) +} + +runOsgi { + bundles = [ ] // we don't need a CLI or anything like that - just the SLF4j dependency shown above +} diff --git a/lib/sdk/server/packaging-test/test-app/settings.gradle b/lib/sdk/server/packaging-test/test-app/settings.gradle new file mode 100644 index 0000000..e2a1182 --- /dev/null +++ b/lib/sdk/server/packaging-test/test-app/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'test-app-bundle' diff --git a/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/JsonSerializationTestData.java b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/JsonSerializationTestData.java new file mode 100644 index 0000000..d1c037f --- /dev/null +++ b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/JsonSerializationTestData.java @@ -0,0 +1,44 @@ +package testapp; + +import com.launchdarkly.sdk.*; +import java.util.*; + +public class JsonSerializationTestData { + public static class TestItem { + final Object objectToSerialize; + final String expectedJson; + + private TestItem(Object objectToSerialize, String expectedJson) { + this.objectToSerialize = objectToSerialize; + this.expectedJson = expectedJson; + } + } + + public static TestItem[] TEST_ITEMS = new TestItem[] { + new TestItem( + LDValue.buildArray().add(1).add(2).build(), + "[1,2]" + ), + new TestItem( + Collections.singletonMap("value", LDValue.buildArray().add(1).add(2).build()), + "{\"value\":[1,2]}" + ), + new TestItem( + EvaluationReason.off(), + "{\"kind\":\"OFF\"}" + ), + new TestItem( + LDContext.create("userkey"), + "{\"kind\":\"user\",\"key\":\"userkey\"}" + ) + }; + + public static boolean assertJsonEquals(String 
expectedJson, String actualJson, Object objectToSerialize) { + if (!LDValue.parse(actualJson).equals(LDValue.parse(expectedJson))) { + TestApp.addError("JSON encoding of " + objectToSerialize.getClass() + " should have been " + + expectedJson + ", was " + actualJson, null); + return false; + } + return true; + } +} \ No newline at end of file diff --git a/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestApp.java b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestApp.java new file mode 100644 index 0000000..a6d87be --- /dev/null +++ b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestApp.java @@ -0,0 +1,82 @@ +package testapp; + +import com.launchdarkly.sdk.*; +import com.launchdarkly.sdk.json.*; +import com.launchdarkly.sdk.server.*; +import java.util.*; +import org.slf4j.*; + +public class TestApp { + private static final Logger logger = LoggerFactory.getLogger(TestApp.class); // proves SLF4J API is on classpath + + private static List errors = new ArrayList<>(); + + public static void main(String[] args) throws Exception { + try { + LDConfig config = new LDConfig.Builder() + .offline(true) + .build(); + LDClient client = new LDClient("fake-sdk-key", config); + log("client creation OK"); + } catch (RuntimeException e) { + addError("client creation failed", e); + } + + try { + boolean jsonOk = true; + for (JsonSerializationTestData.TestItem item: JsonSerializationTestData.TEST_ITEMS) { + if (!(item.objectToSerialize instanceof JsonSerializable)) { + continue; // things without our marker interface, like a Map, can't be passed to JsonSerialization.serialize + } + String actualJson = JsonSerialization.serialize((JsonSerializable)item.objectToSerialize); + if (!JsonSerializationTestData.assertJsonEquals(item.expectedJson, actualJson, item.objectToSerialize)) { + jsonOk = false; + } + } + if (jsonOk) { + log("JsonSerialization tests OK"); + } + } catch (RuntimeException e) { + addError("unexpected error in JsonSerialization tests", e); + }
+ + try { + Class.forName("testapp.TestAppGsonTests"); // see TestAppGsonTests for why we're loading it in this way + } catch (NoClassDefFoundError e) { + log("skipping LDGson tests because Gson is not in the classpath"); + } catch (RuntimeException e) { + addError("unexpected error in LDGson tests", e); + } + + try { + Class.forName("testapp.TestAppJacksonTests"); // see TestAppJacksonTests for why we're loading it in this way + } catch (NoClassDefFoundError e) { + log("skipping LDJackson tests because Jackson is not in the classpath"); + } catch (RuntimeException e) { + addError("unexpected error in LDJackson tests", e); + } + + if (errors.isEmpty()) { + log("PASS"); + } else { + for (String err: errors) { + log("ERROR: " + err); + } + log("FAIL"); + System.exit(1); + } + } + + public static void addError(String message, Throwable e) { + if (e != null) { + errors.add(message + ": " + e); + e.printStackTrace(); + } else { + errors.add(message); + } + } + + public static void log(String message) { + System.out.println("TestApp: " + message); + } +} \ No newline at end of file diff --git a/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppGsonTests.java b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppGsonTests.java new file mode 100644 index 0000000..1ea44e0 --- /dev/null +++ b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppGsonTests.java @@ -0,0 +1,42 @@ +package testapp; + +import com.google.gson.*; +import com.launchdarkly.sdk.*; +import com.launchdarkly.sdk.json.*; + +// This code is in its own class that is loaded dynamically because some of our test scenarios +// involve running TestApp without having Gson in the classpath, to make sure the SDK does not +// *require* the presence of an external Gson even though it can interoperate with one. 
+ +public class TestAppGsonTests { + // Use static block so simply loading this class causes the tests to execute + static { + // First try referencing Gson, so we fail right away if it's not on the classpath + Class c = Gson.class; + try { + runGsonTests(); + } catch (NoClassDefFoundError e) { + // If we've even gotten to this static block, then Gson itself *is* on the application's + // classpath, so this must be some other kind of classloading error that we do want to + // report. For instance, a NoClassDefFound error for Gson at this point, if we're in + // OSGi, would mean that the SDK bundle is unable to see the external Gson classes. + TestApp.addError("unexpected error in LDGson tests", e); + } + } + + public static void runGsonTests() { + Gson gson = new GsonBuilder().registerTypeAdapterFactory(LDGson.typeAdapters()).create(); + + boolean ok = true; + for (JsonSerializationTestData.TestItem item: JsonSerializationTestData.TEST_ITEMS) { + String actualJson = gson.toJson(item.objectToSerialize); + if (!JsonSerializationTestData.assertJsonEquals(item.expectedJson, actualJson, item.objectToSerialize)) { + ok = false; + } + } + + if (ok) { + TestApp.log("LDGson tests OK"); + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppJacksonTests.java b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppJacksonTests.java new file mode 100644 index 0000000..531f3b1 --- /dev/null +++ b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppJacksonTests.java @@ -0,0 +1,43 @@ +package testapp; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.launchdarkly.sdk.*; +import com.launchdarkly.sdk.json.*; + +// This code is in its own class that is loaded dynamically because some of our test scenarios +// involve running TestApp without having Jackson in the classpath, to make sure the SDK does not +// *require* the presence of an external Jackson even though it can 
interoperate with one. + +public class TestAppJacksonTests { + // Use static block so simply loading this class causes the tests to execute + static { + // First try referencing Jackson, so we fail right away if it's not on the classpath + Class c = ObjectMapper.class; + try { + runJacksonTests(); + } catch (Exception e) { + // If we've even gotten to this static block, then Jackson itself *is* on the application's + // classpath, so this must be some other kind of classloading error that we do want to + // report. For instance, a NoClassDefFound error for Jackson at this point, if we're in + // OSGi, would mean that the SDK bundle is unable to see the external Jackson classes. + TestApp.addError("unexpected error in LDJackson tests", e); + } + } + + public static void runJacksonTests() throws Exception { + ObjectMapper jacksonMapper = new ObjectMapper(); + jacksonMapper.registerModule(LDJackson.module()); + + boolean ok = true; + for (JsonSerializationTestData.TestItem item: JsonSerializationTestData.TEST_ITEMS) { + String actualJson = jacksonMapper.writeValueAsString(item.objectToSerialize); + if (!JsonSerializationTestData.assertJsonEquals(item.expectedJson, actualJson, item.objectToSerialize)) { + ok = false; + } + } + + if (ok) { + TestApp.log("LDJackson tests OK"); + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppOsgiEntryPoint.java b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppOsgiEntryPoint.java new file mode 100644 index 0000000..65602cd --- /dev/null +++ b/lib/sdk/server/packaging-test/test-app/src/main/java/testapp/TestAppOsgiEntryPoint.java @@ -0,0 +1,17 @@ +package testapp; + +import org.osgi.framework.BundleActivator; +import org.osgi.framework.BundleContext; + +public class TestAppOsgiEntryPoint implements BundleActivator { + public void start(BundleContext context) throws Exception { + System.out.println("TestApp: starting test bundle"); + + 
TestApp.main(new String[0]); + + System.exit(0); + } + + public void stop(BundleContext context) throws Exception { + } +} \ No newline at end of file diff --git a/lib/sdk/server/scripts/release.sh b/lib/sdk/server/scripts/release.sh new file mode 100755 index 0000000..b276b53 --- /dev/null +++ b/lib/sdk/server/scripts/release.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# This script updates the version for the java-server-sdk library and releases the artifact + javadoc +# It will only work if you have the proper credentials set up in ~/.gradle/gradle.properties + +# It takes exactly one argument: the new version. +# It should be run from the root of this git repo like this: +# ./scripts/release.sh 4.0.9 + +# When done you should commit and push the changes made. + +set -uxe +echo "Starting java-server-sdk release." + +$(dirname $0)/update-version.sh $1 + +./gradlew clean publish closeAndReleaseRepository +./gradlew publishGhPages +echo "Finished java-server-sdk release." diff --git a/lib/sdk/server/scripts/update-version.sh b/lib/sdk/server/scripts/update-version.sh new file mode 100755 index 0000000..3b96959 --- /dev/null +++ b/lib/sdk/server/scripts/update-version.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +VERSION=$1 + +# Update version in gradle.properties file: +sed -i.bak "s/^version.*$/version=${VERSION}/" gradle.properties +rm -f gradle.properties.bak + +# Update version in README.md: +sed -i.bak "s/.*<\/version>/${VERSION}<\/version>/" README.md +sed -i.bak "s/\"com.launchdarkly:launchdarkly-java-server-sdk:.*\"/\"com.launchdarkly:launchdarkly-java-server-sdk:${VERSION}\"/" README.md +rm -f README.md.bak diff --git a/lib/sdk/server/settings.gradle b/lib/sdk/server/settings.gradle new file mode 100644 index 0000000..2315041 --- /dev/null +++ b/lib/sdk/server/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'launchdarkly-java-server-sdk' diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/json/SdkSerializationExtensions.java 
b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/json/SdkSerializationExtensions.java new file mode 100644 index 0000000..5044c57 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/json/SdkSerializationExtensions.java @@ -0,0 +1,16 @@ +package com.launchdarkly.sdk.json; + +import com.google.common.collect.ImmutableList; +import com.launchdarkly.sdk.server.FeatureFlagsState; + +// See JsonSerialization.getDeserializableClasses in java-sdk-common. + +abstract class SdkSerializationExtensions { + private SdkSerializationExtensions() {} + + public static Iterable> getDeserializableClasses() { + return ImmutableList.>of( + FeatureFlagsState.class + ); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImpl.java new file mode 100644 index 0000000..11c3a0f --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImpl.java @@ -0,0 +1,30 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; + +final class BigSegmentStoreStatusProviderImpl implements BigSegmentStoreStatusProvider { + private final EventBroadcasterImpl statusNotifier; + private final BigSegmentStoreWrapper storeWrapper; + + BigSegmentStoreStatusProviderImpl( + EventBroadcasterImpl bigSegmentStatusNotifier, + BigSegmentStoreWrapper storeWrapper) { + this.storeWrapper = storeWrapper; + this.statusNotifier = bigSegmentStatusNotifier; + } + + @Override + public Status getStatus() { + return storeWrapper == null ? 
new Status(false, false) : storeWrapper.getStatus(); + } + + @Override + public void addStatusListener(StatusListener listener) { + statusNotifier.register(listener); + } + + @Override + public void removeStatusListener(StatusListener listener) { + statusNotifier.unregister(listener); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapper.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapper.java new file mode 100644 index 0000000..7a682c4 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapper.java @@ -0,0 +1,167 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.EvaluationReason.BigSegmentsStatus; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider.Status; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider.StatusListener; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.Membership; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.StoreMetadata; +import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration; + +import org.apache.commons.codec.digest.DigestUtils; +import org.checkerframework.checker.nullness.qual.NonNull; + +import java.io.Closeable; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.Base64; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.createMembershipFromSegmentRefs; + +class BigSegmentStoreWrapper implements Closeable { + private final 
BigSegmentStore store; + private final Duration staleAfter; + private final ScheduledFuture pollFuture; + private final LoadingCache cache; + private final EventBroadcasterImpl statusProvider; + private final LDLogger logger; + private final Object statusLock = new Object(); + private Status lastStatus; + + BigSegmentStoreWrapper(BigSegmentsConfiguration config, + EventBroadcasterImpl statusProvider, + ScheduledExecutorService sharedExecutor, + LDLogger logger) { + this.store = config.getStore(); + this.staleAfter = config.getStaleAfter(); + this.statusProvider = statusProvider; + this.logger = logger; + + CacheLoader loader = new CacheLoader() { + @Override + public Membership load(@NonNull String key) { + Membership membership = queryMembership(key); + return membership == null ? createMembershipFromSegmentRefs(null, null) : membership; + } + }; + this.cache = CacheBuilder.newBuilder() + .maximumSize(config.getUserCacheSize()) + .expireAfterWrite(config.getUserCacheTime()) + .build(loader); + + this.pollFuture = sharedExecutor.scheduleAtFixedRate(this::pollStoreAndUpdateStatus, + 0, + config.getStatusPollInterval().toMillis(), + TimeUnit.MILLISECONDS); + } + + @Override + public void close() throws IOException { + pollFuture.cancel(true); + cache.invalidateAll(); + store.close(); + } + + /** + * Called by the evaluator when it needs to get the Big Segment membership state for a user. + *

+ * If there is a cached membership state for the user, it returns the cached state. Otherwise, + * it converts the user key into the hash string used by the BigSegmentStore, queries the store, + * and caches the result. The returned status value indicates whether the query succeeded, and + * whether the result (regardless of whether it was from a new query or the cache) should be + * considered "stale". + * + * @param userKey the (unhashed) user key + * @return the query result + */ + BigSegmentsQueryResult getUserMembership(String userKey) { + BigSegmentsQueryResult ret = new BigSegmentsQueryResult(); + try { + ret.membership = cache.get(userKey); + ret.status = getStatus().isStale() ? BigSegmentsStatus.STALE : BigSegmentsStatus.HEALTHY; + } catch (Exception e) { + logger.error("Big Segment store returned error: {}", e.toString()); + logger.debug(e.toString(), e); + ret.membership = null; + ret.status = BigSegmentsStatus.STORE_ERROR; + } + return ret; + } + + private Membership queryMembership(String userKey) { + String hash = hashForUserKey(userKey); + logger.debug("Querying Big Segment state for user hash {}", hash); + return store.getMembership(hash); + } + + /** + * Returns a BigSegmentStoreStatus describing whether the store seems to be available (that is, + * the last query to it did not return an error) and whether it is stale (that is, the last known + * update time is too far in the past). + *

+ * If we have not yet obtained that information (the poll task has not executed yet), then this + * method immediately does a metadata query and waits for it to succeed or fail. This means that + * if an application using Big Segments evaluates a feature flag immediately after creating the + * SDK client, before the first status poll has happened, that evaluation may block for however + * long it takes to query the store. + * + * @return the store status + */ + Status getStatus() { + Status ret; + synchronized (statusLock) { + ret = lastStatus; + } + if (ret != null) { + return ret; + } + return pollStoreAndUpdateStatus(); + } + + Status pollStoreAndUpdateStatus() { + boolean storeAvailable = false; + boolean storeStale = false; + logger.debug("Querying Big Segment store metadata"); + try { + StoreMetadata metadata = store.getMetadata(); + storeAvailable = true; + storeStale = metadata == null || isStale(metadata.getLastUpToDate()); + } catch (Exception e) { + logger.error("Big Segment store status query returned error: {}", e.toString()); + logger.debug(e.toString(), e); + } + Status newStatus = new Status(storeAvailable, storeStale); + Status oldStatus; + synchronized (this.statusLock) { + oldStatus = this.lastStatus; + this.lastStatus = newStatus; + } + if (!newStatus.equals(oldStatus)) { + logger.debug("Big Segment store status changed from {} to {}", oldStatus, newStatus); + statusProvider.broadcast(newStatus); + } + return newStatus; + } + + private boolean isStale(long updateTime) { + return staleAfter.minusMillis(System.currentTimeMillis() - updateTime).isNegative(); + } + + static String hashForUserKey(String userKey) { + byte[] encodedDigest = DigestUtils.sha256(userKey.getBytes(StandardCharsets.UTF_8)); + return Base64.getEncoder().encodeToString(encodedDigest); + } + + static class BigSegmentsQueryResult { + Membership membership; + BigSegmentsStatus status; + } +} diff --git 
a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ClientContextImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ClientContextImpl.java new file mode 100644 index 0000000..6146b39 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ClientContextImpl.java @@ -0,0 +1,132 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.internal.events.DiagnosticStore; +import com.launchdarkly.sdk.server.integrations.EventProcessorBuilder; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStoreUpdateSink; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration; + +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +/** + * This is the package-private implementation of {@link ClientContext} that contains additional non-public + * SDK objects that may be used by our internal components. + *

+ * All component factories, whether they are built-in ones or custom ones from the application, receive a + * {@link ClientContext} and can access its public properties. But only our built-in ones can see the + * package-private properties, which they can do by calling {@code ClientContextImpl.get(ClientContext)} + * to make sure that what they have is really a {@code ClientContextImpl} (as opposed to some other + * implementation of {@link ClientContext}, which might have been created for instance in application + * test code). + */ +final class ClientContextImpl extends ClientContext { + private static volatile ScheduledExecutorService fallbackSharedExecutor = null; + + final ScheduledExecutorService sharedExecutor; + final DiagnosticStore diagnosticStore; + final DataSourceUpdateSink dataSourceUpdateSink; + final DataStoreUpdateSink dataStoreUpdateSink; + + private ClientContextImpl( + ClientContext baseContext, + ScheduledExecutorService sharedExecutor, + DiagnosticStore diagnosticStore + ) { + super(baseContext.getSdkKey(), baseContext.getApplicationInfo(), baseContext.getHttp(), + baseContext.getLogging(), baseContext.isOffline(), baseContext.getServiceEndpoints(), + baseContext.getThreadPriority(), baseContext.getWrapperInfo()); + this.sharedExecutor = sharedExecutor; + this.diagnosticStore = diagnosticStore; + this.dataSourceUpdateSink = null; + this.dataStoreUpdateSink = null; + } + + private ClientContextImpl( + ClientContextImpl copyFrom, + DataSourceUpdateSink dataSourceUpdateSink, + DataStoreUpdateSink dataStoreUpdateSink + ) { + super(copyFrom); + this.dataSourceUpdateSink = dataSourceUpdateSink; + this.dataStoreUpdateSink = dataStoreUpdateSink; + this.diagnosticStore = copyFrom.diagnosticStore; + this.sharedExecutor = copyFrom.sharedExecutor; + } + + ClientContextImpl withDataSourceUpdateSink(DataSourceUpdateSink newDataSourceUpdateSink) { + return new ClientContextImpl(this, newDataSourceUpdateSink, this.dataStoreUpdateSink); + } + + 
ClientContextImpl withDataStoreUpdateSink(DataStoreUpdateSink newDataStoreUpdateSink) { + return new ClientContextImpl(this, this.dataSourceUpdateSink, newDataStoreUpdateSink); + } + + @Override + public DataSourceUpdateSink getDataSourceUpdateSink() { + return dataSourceUpdateSink; + } + + @Override + public DataStoreUpdateSink getDataStoreUpdateSink() { + return dataStoreUpdateSink; + } + + static ClientContextImpl fromConfig( + String sdkKey, + LDConfig config, + ScheduledExecutorService sharedExecutor + ) { + ClientContext minimalContext = new ClientContext(sdkKey, config.applicationInfo, null, + null, config.offline, config.serviceEndpoints, config.threadPriority, config.wrapperInfo); + LoggingConfiguration loggingConfig = config.logging.build(minimalContext); + + ClientContext contextWithLogging = new ClientContext(sdkKey, config.applicationInfo, null, + loggingConfig, config.offline, config.serviceEndpoints, config.threadPriority, config.wrapperInfo); + HttpConfiguration httpConfig = config.http.build(contextWithLogging); + + if (httpConfig.getProxy() != null) { + contextWithLogging.getBaseLogger().info("Using proxy: {} {} authentication.", + httpConfig.getProxy(), + httpConfig.getProxyAuthentication() == null ? "without" : "with"); + } + + ClientContext contextWithHttpAndLogging = new ClientContext(sdkKey, config.applicationInfo, httpConfig, + loggingConfig, config.offline, config.serviceEndpoints, config.threadPriority, config.wrapperInfo); + + // Create a diagnostic store only if diagnostics are enabled. Diagnostics are enabled as long as 1. the + // opt-out property was not set in the config, and 2. we are using the standard event processor. 
+ DiagnosticStore diagnosticStore = null; + if (!config.diagnosticOptOut && config.events instanceof EventProcessorBuilder) { + diagnosticStore = new DiagnosticStore( + ServerSideDiagnosticEvents.getSdkDiagnosticParams(contextWithHttpAndLogging, config)); + } + + return new ClientContextImpl( + contextWithHttpAndLogging, + sharedExecutor, + diagnosticStore + ); + } + + /** + * This mechanism is a convenience for internal components to access the package-private fields of the + * context if it is a ClientContextImpl, and to receive null values for those fields if it is not. + * The latter case should only happen in application test code where the application developer has no + * way to create our package-private ClientContextImpl. In that case, we also generate a temporary + * sharedExecutor so components can work correctly in tests. + */ + static ClientContextImpl get(ClientContext context) { + if (context instanceof ClientContextImpl) { + return (ClientContextImpl)context; + } + synchronized (ClientContextImpl.class) { + if (fallbackSharedExecutor == null) { + fallbackSharedExecutor = Executors.newSingleThreadScheduledExecutor(); + } + } + return new ClientContextImpl(context, fallbackSharedExecutor, null); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Components.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Components.java new file mode 100644 index 0000000..05adbbf --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Components.java @@ -0,0 +1,457 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogAdapter; +import com.launchdarkly.logging.Logs; +import com.launchdarkly.sdk.server.ComponentsImpl.EventProcessorBuilderImpl; +import com.launchdarkly.sdk.server.ComponentsImpl.HooksConfigurationBuilderImpl; +import com.launchdarkly.sdk.server.ComponentsImpl.HttpBasicAuthentication; +import com.launchdarkly.sdk.server.ComponentsImpl.HttpConfigurationBuilderImpl; 
+import com.launchdarkly.sdk.server.ComponentsImpl.InMemoryDataStoreFactory; +import com.launchdarkly.sdk.server.ComponentsImpl.LoggingConfigurationBuilderImpl; +import com.launchdarkly.sdk.server.ComponentsImpl.NullDataSourceFactory; +import com.launchdarkly.sdk.server.ComponentsImpl.PersistentDataStoreBuilderImpl; +import com.launchdarkly.sdk.server.ComponentsImpl.PollingDataSourceBuilderImpl; +import com.launchdarkly.sdk.server.ComponentsImpl.ServiceEndpointsBuilderImpl; +import com.launchdarkly.sdk.server.ComponentsImpl.StreamingDataSourceBuilderImpl; +import com.launchdarkly.sdk.server.ComponentsImpl.WrapperInfoBuilderImpl; +import com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder; +import com.launchdarkly.sdk.server.integrations.BigSegmentsConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.EventProcessorBuilder; +import com.launchdarkly.sdk.server.integrations.HooksConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.LoggingConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder; +import com.launchdarkly.sdk.server.integrations.PollingDataSourceBuilder; +import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder; +import com.launchdarkly.sdk.server.integrations.StreamingDataSourceBuilder; +import com.launchdarkly.sdk.server.integrations.WrapperInfoBuilder; +import com.launchdarkly.sdk.server.interfaces.HttpAuthentication; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import static 
com.launchdarkly.sdk.server.ComponentsImpl.NOOP_EVENT_PROCESSOR_FACTORY; + +/** + * Provides configurable factories for the standard implementations of LaunchDarkly component interfaces. + *

+ * Some of the configuration options in {@link LDConfig.Builder} affect the entire SDK, but others are + * specific to one area of functionality, such as how the SDK receives feature flag updates or processes + * analytics events. For the latter, the standard way to specify a configuration is to call one of the + * static methods in {@link Components} (such as {@link #streamingDataSource()}), apply any desired + * configuration change to the object that that method returns (such as {@link StreamingDataSourceBuilder#initialReconnectDelay(java.time.Duration)}), + * and then use the corresponding method in {@link LDConfig.Builder} (such as {@link LDConfig.Builder#dataSource(ComponentConfigurer)}) + * to use that configured component in the SDK. + * + * @since 4.0.0 + */ +public abstract class Components { + private Components() {} + + /** + * Returns a configuration builder for the SDK's Big Segments feature. + *

+ * Big Segments are a specific type of user segment. For more information, read the + * LaunchDarkly documentation. + *

+ * After configuring this object, use + * {@link LDConfig.Builder#bigSegments(ComponentConfigurer)} to store it in your SDK + * configuration. For example, using the Redis integration: + * + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .bigSegments(Components.bigSegments(Redis.dataStore().prefix("app1"))
+   *             .userCacheSize(2000))
+   *         .build();
+   * 
+ * + *

+ * You must always specify the {@code storeFactory} parameter, to tell the SDK what database you + * are using. Several database integrations exist for the LaunchDarkly SDK, each with its own + * behavior and options specific to that database; this is described via some implementation of + * {@link BigSegmentStore}. The {@link BigSegmentsConfigurationBuilder} adds configuration + * options for aspects of SDK behavior that are independent of the database. In the example above, + * {@code prefix} is an option specifically for the Redis integration, whereas + * {@code userCacheSize} is an option that can be used for any data store type. + * + * @param storeConfigurer the factory for the underlying data store + * @return a {@link BigSegmentsConfigurationBuilder} + * @since 5.7.0 + * @see Components#bigSegments(ComponentConfigurer) + */ + public static BigSegmentsConfigurationBuilder bigSegments(ComponentConfigurer storeConfigurer) { + return new BigSegmentsConfigurationBuilder(storeConfigurer); + } + + /** + * Returns a configuration object for using the default in-memory implementation of a data store. + *

+ * Since it is the default, you do not normally need to call this method, unless you need to create + * a data store instance for testing purposes. + * + * @return a factory object + * @see LDConfig.Builder#dataStore(ComponentConfigurer) + * @since 4.12.0 + */ + public static ComponentConfigurer inMemoryDataStore() { + return InMemoryDataStoreFactory.INSTANCE; + } + + /** + * Returns a configuration builder for some implementation of a persistent data store. + *

+ * This method is used in conjunction with another factory object provided by specific components + * such as the Redis integration. The latter provides builder methods for options that are specific + * to that integration, while the {@link PersistentDataStoreBuilder} provides options that are + * applicable to any persistent data store (such as caching). For example: + * + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataStore(
+   *             Components.persistentDataStore(
+   *                 Redis.dataStore().url("redis://my-redis-host")
+   *             ).cacheSeconds(15)
+   *         )
+   *         .build();
+   * 
+ * + * See {@link PersistentDataStoreBuilder} for more on how this method is used. + *

+ * For more information on the available persistent data store implementations, see the reference + * guide on Using a persistent feature store. + * + * @param storeConfigurer the factory/builder for the specific kind of persistent data store + * @return a {@link PersistentDataStoreBuilder} + * @see LDConfig.Builder#dataStore(ComponentConfigurer) + * @since 4.12.0 + */ + public static PersistentDataStoreBuilder persistentDataStore(ComponentConfigurer storeConfigurer) { + return new PersistentDataStoreBuilderImpl(storeConfigurer); + } + + /** + * Returns a configuration builder for analytics event delivery. + *

+ * The default configuration has events enabled with default settings. If you want to + * customize this behavior, call this method to obtain a builder, change its properties + * with the {@link EventProcessorBuilder} properties, and pass it to {@link LDConfig.Builder#events(ComponentConfigurer)}: + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .events(Components.sendEvents().capacity(5000).flushIntervalSeconds(2))
+   *         .build();
+   * 
+ * To completely disable sending analytics events, use {@link #noEvents()} instead. + *

+ * Setting {@link LDConfig.Builder#offline(boolean)} to {@code true} will supersede this setting and completely + * disable network requests. + * + * @return a builder for setting streaming connection properties + * @see #noEvents() + * @see LDConfig.Builder#events + * @since 4.12.0 + */ + public static EventProcessorBuilder sendEvents() { + return new EventProcessorBuilderImpl(); + } + + /** + * Returns a configuration object that disables analytics events. + *

+ * Passing this to {@link LDConfig.Builder#events(ComponentConfigurer)} causes the SDK + * to discard all analytics events and not send them to LaunchDarkly, regardless of any other configuration. + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .events(Components.noEvents())
+   *         .build();
+   * 
+ * + * @return a factory object + * @see #sendEvents() + * @see LDConfig.Builder#events(ComponentConfigurer) + * @since 4.12.0 + */ + public static ComponentConfigurer noEvents() { + return NOOP_EVENT_PROCESSOR_FACTORY; + } + + /** + * Returns a configurable factory for using streaming mode to get feature flag data. + *

+ * By default, the SDK uses a streaming connection to receive feature flag data from LaunchDarkly. To use the + * default behavior, you do not need to call this method. However, if you want to customize the behavior of + * the connection, call this method to obtain a builder, change its properties with the + * {@link StreamingDataSourceBuilder} methods, and pass it to {@link LDConfig.Builder#dataSource(ComponentConfigurer)}: + *

 
+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataSource(Components.streamingDataSource().initialReconnectDelayMillis(500))
+   *         .build();
+   * 
+ *

+ * Setting {@link LDConfig.Builder#offline(boolean)} to {@code true} will supersede this setting and completely + * disable network requests. + * + * @return a builder for setting streaming connection properties + * @see LDConfig.Builder#dataSource(ComponentConfigurer) + * @since 4.12.0 + */ + public static StreamingDataSourceBuilder streamingDataSource() { + return new StreamingDataSourceBuilderImpl(); + } + + /** + * Returns a configurable factory for using polling mode to get feature flag data. + *

+ * This is not the default behavior; by default, the SDK uses a streaming connection to receive feature flag + * data from LaunchDarkly. In polling mode, the SDK instead makes a new HTTP request to LaunchDarkly at regular + * intervals. HTTP caching allows it to avoid redundantly downloading data if there have been no changes, but + * polling is still less efficient than streaming and should only be used on the advice of LaunchDarkly support. + *

+ * To use polling mode, call this method to obtain a builder, change its properties with the + * {@link PollingDataSourceBuilder} methods, and pass it to {@link LDConfig.Builder#dataSource(ComponentConfigurer)}: + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataSource(Components.pollingDataSource().pollIntervalMillis(45000))
+   *         .build();
+   * 
+ *

+ * Setting {@link LDConfig.Builder#offline(boolean)} to {@code true} will supersede this setting and completely + * disable network requests. + * + * @return a builder for setting polling properties + * @see LDConfig.Builder#dataSource(ComponentConfigurer) + * @since 4.12.0 + */ + public static PollingDataSourceBuilder pollingDataSource() { + return new PollingDataSourceBuilderImpl(); + } + + // For testing only - allows us to override the minimum polling interval + static PollingDataSourceBuilderImpl pollingDataSourceInternal() { + return new PollingDataSourceBuilderImpl(); + } + + /** + * Returns a configuration object that disables a direct connection with LaunchDarkly for feature flag updates. + *

+ * Passing this to {@link LDConfig.Builder#dataSource(ComponentConfigurer)} causes the SDK + * not to retrieve feature flag data from LaunchDarkly, regardless of any other configuration. + * This is normally done if you are using the Relay Proxy + * in "daemon mode", where an external process-- the Relay Proxy-- connects to LaunchDarkly and populates + * a persistent data store with the feature flag data. The data store could also be populated by + * another process that is running the LaunchDarkly SDK. If there is no external process updating + * the data store, then the SDK will not have any feature flag data and will return application + * default values only. + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataSource(Components.externalUpdatesOnly())
+   *         .dataStore(Components.persistentDataStore(Redis.dataStore())) // assuming the Relay Proxy is using Redis
+   *         .build();
+   * 
+ * + * @return a factory object + * @since 4.12.0 + * @see LDConfig.Builder#dataSource(ComponentConfigurer) + */ + public static ComponentConfigurer externalUpdatesOnly() { + return NullDataSourceFactory.INSTANCE; + } + + /** + * Returns a configuration builder for the SDK's networking configuration. + *

+ * Passing this to {@link LDConfig.Builder#http(ComponentConfigurer)} + * applies this configuration to all HTTP/HTTPS requests made by the SDK. + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .http(
+   *              Components.httpConfiguration()
+   *                  .connectTimeoutMillis(3000)
+   *                  .proxyHostAndPort("my-proxy", 8080)
+   *         )
+   *         .build();
+   * 
+ * + * @return a factory object + * @since 4.13.0 + * @see LDConfig.Builder#http(ComponentConfigurer) + */ + public static HttpConfigurationBuilder httpConfiguration() { + return new HttpConfigurationBuilderImpl(); + } + + /** + * Configures HTTP basic authentication, for use with a proxy server. + *

+   *     LDConfig config = new LDConfig.Builder()
+   *         .http(
+   *              Components.httpConfiguration()
+   *                  .proxyHostAndPort("my-proxy", 8080)
+   *                  .proxyAuthentication(Components.httpBasicAuthentication("username", "password"))
+   *         )
+   *         .build();
+   * 
+ * + * @param username the username + * @param password the password + * @return the basic authentication strategy + * @since 4.13.0 + * @see HttpConfigurationBuilder#proxyAuth(HttpAuthentication) + */ + public static HttpAuthentication httpBasicAuthentication(String username, String password) { + return new HttpBasicAuthentication(username, password); + } + + /** + * Returns a configuration builder for the SDK's logging configuration. + *

+ * Passing this to {@link LDConfig.Builder#logging(ComponentConfigurer)}, + * after setting any desired properties on the builder, applies this configuration to the SDK. + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .logging(
+   *              Components.logging()
+   *                  .logDataSourceOutageAsErrorAfter(Duration.ofSeconds(120))
+   *         )
+   *         .build();
+   * 
+ * + * @return a configuration builder + * @since 5.0.0 + * @see LDConfig.Builder#logging(ComponentConfigurer) + */ + public static LoggingConfigurationBuilder logging() { + return new LoggingConfigurationBuilderImpl(); + } + + /** + * Returns a configuration builder for the SDK's logging configuration, specifying the + * implementation of logging to use. + *

+ * This is a shortcut for Components.logging().adapter(logAdapter). The + * com.launchdarkly.logging + * API defines the {@link LDLogAdapter} interface to specify where log output should be sent. + *

+ * The default logging destination, if no adapter is specified, depends on whether + * SLF4J is present in the classpath. If it is, then the SDK uses + * {@link com.launchdarkly.logging.LDSLF4J#adapter()}, causing output to go to SLF4J; what happens to + * the output then is determined by the SLF4J configuration. If SLF4J is not present in the classpath, + * the SDK uses {@link Logs#toConsole()} instead, causing output to go to the {@code System.err} stream. + *

+ * You may use the {@link com.launchdarkly.logging.Logs} factory methods, or a custom implementation, + * to handle log output differently. For instance, you may specify + * {@link com.launchdarkly.logging.Logs#toJavaUtilLogging()} to use the java.util.logging + * framework. + *

+ * Passing this to {@link LDConfig.Builder#logging(ComponentConfigurer)}, + * after setting any desired properties on the builder, applies this configuration to the SDK. + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .logging(
+   *              Components.logging(Logs.basic())
+   *         )
+   *         .build();
+   * 
+ * + * @param logAdapter the log adapter + * @return a configuration builder + * @since 5.10.0 + * @see LDConfig.Builder#logging(ComponentConfigurer) + * @see LoggingConfigurationBuilder#adapter(LDLogAdapter) + */ + public static LoggingConfigurationBuilder logging(LDLogAdapter logAdapter) { + return logging().adapter(logAdapter); + } + + /** + * Returns a configuration builder that turns off SDK logging. + *

+ * Passing this to {@link LDConfig.Builder#logging(ComponentConfigurer)} + * applies this configuration to the SDK. + *

+ * It is equivalent to Components.logging(com.launchdarkly.logging.Logs.none()). + * + * @return a configuration builder + * @since 5.10.0 + */ + public static LoggingConfigurationBuilder noLogging() { + return logging().adapter(Logs.none()); + } + + /** + * Returns a configuration builder for the SDK's application metadata. + *

+ * Passing this to {@link LDConfig.Builder#applicationInfo(com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder)}, + * after setting any desired properties on the builder, applies this configuration to the SDK. + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .applicationInfo(
+   *             Components.applicationInfo()
+   *                 .applicationId("authentication-service")
+   *                 .applicationVersion("1.0.0")
+   *         )
+   *         .build();
+   * 
+ * + * @return a builder object + * @since 5.8.0 + * @see LDConfig.Builder#applicationInfo(com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder) + */ + public static ApplicationInfoBuilder applicationInfo() { + return new ApplicationInfoBuilder(); + } + + /** + * Returns a builder for configuring custom service URIs. + *

+ * Passing this to {@link LDConfig.Builder#serviceEndpoints(com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder)}, + * after setting any desired properties on the builder, applies this configuration to the SDK. + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .serviceEndpoints(
+   *             Components.serviceEndpoints()
+   *                 .relayProxy("http://my-relay-hostname:80")
+   *         )
+   *         .build();
+   * 
+ * + * @return a builder object + * @since 5.9.0 + * @see LDConfig.Builder#serviceEndpoints(com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder) + */ + public static ServiceEndpointsBuilder serviceEndpoints() { + return new ServiceEndpointsBuilderImpl(); + } + + /** + * Returns a builder for configuring hooks. + * + * Passing this to {@link LDConfig.Builder#hooks(com.launchdarkly.sdk.server.integrations.HooksConfigurationBuilder)}, + * after setting any desired hooks on the builder, applies this configuration to the SDK. + *

+   *     List hooks = myCreateHooksFunc();
+   *     LDConfig config = new LDConfig.Builder()
+   *         .hooks(
+   *             Components.hooks()
+   *                 .setHooks(hooks)
+   *         )
+   *         .build();
+   * 
+ * @return a {@link HooksConfigurationBuilder} that can be used for customization + */ + public static HooksConfigurationBuilder hooks() { + return new HooksConfigurationBuilderImpl(); + } + + /** + * Returns a wrapper information builder. + *

+ * This is intended for use by LaunchDarkly in the development of wrapper SDKs. + * + * @return a builder object + * @since 7.1.0 + */ + public static WrapperInfoBuilder wrapperInfo() { return new WrapperInfoBuilderImpl(); } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java new file mode 100644 index 0000000..0433a28 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ComponentsImpl.java @@ -0,0 +1,497 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.logging.LDLogAdapter; +import com.launchdarkly.logging.LDLogLevel; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LDSLF4J; +import com.launchdarkly.logging.Logs; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.DefaultEventSender; +import com.launchdarkly.sdk.internal.events.DiagnosticConfigProperty; +import com.launchdarkly.sdk.internal.events.EventSender; +import com.launchdarkly.sdk.internal.events.EventsConfiguration; +import com.launchdarkly.sdk.internal.http.HttpProperties; +import com.launchdarkly.sdk.server.integrations.EventProcessorBuilder; +import com.launchdarkly.sdk.server.integrations.HooksConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.LoggingConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder; +import com.launchdarkly.sdk.server.integrations.PollingDataSourceBuilder; +import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder; +import com.launchdarkly.sdk.server.integrations.StreamingDataSourceBuilder; +import com.launchdarkly.sdk.server.integrations.WrapperInfoBuilder; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import 
com.launchdarkly.sdk.server.interfaces.HttpAuthentication; +import com.launchdarkly.sdk.server.interfaces.ServiceEndpoints; +import com.launchdarkly.sdk.server.interfaces.WrapperInfo; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DiagnosticDescription; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; +import com.launchdarkly.sdk.server.subsystems.HookConfiguration; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; +import okhttp3.Credentials; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.net.URI; +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Future; + +/** + * This class contains the package-private implementations of component factories and builders whose + * public factory methods are in {@link Components}. 
+ */ +abstract class ComponentsImpl { + private ComponentsImpl() {} + + static final class InMemoryDataStoreFactory implements ComponentConfigurer, DiagnosticDescription { + static final InMemoryDataStoreFactory INSTANCE = new InMemoryDataStoreFactory(); + + @Override + public DataStore build(ClientContext context) { + return new InMemoryDataStore(); + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + return LDValue.of("memory"); + } + } + + static final EventProcessor NOOP_EVENT_PROCESSOR = new NoOpEventProcessor(); + static final ComponentConfigurer NOOP_EVENT_PROCESSOR_FACTORY = context -> NOOP_EVENT_PROCESSOR; + + static final class NullDataSourceFactory implements ComponentConfigurer, DiagnosticDescription { + static final NullDataSourceFactory INSTANCE = new NullDataSourceFactory(); + + @Override + public DataSource build(ClientContext context) { + LDLogger logger = context.getBaseLogger(); + if (context.isOffline()) { + // If they have explicitly called offline(true) to disable everything, we'll log this slightly + // more specific message. + logger.info("Starting LaunchDarkly client in offline mode"); + } else { + logger.info("LaunchDarkly client will not connect to Launchdarkly for feature flag data"); + } + context.getDataSourceUpdateSink().updateStatus(DataSourceStatusProvider.State.VALID, null); + return NullDataSource.INSTANCE; + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + // The difference between "offline" and "using the Relay daemon" is irrelevant from the data source's + // point of view, but we describe them differently in diagnostic events. This is easy because if we were + // configured to be completely offline... we wouldn't be sending any diagnostic events. Therefore, if + // Components.externalUpdatesOnly() was specified as the data source and we are sending a diagnostic + // event, we can assume usingRelayDaemon should be true. 
+ return LDValue.buildObject() + .put(DiagnosticConfigProperty.CUSTOM_BASE_URI.name, false) + .put(DiagnosticConfigProperty.CUSTOM_STREAM_URI.name, false) + .put(DiagnosticConfigProperty.STREAMING_DISABLED.name, false) + .put(DiagnosticConfigProperty.USING_RELAY_DAEMON.name, true) + .build(); + } + } + + // Package-private for visibility in tests + static final class NullDataSource implements DataSource { + static final DataSource INSTANCE = new NullDataSource(); + @Override + public Future start() { + return CompletableFuture.completedFuture(null); + } + + @Override + public boolean isInitialized() { + return true; + } + + @Override + public void close() throws IOException {} + } + + static final class StreamingDataSourceBuilderImpl extends StreamingDataSourceBuilder + implements DiagnosticDescription { + @Override + public DataSource build(ClientContext context) { + LDLogger baseLogger = context.getBaseLogger(); + LDLogger logger = baseLogger.subLogger(Loggers.DATA_SOURCE_LOGGER_NAME); + logger.info("Enabling streaming API"); + + URI streamUri = StandardEndpoints.selectBaseUri( + context.getServiceEndpoints().getStreamingBaseUri(), + StandardEndpoints.DEFAULT_STREAMING_BASE_URI, + "streaming", + baseLogger); + + return new StreamProcessor( + toHttpProperties(context.getHttp()), + context.getDataSourceUpdateSink(), + context.getThreadPriority(), + ClientContextImpl.get(context).diagnosticStore, + streamUri, + payloadFilter, + initialReconnectDelay, + logger); + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + return LDValue.buildObject() + .put(DiagnosticConfigProperty.STREAMING_DISABLED.name, false) + .put(DiagnosticConfigProperty.CUSTOM_BASE_URI.name, false) + .put(DiagnosticConfigProperty.CUSTOM_STREAM_URI.name, + StandardEndpoints.isCustomBaseUri( + clientContext.getServiceEndpoints().getStreamingBaseUri(), + StandardEndpoints.DEFAULT_STREAMING_BASE_URI)) + .put(DiagnosticConfigProperty.RECONNECT_TIME_MILLIS.name, 
initialReconnectDelay.toMillis()) + .put(DiagnosticConfigProperty.USING_RELAY_DAEMON.name, false) + .build(); + } + } + + static final class PollingDataSourceBuilderImpl extends PollingDataSourceBuilder implements DiagnosticDescription { + // for testing only + PollingDataSourceBuilderImpl pollIntervalWithNoMinimum(Duration pollInterval) { + this.pollInterval = pollInterval; + return this; + } + + @Override + public DataSource build(ClientContext context) { + LDLogger baseLogger = context.getBaseLogger(); + LDLogger logger = baseLogger.subLogger(Loggers.DATA_SOURCE_LOGGER_NAME); + + logger.info("Disabling streaming API"); + logger.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support"); + + URI pollUri = StandardEndpoints.selectBaseUri( + context.getServiceEndpoints().getPollingBaseUri(), + StandardEndpoints.DEFAULT_POLLING_BASE_URI, + "polling", + baseLogger); + + DefaultFeatureRequestor requestor = new DefaultFeatureRequestor( + toHttpProperties(context.getHttp()), + pollUri, + payloadFilter, + logger); + + return new PollingProcessor( + requestor, + context.getDataSourceUpdateSink(), + ClientContextImpl.get(context).sharedExecutor, + pollInterval, + logger); + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + return LDValue.buildObject() + .put(DiagnosticConfigProperty.STREAMING_DISABLED.name, true) + .put(DiagnosticConfigProperty.CUSTOM_BASE_URI.name, + StandardEndpoints.isCustomBaseUri( + clientContext.getServiceEndpoints().getPollingBaseUri(), + StandardEndpoints.DEFAULT_POLLING_BASE_URI)) + .put(DiagnosticConfigProperty.CUSTOM_STREAM_URI.name, false) + .put(DiagnosticConfigProperty.POLLING_INTERVAL_MILLIS.name, pollInterval.toMillis()) + .put(DiagnosticConfigProperty.USING_RELAY_DAEMON.name, false) + .build(); + } + } + + static final class EventProcessorBuilderImpl extends EventProcessorBuilder + implements DiagnosticDescription { + @Override + public EventProcessor 
build(ClientContext context) { + EventSender eventSender; + if (eventSenderConfigurer == null) { + eventSender = new DefaultEventSender( + toHttpProperties(context.getHttp()), + null, // use default request path for server-side events + null, // use default request path for client-side events + 0, // 0 means default retry delay + context.getBaseLogger().subLogger(Loggers.EVENTS_LOGGER_NAME)); + } else { + eventSender = new EventSenderWrapper(eventSenderConfigurer.build(context)); + } + URI eventsUri = StandardEndpoints.selectBaseUri( + context.getServiceEndpoints().getEventsBaseUri(), + StandardEndpoints.DEFAULT_EVENTS_BASE_URI, + "events", + context.getBaseLogger()); + EventsConfiguration eventsConfig = new EventsConfiguration( + allAttributesPrivate, + capacity, + new ServerSideEventContextDeduplicator(userKeysCapacity, userKeysFlushInterval), + diagnosticRecordingInterval.toMillis(), + ClientContextImpl.get(context).diagnosticStore, + eventSender, + EventsConfiguration.DEFAULT_EVENT_SENDING_THREAD_POOL_SIZE, + eventsUri, + flushInterval.toMillis(), + false, + false, + privateAttributes); + return new DefaultEventProcessorWrapper(context, eventsConfig); + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + return LDValue.buildObject() + .put(DiagnosticConfigProperty.ALL_ATTRIBUTES_PRIVATE.name, allAttributesPrivate) + .put(DiagnosticConfigProperty.CUSTOM_EVENTS_URI.name, + StandardEndpoints.isCustomBaseUri( + clientContext.getServiceEndpoints().getEventsBaseUri(), + StandardEndpoints.DEFAULT_EVENTS_BASE_URI)) + .put(DiagnosticConfigProperty.DIAGNOSTIC_RECORDING_INTERVAL_MILLIS.name, diagnosticRecordingInterval.toMillis()) + .put(DiagnosticConfigProperty.EVENTS_CAPACITY.name, capacity) + .put(DiagnosticConfigProperty.EVENTS_FLUSH_INTERVAL_MILLIS.name, flushInterval.toMillis()) + .put(DiagnosticConfigProperty.SAMPLING_INTERVAL.name, 0) + .put(DiagnosticConfigProperty.USER_KEYS_CAPACITY.name, userKeysCapacity) + 
.put(DiagnosticConfigProperty.USER_KEYS_FLUSH_INTERVAL_MILLIS.name, userKeysFlushInterval.toMillis()) + .build(); + } + + static final class EventSenderWrapper implements EventSender { + private final com.launchdarkly.sdk.server.subsystems.EventSender wrappedSender; + + EventSenderWrapper(com.launchdarkly.sdk.server.subsystems.EventSender wrappedSender) { + this.wrappedSender = wrappedSender; + } + + @Override + public void close() throws IOException { + wrappedSender.close(); + } + + @Override + public Result sendAnalyticsEvents(byte[] data, int eventCount, URI eventsBaseUri) { + return transformResult(wrappedSender.sendAnalyticsEvents(data, eventCount, eventsBaseUri)); + } + + @Override + public Result sendDiagnosticEvent(byte[] data, URI eventsBaseUri) { + return transformResult(wrappedSender.sendDiagnosticEvent(data, eventsBaseUri)); + } + + private Result transformResult(com.launchdarkly.sdk.server.subsystems.EventSender.Result result) { + switch (result) { + case FAILURE: + return new Result(false, false, null); + case STOP: + return new Result(false, true, null); + default: + return new Result(true, false, null); + } + } + } + } + + static final class HttpConfigurationBuilderImpl extends HttpConfigurationBuilder { + @Override + public HttpConfiguration build(ClientContext clientContext) { + LDLogger logger = clientContext.getBaseLogger(); + + // Build the default headers + Map headers = new HashMap<>(); + headers.put("Authorization", clientContext.getSdkKey()); + headers.put("User-Agent", "JavaClient/" + Version.SDK_VERSION); + + if (clientContext.getApplicationInfo() != null) { + String tagHeader = Util.applicationTagHeader(clientContext.getApplicationInfo(), logger); + if (!tagHeader.isEmpty()) { + headers.put("X-LaunchDarkly-Tags", tagHeader); + } + } + + String wrapperNameToUse = null; + String wrapperVersionToUse = null; + + WrapperInfo wrapperInfo = clientContext.getWrapperInfo(); + // If information from wrapperInfo is available, then it overwrites 
that from the http properties + // builder. + if(wrapperInfo != null) { + wrapperNameToUse = wrapperInfo.getWrapperName(); + wrapperVersionToUse = wrapperInfo.getWrapperVersion(); + } + else if (wrapperName != null) { + wrapperNameToUse = wrapperName; + wrapperVersionToUse = wrapperVersion; + } + + if(wrapperNameToUse != null) { + String wrapperId = wrapperVersionToUse == null ? wrapperNameToUse : (wrapperNameToUse + "/" + wrapperVersionToUse); + headers.put("X-LaunchDarkly-Wrapper", wrapperId); + } + + if (!customHeaders.isEmpty()) { + headers.putAll(customHeaders); + } + + Proxy proxy = proxyHost == null ? null : new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort)); + + return new HttpConfiguration( + connectTimeout, + headers, + proxy, + proxyAuth, + socketFactory, + socketTimeout, + sslSocketFactory, + trustManager); + } + } + + static final class HttpBasicAuthentication implements HttpAuthentication { + private final String username; + private final String password; + + HttpBasicAuthentication(String username, String password) { + this.username = username; + this.password = password; + } + + @Override + public String provideAuthorization(Iterable challenges) { + return Credentials.basic(username, password); + } + } + + static final class PersistentDataStoreBuilderImpl extends PersistentDataStoreBuilder implements DiagnosticDescription { + public PersistentDataStoreBuilderImpl(ComponentConfigurer storeConfigurer) { + super(storeConfigurer); + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + if (persistentDataStoreConfigurer instanceof DiagnosticDescription) { + return ((DiagnosticDescription) persistentDataStoreConfigurer).describeConfiguration(clientContext); + } + return LDValue.of("custom"); + } + + @Override + public DataStore build(ClientContext context) { + PersistentDataStore core = persistentDataStoreConfigurer.build(context); + return new PersistentDataStoreWrapper( + core, + cacheTime, + 
staleValuesPolicy, + recordCacheStats, + context.getDataStoreUpdateSink(), + ClientContextImpl.get(context).sharedExecutor, + context.getBaseLogger().subLogger(Loggers.DATA_STORE_LOGGER_NAME)); + } + } + + static final class LoggingConfigurationBuilderImpl extends LoggingConfigurationBuilder { + @Override + public LoggingConfiguration build(ClientContext clientContext) { + LDLogAdapter adapter = logAdapter == null ? getDefaultLogAdapter() : logAdapter; + LDLogAdapter filteredAdapter = Logs.level(adapter, + minimumLevel == null ? LDLogLevel.INFO : minimumLevel); + // If the adapter is for a framework like SLF4J or java.util.logging that has its own external + // configuration system, then calling Logs.level here has no effect and filteredAdapter will be + // just the same as adapter. + String name = baseName == null ? Loggers.BASE_LOGGER_NAME : baseName; + return new LoggingConfiguration(name, filteredAdapter, logDataSourceOutageAsErrorAfter); + } + + private static LDLogAdapter getDefaultLogAdapter() { + // If SLF4J is present in the classpath, use that by default; otherwise use the console. + try { + Class.forName("org.slf4j.LoggerFactory"); + return LDSLF4J.adapter(); + } catch (ClassNotFoundException e) { + return Logs.toConsole(); + } + } + } + + static final class ServiceEndpointsBuilderImpl extends ServiceEndpointsBuilder { + @Override + public ServiceEndpoints createServiceEndpoints() { + // If *any* custom URIs have been set, then we do not want to use default values for any that were not set, + // so we will leave those null. That way, if we decide later on (in other component factories, such as + // EventProcessorBuilder) that we are actually interested in one of these values, and we + // see that it is null, we can assume that there was a configuration mistake and log an + // error. 
+ if (streamingBaseUri == null && pollingBaseUri == null && eventsBaseUri == null) { + return new ServiceEndpoints( + StandardEndpoints.DEFAULT_STREAMING_BASE_URI, + StandardEndpoints.DEFAULT_POLLING_BASE_URI, + StandardEndpoints.DEFAULT_EVENTS_BASE_URI); + } + return new ServiceEndpoints(streamingBaseUri, pollingBaseUri, eventsBaseUri); + } + + public static ServiceEndpointsBuilderImpl fromServiceEndpoints(ServiceEndpoints endpoints) { + ServiceEndpointsBuilderImpl newBuilder = new ServiceEndpointsBuilderImpl(); + newBuilder.eventsBaseUri = endpoints.getEventsBaseUri(); + newBuilder.pollingBaseUri = endpoints.getPollingBaseUri(); + newBuilder.streamingBaseUri = endpoints.getStreamingBaseUri(); + return newBuilder; + } + } + + static HttpProperties toHttpProperties(HttpConfiguration httpConfig) { + okhttp3.Authenticator proxyAuth = null; + if (httpConfig.getProxyAuthentication() != null) { + proxyAuth = Util.okhttpAuthenticatorFromHttpAuthStrategy(httpConfig.getProxyAuthentication()); + } + return new HttpProperties( + httpConfig.getConnectTimeout().toMillis(), + ImmutableMap.copyOf(httpConfig.getDefaultHeaders()), + null, + httpConfig.getProxy(), + proxyAuth, + httpConfig.getSocketFactory(), + httpConfig.getSocketTimeout().toMillis(), + httpConfig.getSslSocketFactory(), + httpConfig.getTrustManager()); + } + + static final class HooksConfigurationBuilderImpl extends HooksConfigurationBuilder { + + public static HooksConfigurationBuilderImpl fromHooksConfiguration(HookConfiguration hooksConfiguration) { + HooksConfigurationBuilderImpl builder = new HooksConfigurationBuilderImpl(); + builder.setHooks(hooksConfiguration.getHooks()); + return builder; + } + + @Override + public HookConfiguration build() { + return new HookConfiguration(hooks); + } + } + + static final class WrapperInfoBuilderImpl extends WrapperInfoBuilder { + public WrapperInfoBuilderImpl() { + this(null, null); + } + public WrapperInfoBuilderImpl(String wrapperName, String wrapperVersion) { + 
this.wrapperName = wrapperName; + this.wrapperVersion = wrapperVersion; + } + + public WrapperInfo build() { + return new WrapperInfo(wrapperName, wrapperVersion); + } + + public static WrapperInfoBuilderImpl fromInfo(WrapperInfo info) { + return new WrapperInfoBuilderImpl(info.getWrapperName(), info.getWrapperVersion()); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModel.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModel.java new file mode 100644 index 0000000..1e919cf --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModel.java @@ -0,0 +1,733 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.gson.annotations.JsonAdapter; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed; +import com.launchdarkly.sdk.server.DataModelPreprocessing.FlagPreprocessed; +import com.launchdarkly.sdk.server.DataModelPreprocessing.FlagRulePreprocessed; +import com.launchdarkly.sdk.server.DataModelPreprocessing.PrerequisitePreprocessed; +import com.launchdarkly.sdk.server.DataModelPreprocessing.TargetPreprocessed; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptySet; + +// IMPLEMENTATION NOTES: +// +// - FeatureFlag, Segment, and all other data model classes contained within them, must be package-private. +// We don't want application code to see these types, because we need to be free to change their details without +// breaking the application. 
+// +// - We expose our DataKind instances publicly because application code may need to reference them if it is +// implementing a custom component such as a data store. But beyond the mere fact of there being these kinds of +// data, applications should not be concerned with their structure. +// +// - For classes that can be deserialized from JSON, if we are relying on Gson's reflective behavior (i.e. if +// the class does not have a custom TypeAdapter), there must be an empty constructor, and the fields cannot +// be final. This is because of how Gson works: it creates an instance first, then sets the fields; that also +// means we cannot do any transformation/validation of the fields in the constructor. But if we have a custom +// deserializer, then we should use final fields. +// +// - In any case, there should be a constructor that takes all the fields; we should use that whenever we need +// to create these objects programmatically (so that if we are able at some point to make the fields final, that +// won't break anything). +// +// - For properties that have a collection type such as List, we should ensure that a null is always changed to +// an empty list (in the constructor, if the field can be made final; otherwise in the getter). Semantically +// there is no difference in the data model between an empty list and a null list, and in some languages +// (particularly Go) it is easy for an uninitialized list to be serialized to JSON as null. +// +// - Some classes have a "preprocessed" field containing types defined in DataModelPreprocessing. These fields +// must always be marked transient, so Gson will not serialize them. They are populated when we deserialize a +// FeatureFlag or Segment, because those types implement JsonHelpers.PostProcessingDeserializable (the +// afterDeserialized() method). + +/** + * Contains information about the internal data model for feature flags and user segments. + *
<p>
+ * The details of the data model are not public to application code (although of course developers can easily + * look at the code or the data) so that changes to LaunchDarkly SDK implementation details will not be breaking + * changes to the application. Therefore, most of the members of this class are package-private. The public + * members provide a high-level description of model objects so that custom integration code or test code can + * store or serialize them. + */ +public abstract class DataModel { + private DataModel() {} + + /** + * The {@link DataKind} instance that describes feature flag data. + *
<p>
+ * Applications should not need to reference this object directly. It is public so that custom integrations + * and test code can serialize or deserialize data or inject it into a data store. + */ + public static DataKind FEATURES = new DataKind("features", + DataModel::serializeItem, + s -> deserializeItem(s, FeatureFlag.class)); + + /** + * The {@link DataKind} instance that describes user segment data. + *
<p>
+ * Applications should not need to reference this object directly. It is public so that custom integrations + * and test code can serialize or deserialize data or inject it into a data store. + */ + public static DataKind SEGMENTS = new DataKind("segments", + DataModel::serializeItem, + s -> deserializeItem(s, Segment.class)); + + /** + * An enumeration of all supported {@link DataKind} types. + *
<p>
+ * Applications should not need to reference this object directly. It is public so that custom data store + * implementations can determine ahead of time what kinds of model objects may need to be stored, if + * necessary. + */ + public static Iterable ALL_DATA_KINDS = ImmutableList.of(FEATURES, SEGMENTS); + + private static ItemDescriptor deserializeItem(String s, Class itemClass) { + VersionedData o = JsonHelpers.deserialize(s, itemClass); + return o.isDeleted() ? ItemDescriptor.deletedItem(o.getVersion()) : new ItemDescriptor(o.getVersion(), o); + } + + private static String serializeItem(ItemDescriptor item) { + Object o = item.getItem(); + if (o != null) { + return JsonHelpers.serialize(o); + } + return "{\"version\":" + item.getVersion() + ",\"deleted\":true}"; + } + + // All of these inner data model classes should have package-private scope. They should have only property + // accessors; the evaluator logic is in Evaluator, EvaluatorBucketing, and EvaluatorOperators. + + /** + * Common interface for FeatureFlag and Segment, for convenience in accessing their common properties. + * @since 3.0.0 + */ + interface VersionedData { + String getKey(); + int getVersion(); + /** + * True if this is a placeholder for a deleted item. 
+ * @return true if deleted + */ + boolean isDeleted(); + } + + @JsonAdapter(JsonHelpers.PostProcessingDeserializableTypeAdapterFactory.class) + static final class FeatureFlag implements VersionedData, JsonHelpers.PostProcessingDeserializable { + private String key; + private int version; + private boolean on; + private List prerequisites; + private String salt; + private List targets; + private List contextTargets; + private List rules; + private VariationOrRollout fallthrough; + private Integer offVariation; //optional + private List variations; + private boolean clientSide; + private boolean trackEvents; + private boolean trackEventsFallthrough; + private Long debugEventsUntilDate; + private boolean deleted; + private Long samplingRatio; + private Migration migration; + private boolean excludeFromSummaries; + + /** + * Container for migration specific flag data. + */ + static class Migration { + Migration() {} + + Migration(Long checkRatio) { + this.checkRatio = checkRatio; + } + private Long checkRatio; + + public Long getCheckRatio() { + return checkRatio; + } + } + + transient FlagPreprocessed preprocessed; + + // We need this so Gson doesn't complain in certain java environments that restrict unsafe allocation + FeatureFlag() {} + + FeatureFlag(String key, int version, boolean on, List prerequisites, String salt, List targets, + List contextTargets, List rules, VariationOrRollout fallthrough, Integer offVariation, + List variations, boolean clientSide, boolean trackEvents, boolean trackEventsFallthrough, + Long debugEventsUntilDate, boolean deleted, Long samplingRatio, Migration migration, boolean excludeFromSummaries) { + this.key = key; + this.version = version; + this.on = on; + this.prerequisites = prerequisites; + this.salt = salt; + this.targets = targets; + this.contextTargets = contextTargets; + this.rules = rules; + this.fallthrough = fallthrough; + this.offVariation = offVariation; + this.variations = variations; + this.clientSide = clientSide; + 
this.trackEvents = trackEvents; + this.trackEventsFallthrough = trackEventsFallthrough; + this.debugEventsUntilDate = debugEventsUntilDate; + this.deleted = deleted; + this.samplingRatio = samplingRatio; + this.migration = migration; + this.excludeFromSummaries = excludeFromSummaries; + } + + public int getVersion() { + return version; + } + + public String getKey() { + return key; + } + + boolean isTrackEvents() { + return trackEvents; + } + + boolean isTrackEventsFallthrough() { + return trackEventsFallthrough; + } + + Long getDebugEventsUntilDate() { + return debugEventsUntilDate; + } + + public boolean isDeleted() { + return deleted; + } + + boolean isOn() { + return on; + } + + List getPrerequisites() { + return prerequisites == null ? emptyList() : prerequisites; + } + + String getSalt() { + return salt; + } + + // Guaranteed non-null + List getTargets() { + return targets == null ? emptyList() : targets; + } + + // Guaranteed non-null + List getContextTargets() { + return contextTargets == null ? emptyList() : contextTargets; + } + + // Guaranteed non-null + List getRules() { + return rules == null ? emptyList() : rules; + } + + VariationOrRollout getFallthrough() { + return fallthrough; + } + + // Guaranteed non-null + List getVariations() { + return variations == null ? 
emptyList() : variations; + } + + Integer getOffVariation() { + return offVariation; + } + + boolean isClientSide() { + return clientSide; + } + + Long getSamplingRatio() { return samplingRatio; } + + Migration getMigration() { return migration; } + + boolean isExcludeFromSummaries() { + return excludeFromSummaries; + } + + public void afterDeserialized() { + DataModelPreprocessing.preprocessFlag(this); + } + } + + static final class Prerequisite { + private String key; + private int variation; + + transient PrerequisitePreprocessed preprocessed; + + Prerequisite() {} + + Prerequisite(String key, int variation) { + this.key = key; + this.variation = variation; + } + + String getKey() { + return key; + } + + int getVariation() { + return variation; + } + } + + static final class Target { + private ContextKind contextKind; + private Set values; + private int variation; + + transient TargetPreprocessed preprocessed; + + Target() {} + + Target(ContextKind contextKind, Set values, int variation) { + this.contextKind = contextKind; + this.values = values; + this.variation = variation; + } + + ContextKind getContextKind() { + return contextKind; + } + + // Guaranteed non-null + Collection getValues() { + return values == null ? emptySet() : values; + } + + int getVariation() { + return variation; + } + } + + /** + * Expresses a set of AND-ed matching conditions for a user, along with either the fixed variation or percent rollout + * to serve if the conditions match. + * Invariant: one of the variation or rollout must be non-nil. 
+ */ + static final class Rule extends VariationOrRollout { + private String id; + private List clauses; + private boolean trackEvents; + + transient FlagRulePreprocessed preprocessed; + + Rule() { + super(); + } + + Rule(String id, List clauses, Integer variation, Rollout rollout, boolean trackEvents) { + super(variation, rollout); + this.id = id; + this.clauses = clauses; + this.trackEvents = trackEvents; + } + + String getId() { + return id; + } + + // Guaranteed non-null + List getClauses() { + return clauses == null ? emptyList() : clauses; + } + + boolean isTrackEvents() { + return trackEvents; + } + } + + @JsonAdapter(DataModelSerialization.ClauseTypeAdapter.class) + static final class Clause { + private final ContextKind contextKind; + private final AttributeRef attribute; + private final Operator op; + private final List values; //interpreted as an OR of values + private final boolean negate; + + transient ClausePreprocessed preprocessed; + + Clause(ContextKind contextKind, AttributeRef attribute, Operator op, List values, boolean negate) { + this.contextKind = contextKind; + this.attribute = attribute; + this.op = op; + this.values = values == null ? emptyList() : values; + this.negate = negate; + } + + ContextKind getContextKind() { + return contextKind; + } + + AttributeRef getAttribute() { + return attribute; + } + + Operator getOp() { + return op; + } + + // Guaranteed non-null + List getValues() { + return values; + } + + boolean isNegate() { + return negate; + } + } + + @JsonAdapter(DataModelSerialization.RolloutTypeAdapter.class) + static final class Rollout { + private final ContextKind contextKind; + private final List variations; + private final AttributeRef bucketBy; + private final RolloutKind kind; + private final Integer seed; + + Rollout(ContextKind contextKind, List variations, AttributeRef bucketBy, RolloutKind kind, Integer seed) { + this.contextKind = contextKind; + this.variations = variations == null ? 
emptyList() : variations; + this.bucketBy = bucketBy; + this.kind = kind; + this.seed = seed; + } + + ContextKind getContextKind() { + return contextKind; + } + + // Guaranteed non-null + List getVariations() { + return variations; + } + + AttributeRef getBucketBy() { + return bucketBy; + } + + RolloutKind getKind() { + return this.kind; + } + + Integer getSeed() { + return this.seed; + } + + boolean isExperiment() { + return kind == RolloutKind.experiment; + } + } + + /** + * Contains either a fixed variation or percent rollout to serve. + * Invariant: one of the variation or rollout must be non-nil. + */ + static class VariationOrRollout { + private Integer variation; + private Rollout rollout; + + VariationOrRollout() {} + + VariationOrRollout(Integer variation, Rollout rollout) { + this.variation = variation; + this.rollout = rollout; + } + + Integer getVariation() { + return variation; + } + + Rollout getRollout() { + return rollout; + } + } + + static final class WeightedVariation { + private int variation; + private int weight; + private boolean untracked; + + WeightedVariation() {} + + WeightedVariation(int variation, int weight, boolean untracked) { + this.variation = variation; + this.weight = weight; + this.untracked = untracked; + } + + int getVariation() { + return variation; + } + + int getWeight() { + return weight; + } + + boolean isUntracked() { + return untracked; + } + } + + @JsonAdapter(JsonHelpers.PostProcessingDeserializableTypeAdapterFactory.class) + static final class Segment implements VersionedData, JsonHelpers.PostProcessingDeserializable { + private String key; + private Set included; + private Set excluded; + private List includedContexts; + private List excludedContexts; + private String salt; + private List rules; + private int version; + private boolean deleted; + private boolean unbounded; + private ContextKind unboundedContextKind; + private Integer generation; + + Segment() {} + + Segment(String key, + Set included, + Set 
excluded, + List includedContexts, + List excludedContexts, + String salt, + List rules, + int version, + boolean deleted, + boolean unbounded, + ContextKind unboundedContextKind, + Integer generation) { + this.key = key; + this.included = included; + this.excluded = excluded; + this.includedContexts = includedContexts; + this.excludedContexts = excludedContexts; + this.salt = salt; + this.rules = rules; + this.version = version; + this.deleted = deleted; + this.unbounded = unbounded; + this.unboundedContextKind = unboundedContextKind; + this.generation = generation; + } + + public String getKey() { + return key; + } + + // Guaranteed non-null + Collection getIncluded() { + return included == null ? emptySet() : included; + } + + // Guaranteed non-null + Collection getExcluded() { + return excluded == null ? emptySet() : excluded; + } + + // Guaranteed non-null + List getIncludedContexts() { + return includedContexts == null ? emptyList() : includedContexts; + } + + // Guaranteed non-null + List getExcludedContexts() { + return excludedContexts == null ? emptyList() : excludedContexts; + } + + String getSalt() { + return salt; + } + + // Guaranteed non-null + List getRules() { + return rules == null ? 
emptyList() : rules; + } + + public int getVersion() { + return version; + } + + public boolean isDeleted() { + return deleted; + } + + public boolean isUnbounded() { + return unbounded; + } + + public ContextKind getUnboundedContextKind() { + return unboundedContextKind; + } + + public Integer getGeneration() { + return generation; + } + + public void afterDeserialized() { + DataModelPreprocessing.preprocessSegment(this); + } + } + + @JsonAdapter(DataModelSerialization.SegmentRuleTypeAdapter.class) + static final class SegmentRule { + private final List clauses; + private final Integer weight; + private final ContextKind rolloutContextKind; + private final AttributeRef bucketBy; + + SegmentRule(List clauses, Integer weight, ContextKind rolloutContextKind, AttributeRef bucketBy) { + this.clauses = clauses == null ? emptyList() : clauses; + this.weight = weight; + this.rolloutContextKind = rolloutContextKind; + this.bucketBy = bucketBy; + } + + // Guaranteed non-null + List getClauses() { + return clauses; + } + + Integer getWeight() { + return weight; + } + + ContextKind getRolloutContextKind() { + return rolloutContextKind; + } + + AttributeRef getBucketBy() { + return bucketBy; + } + } + + static class SegmentTarget { + private ContextKind contextKind; + private Set values; + + SegmentTarget(ContextKind contextKind, Set values) { + this.contextKind = contextKind; + this.values = values; + } + + ContextKind getContextKind() { + return contextKind; + } + + Set getValues() { // guaranteed non-null + return values == null ? emptySet() : values; + } + } + + /** + * This is an enum-like type rather than an enum because we don't want unrecognized operators to + * cause parsing of the whole JSON environment to fail. The implementation of each operator is in + * EvaluatorOperators. 
+ */ + static class Operator { + private final String name; + private final boolean builtin; + private final int hashCode; + + private static final Map builtins = new HashMap<>(); + + private Operator(String name, boolean builtin) { + this.name = name; + this.builtin = builtin; + + // Precompute the hash code for fast map lookups - String.hashCode() does memoize this value, + // sort of, but we shouldn't have to rely on that + this.hashCode = name.hashCode(); + } + + private static Operator builtin(String name) { + Operator op = new Operator(name, true); + builtins.put(name, op); + return op; + } + + static final Operator in = builtin("in"); + static final Operator startsWith = builtin("startsWith"); + static final Operator endsWith = builtin("endsWith"); + static final Operator matches = builtin("matches"); + static final Operator contains = builtin("contains"); + static final Operator lessThan = builtin("lessThan"); + static final Operator lessThanOrEqual = builtin("lessThanOrEqual"); + static final Operator greaterThan = builtin("greaterThan"); + static final Operator greaterThanOrEqual = builtin("greaterThanOrEqual"); + static final Operator before = builtin("before"); + static final Operator after = builtin("after"); + static final Operator semVerEqual = builtin("semVerEqual"); + static final Operator semVerLessThan = builtin("semVerLessThan"); + static final Operator semVerGreaterThan = builtin("semVerGreaterThan"); + static final Operator segmentMatch = builtin("segmentMatch"); + + static Operator forName(String name) { + // Normally we will only see names that are in the builtins map. Anything else is something + // the SDK doesn't recognize, but we still need to allow it to exist rather than throwing + // an error. + Operator op = builtins.get(name); + return op == null ? 
new Operator(name, false) : op; + } + + static Iterable getBuiltins() { + return builtins.values(); + } + + String name() { + return name; + } + + @Override + public String toString() { + return name; + } + + @Override + public boolean equals(Object other) { + if (this.builtin) { + // reference equality is OK for the builtin ones, because we intern them + return this == other; + } + return other instanceof Operator && ((Operator)other).name.equals(this.name); + } + + @Override + public int hashCode() { + return hashCode; + } + } + + /** + * This enum is all lowercase so that when it is automatically deserialized from JSON, + * the lowercase properties properly map to these enumerations. + */ + static enum RolloutKind { + rollout, + experiment + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java new file mode 100644 index 0000000..46cb04e --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelDependencies.java @@ -0,0 +1,259 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.ImmutableSortedMap; +import com.google.common.collect.Iterables; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; + +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static com.google.common.collect.Iterables.concat; +import static com.google.common.collect.Iterables.isEmpty; +import 
static com.google.common.collect.Iterables.transform; +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptySet; + +/** + * Implements a dependency graph ordering for data to be stored in a data store. + *

+ * We use this to order the data that we pass to {@link com.launchdarkly.sdk.server.subsystems.DataStore#init(FullDataSet)}, + * and also to determine which flags are affected by a change if the application is listening for flag change events. + *

+ * Dependencies are defined as follows: there is a dependency from flag F to flag G if F is a prerequisite flag for + * G, or transitively for any of G's prerequisites; there is a dependency from flag F to segment S if F contains a + * rule with a segmentMatch clause that uses S. Therefore, if G or S is modified or deleted then F may be affected, + * and if we must populate the store non-atomically then G and S should be added before F. + * + * @since 4.6.1 + */ +abstract class DataModelDependencies { + private DataModelDependencies() {} + + static class KindAndKey { + final DataKind kind; + final String key; + + public KindAndKey(DataKind kind, String key) { + this.kind = kind; + this.key = key; + } + + @Override + public boolean equals(Object other) { + if (other instanceof KindAndKey) { + KindAndKey o = (KindAndKey)other; + return kind == o.kind && key.equals(o.key); + } + return false; + } + + @Override + public int hashCode() { + return kind.hashCode() * 31 + key.hashCode(); + } + } + + /** + * Returns the immediate dependencies from the given item. 
+ * + * @param fromKind the item's kind + * @param fromItem the item descriptor + * @return the flags and/or segments that this item depends on + */ + public static Set computeDependenciesFrom(DataKind fromKind, ItemDescriptor fromItem) { + if (fromItem == null || fromItem.getItem() == null) { + return emptySet(); + } + if (fromKind == FEATURES) { + DataModel.FeatureFlag flag = (DataModel.FeatureFlag)fromItem.getItem(); + + Iterable prereqFlagKeys = transform(flag.getPrerequisites(), p -> p.getKey()); + + Iterable segmentKeys = concat( + transform( + flag.getRules(), + rule -> segmentKeysFromClauses(rule.getClauses())) + ); + + return ImmutableSet.copyOf( + concat(kindAndKeys(FEATURES, prereqFlagKeys), kindAndKeys(SEGMENTS, segmentKeys)) + ); + } else if (fromKind == SEGMENTS) { + DataModel.Segment segment = (DataModel.Segment)fromItem.getItem(); + + Iterable nestedSegmentKeys = concat( + transform( + segment.getRules(), + rule -> segmentKeysFromClauses(rule.getClauses()))); + return ImmutableSet.copyOf(kindAndKeys(SEGMENTS, nestedSegmentKeys)); + } + return emptySet(); + } + + private static Iterable kindAndKeys(DataKind kind, Iterable keys) { + return transform(keys, key -> new KindAndKey(kind, key)); + } + + private static Iterable segmentKeysFromClauses(Iterable clauses) { + return concat(Iterables.>transform( + clauses, + clause -> clause.getOp() == Operator.segmentMatch ? + transform(clause.getValues(), LDValue::stringValue) : + emptyList() + )); + } + + /** + * Returns a copy of the input data set that guarantees that if you iterate through it the outer list and + * the inner list in the order provided, any object that depends on another object will be updated after it. 
+ * + * @param allData the unordered data set + * @return a map with a defined ordering + */ + public static FullDataSet sortAllCollections(FullDataSet allData) { + ImmutableSortedMap.Builder> builder = + ImmutableSortedMap.orderedBy(dataKindPriorityOrder); + for (Map.Entry> entry: allData.getData()) { + DataKind kind = entry.getKey(); + builder.put(kind, sortCollection(kind, entry.getValue())); + } + return new FullDataSet<>(builder.build().entrySet()); + } + + private static KeyedItems sortCollection(DataKind kind, KeyedItems input) { + if (!isDependencyOrdered(kind) || isEmpty(input.getItems())) { + return input; + } + + Map remainingItems = new HashMap<>(); + for (Map.Entry e: input.getItems()) { + remainingItems.put(e.getKey(), e.getValue()); + } + ImmutableMap.Builder builder = ImmutableMap.builder(); + // Note, ImmutableMap guarantees that the iteration order will be the same as the builder insertion order + + while (!remainingItems.isEmpty()) { + // pick a random item that hasn't been updated yet + for (Map.Entry entry: remainingItems.entrySet()) { + addWithDependenciesFirst(kind, entry.getKey(), entry.getValue(), remainingItems, builder); + break; + } + } + + return new KeyedItems<>(builder.build().entrySet()); + } + + private static void addWithDependenciesFirst(DataKind kind, + String key, + ItemDescriptor item, + Map remainingItems, + ImmutableMap.Builder builder) { + remainingItems.remove(key); // we won't need to visit this item again + for (KindAndKey dependency: computeDependenciesFrom(kind, item)) { + if (dependency.kind == kind) { + ItemDescriptor prereqItem = remainingItems.get(dependency.key); + if (prereqItem != null) { + addWithDependenciesFirst(kind, dependency.key, prereqItem, remainingItems, builder); + } + } + } + builder.put(key, item); + } + + private static boolean isDependencyOrdered(DataKind kind) { + return kind == FEATURES; + } + + private static int getPriority(DataKind kind) { + if (kind == FEATURES) { + return 1; + } else if 
(kind == SEGMENTS) { + return 0; + } else { + return kind.getName().length() + 2; + } + } + + private static Comparator dataKindPriorityOrder = new Comparator() { + @Override + public int compare(DataKind o1, DataKind o2) { + return getPriority(o1) - getPriority(o2); + } + }; + + /** + * Maintains a bidirectional dependency graph that can be updated whenever an item has changed. + */ + static final class DependencyTracker { + private final Map> dependenciesFrom = new HashMap<>(); + private final Map> dependenciesTo = new HashMap<>(); + + /** + * Updates the dependency graph when an item has changed. + * + * @param fromKind the changed item's kind + * @param fromKey the changed item's key + * @param fromItem the changed item + */ + public void updateDependenciesFrom(DataKind fromKind, String fromKey, ItemDescriptor fromItem) { + KindAndKey fromWhat = new KindAndKey(fromKind, fromKey); + Set updatedDependencies = computeDependenciesFrom(fromKind, fromItem); // never null + + Set oldDependencySet = dependenciesFrom.get(fromWhat); + if (oldDependencySet != null) { + for (KindAndKey oldDep: oldDependencySet) { + Set depsToThisOldDep = dependenciesTo.get(oldDep); + if (depsToThisOldDep != null) { + // COVERAGE: cannot cause this condition in unit tests, it should never be null + depsToThisOldDep.remove(fromWhat); + } + } + } + dependenciesFrom.put(fromWhat, updatedDependencies); + for (KindAndKey newDep: updatedDependencies) { + Set depsToThisNewDep = dependenciesTo.get(newDep); + if (depsToThisNewDep == null) { + depsToThisNewDep = new HashSet<>(); + dependenciesTo.put(newDep, depsToThisNewDep); + } + depsToThisNewDep.add(fromWhat); + } + } + + public void reset() { + dependenciesFrom.clear(); + dependenciesTo.clear(); + } + + /** + * Populates the given set with the union of the initial item and all items that directly or indirectly + * depend on it (based on the current state of the dependency graph). 
+ * + * @param itemsOut an existing set to be updated + * @param initialModifiedItem an item that has been modified + */ + public void addAffectedItems(Set itemsOut, KindAndKey initialModifiedItem) { + if (!itemsOut.contains(initialModifiedItem)) { + itemsOut.add(initialModifiedItem); + Set affectedItems = dependenciesTo.get(initialModifiedItem); + if (affectedItems != null) { + for (KindAndKey affectedItem: affectedItems) { + addAffectedItems(itemsOut, affectedItem); + } + } + } + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java new file mode 100644 index 0000000..e24a07d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelPreprocessing.java @@ -0,0 +1,313 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.DataModel.Target; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.function.Function; +import java.util.regex.Pattern; + +import static com.launchdarkly.sdk.server.DataModel.Operator.after; +import static com.launchdarkly.sdk.server.DataModel.Operator.before; +import static com.launchdarkly.sdk.server.DataModel.Operator.in; +import static 
com.launchdarkly.sdk.server.DataModel.Operator.matches; +import static com.launchdarkly.sdk.server.DataModel.Operator.semVerEqual; +import static com.launchdarkly.sdk.server.DataModel.Operator.semVerGreaterThan; +import static com.launchdarkly.sdk.server.DataModel.Operator.semVerLessThan; + +/** + * Additional information that we attach to our data model to reduce the overhead of feature flag + * evaluations. The methods that create these objects are called by the afterDeserialized() methods + * of FeatureFlag and Segment, after those objects have been deserialized from JSON but before they + * have been made available to any other code (so these methods do not need to be thread-safe). + *

+ * If for some reason these methods have not been called before an evaluation happens, the evaluation + * logic must still be able to work without the precomputed data. + */ +abstract class DataModelPreprocessing { + private DataModelPreprocessing() {} + + static final class EvalResultsForSingleVariation { + private final EvalResult regularResult; + private final EvalResult inExperimentResult; + + EvalResultsForSingleVariation( + LDValue value, + int variationIndex, + EvaluationReason regularReason, + EvaluationReason inExperimentReason, + boolean alwaysInExperiment + ) { + this.regularResult = EvalResult.of(value, variationIndex, regularReason).withForceReasonTracking(alwaysInExperiment); + this.inExperimentResult = EvalResult.of(value, variationIndex, inExperimentReason).withForceReasonTracking(true); + } + + EvalResult getResult(boolean inExperiment) { + return inExperiment ? inExperimentResult : regularResult; + } + } + + static final class EvalResultFactoryMultiVariations { + private final List variations; + + EvalResultFactoryMultiVariations( + List variations + ) { + this.variations = variations; + } + + EvalResult forVariation(int index, boolean inExperiment) { + if (index < 0 || index >= variations.size()) { + return EvalResult.error(ErrorKind.MALFORMED_FLAG); + } + + if (variations.get(index) == null) { + // getting here indicates that the preprocessor incorrectly processed data and another piece of code is + // asking for a variation that was not populated ahead of time. This is an unexpected bug if it happens. 
+ return EvalResult.error(ErrorKind.EXCEPTION); + } + + return variations.get(index).getResult(inExperiment); + } + } + + static final class FlagPreprocessed { + EvalResult offResult; + EvalResultFactoryMultiVariations fallthroughResults; + + FlagPreprocessed(EvalResult offResult, + EvalResultFactoryMultiVariations fallthroughResults) { + this.offResult = offResult; + this.fallthroughResults = fallthroughResults; + } + } + + static final class PrerequisitePreprocessed { + final EvalResult prerequisiteFailedResult; + + PrerequisitePreprocessed(EvalResult prerequisiteFailedResult) { + this.prerequisiteFailedResult = prerequisiteFailedResult; + } + } + + static final class TargetPreprocessed { + final EvalResult targetMatchResult; + + TargetPreprocessed(EvalResult targetMatchResult) { + this.targetMatchResult = targetMatchResult; + } + } + + static final class FlagRulePreprocessed { + final EvalResultFactoryMultiVariations allPossibleResults; + + FlagRulePreprocessed( + EvalResultFactoryMultiVariations allPossibleResults + ) { + this.allPossibleResults = allPossibleResults; + } + } + + static final class ClausePreprocessed { + final Set valuesSet; + final List valuesExtra; + + ClausePreprocessed(Set valuesSet, List valuesExtra) { + this.valuesSet = valuesSet; + this.valuesExtra = valuesExtra; + } + + static final class ValueData { + final Instant parsedDate; + final Pattern parsedRegex; + final SemanticVersion parsedSemVer; + + ValueData(Instant parsedDate, Pattern parsedRegex, SemanticVersion parsedSemVer) { + this.parsedDate = parsedDate; + this.parsedRegex = parsedRegex; + this.parsedSemVer = parsedSemVer; + } + } + } + + static void preprocessFlag(FeatureFlag f) { + f.preprocessed = new FlagPreprocessed( + EvaluatorHelpers.offResult(f), + precomputeMultiVariationResultsForFlag(f, EvaluationReason.fallthrough(false), + EvaluationReason.fallthrough(true), f.isTrackEventsFallthrough()) + ); + + for (Prerequisite p: f.getPrerequisites()) { + preprocessPrerequisite(p, 
f); + } + for (Target t: f.getTargets()) { + preprocessTarget(t, f); + } + for (Target t: f.getContextTargets()) { + preprocessTarget(t, f); + } + List rules = f.getRules(); + int n = rules.size(); + for (int i = 0; i < n; i++) { + preprocessFlagRule(rules.get(i), i, f); + } + preprocessValueList(f.getVariations()); + } + + static void preprocessSegment(Segment s) { + List rules = s.getRules(); + int n = rules.size(); + for (int i = 0; i < n; i++) { + preprocessSegmentRule(rules.get(i), i); + } + } + + static void preprocessPrerequisite(Prerequisite p, FeatureFlag f) { + // Precompute an immutable EvaluationDetail instance that will be used if the prerequisite fails. + // This behaves the same as an "off" result except for the reason. + p.preprocessed = new PrerequisitePreprocessed(EvaluatorHelpers.prerequisiteFailedResult(f, p)); + } + + static void preprocessTarget(Target t, FeatureFlag f) { + // Precompute an immutable EvalResult instance that will be used if this target matches. + t.preprocessed = new TargetPreprocessed(EvaluatorHelpers.targetMatchResult(f, t)); + } + + static void preprocessFlagRule(Rule r, int ruleIndex, FeatureFlag f) { + EvaluationReason ruleMatchReason = EvaluationReason.ruleMatch(ruleIndex, r.getId(), false); + EvaluationReason ruleMatchReasonInExperiment = EvaluationReason.ruleMatch(ruleIndex, r.getId(), true); + r.preprocessed = new FlagRulePreprocessed(precomputeMultiVariationResultsForRule(f, r, + ruleMatchReason, ruleMatchReasonInExperiment, r.isTrackEvents())); + + for (Clause c: r.getClauses()) { + preprocessClause(c); + } + } + + static void preprocessSegmentRule(SegmentRule r, int ruleIndex) { + for (Clause c: r.getClauses()) { + preprocessClause(c); + } + } + + static void preprocessClause(Clause c) { + // If the clause values contain a null (which is valid in terms of the JSON schema, even if it + // can't ever produce a true result), Gson will give us an actual null. 
Change this to + // LDValue.ofNull() to avoid NPEs down the line. It's more efficient to do this just once at + // deserialization time than to do it in every clause match. + List values = c.getValues(); + preprocessValueList(values); + + Operator op = c.getOp(); + if (op == null) { + return; + } + if (op == in) { + // This is a special case where the clause is testing for an exact match against any of the + // clause values. Converting the value list to a Set allows us to do a fast lookup instead of + // a linear search. We do not do this for other operators (or if there are fewer than two + // values) because the slight extra overhead of a Set is not worthwhile in those case. + if (values.size() > 1) { + c.preprocessed = new ClausePreprocessed(ImmutableSet.copyOf(values), null); + } + } else if (op == matches) { + c.preprocessed = preprocessClauseValues(c.getValues(), v -> + new ClausePreprocessed.ValueData(null, EvaluatorTypeConversion.valueToRegex(v), null) + ); + } else if (op == after || op == before) { + c.preprocessed = preprocessClauseValues(c.getValues(), v -> + new ClausePreprocessed.ValueData(EvaluatorTypeConversion.valueToDateTime(v), null, null) + ); + } else if (op == semVerEqual || op == semVerGreaterThan || op == semVerLessThan) { + c.preprocessed = preprocessClauseValues(c.getValues(), v -> + new ClausePreprocessed.ValueData(null, null, EvaluatorTypeConversion.valueToSemVer(v)) + ); + } + } + + static void preprocessValueList(List values) { + // If a list of values contains a null (which is valid in terms of the JSON schema, even if it + // isn't useful because the SDK considers this a non-value), Gson will give us an actual null. + // Change this to LDValue.ofNull() to avoid NPEs down the line. It's more efficient to do this + // just once at deserialization time than to do it in every clause match. 
+ for (int i = 0; i < values.size(); i++) { + if (values.get(i) == null) { + values.set(i, LDValue.ofNull()); + } + } + } + + private static ClausePreprocessed preprocessClauseValues( + List values, + Function f + ) { + List valuesExtra = new ArrayList<>(values.size()); + for (LDValue v: values) { + valuesExtra.add(f.apply(v)); + } + return new ClausePreprocessed(null, valuesExtra); + } + + private static EvalResultFactoryMultiVariations precomputeMultiVariationResultsForFlag( + FeatureFlag f, + EvaluationReason regularReason, + EvaluationReason inExperimentReason, + boolean alwaysInExperiment + ) { + ArrayList variations = new ArrayList<>(f.getVariations().size()); + for (int i = 0; i < f.getVariations().size(); i++) { + variations.add(new EvalResultsForSingleVariation(f.getVariations().get(i), i, + regularReason, inExperimentReason, alwaysInExperiment)); + } + return new EvalResultFactoryMultiVariations(Collections.unmodifiableList(variations)); + } + + private static EvalResultFactoryMultiVariations precomputeMultiVariationResultsForRule( + FeatureFlag f, + Rule r, + EvaluationReason regularReason, + EvaluationReason inExperimentReason, + boolean alwaysInExperiment + ) { + // Here we create a list of nulls and then insert into that list variations from the rule at their associated index. + // This allows the evaluator to then index into the array in constant time. Alternative options are to use a map or + // a sparse array. The map has high memory footprint for most customer situations, so it was not used. There is no + // standard implementation for sparse array, and it is also not always constant time. Most customers don't have + // many variations per flag and so these arrays should not be large on average. This approach was part of a bugfix + // and this approach cuts the memory footprint enough to meet the need. 
+ List variations = new ArrayList<>(Collections.nCopies(f.getVariations().size(), null)); + if (r.getVariation() != null) { + int index = r.getVariation(); + if (index >= 0 && index < f.getVariations().size()) { + variations.set(index, new EvalResultsForSingleVariation(f.getVariations().get(index), index, + regularReason, inExperimentReason, alwaysInExperiment)); + } + } + + if (r.getRollout() != null && r.getRollout().getVariations() != null) { + for (DataModel.WeightedVariation wv : r.getRollout().getVariations()) { + int index = wv.getVariation(); + if (index >= 0 && index < f.getVariations().size()) { + variations.set(index, new EvalResultsForSingleVariation(f.getVariations().get(index), index, + regularReason, inExperimentReason, alwaysInExperiment)); + } + } + } + + return new EvalResultFactoryMultiVariations(Collections.unmodifiableList(variations)); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java new file mode 100644 index 0000000..387e0ee --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataModelSerialization.java @@ -0,0 +1,367 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonElement; +import com.google.gson.TypeAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonToken; +import com.google.gson.stream.JsonWriter; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import 
com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.DataModel.VersionedData; +import com.launchdarkly.sdk.server.DataModel.WeightedVariation; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import java.io.IOException; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.JsonHelpers.gsonInstance; +import static com.launchdarkly.sdk.server.JsonHelpers.gsonInstanceWithNullsAllowed; + +/** + * JSON conversion logic specifically for our data model types. + *

+ * More general JSON helpers are in JsonHelpers. + */ +abstract class DataModelSerialization { + /** + * Deserializes a data model object from JSON that was already parsed by Gson. + *

+ * For built-in data model classes, our usual abstraction for deserializing from a string is inefficient in + * this case, because Gson has already parsed the original JSON and then we would have to convert the + * JsonElement back into a string and parse it again. So it's best to call Gson directly instead of going + * through our abstraction in that case, but it's also best to implement that special-casing just once here + * instead of scattered throughout the SDK. + * + * @param kind the data kind + * @param parsedJson the parsed JSON + * @return the deserialized item + */ + static VersionedData deserializeFromParsedJson(DataKind kind, JsonElement parsedJson) throws SerializationException { + VersionedData item; + try { + if (kind == FEATURES) { + item = gsonInstance().fromJson(parsedJson, FeatureFlag.class); + } else if (kind == SEGMENTS) { + item = gsonInstance().fromJson(parsedJson, Segment.class); + } else { + // This shouldn't happen since we only use this method internally with our predefined data kinds + throw new IllegalArgumentException("unknown data kind"); + } + } catch (RuntimeException e) { + // A variety of unchecked exceptions can be thrown from JSON parsing; treat them all the same + throw new SerializationException(e); + } + return item; + } + + /** + * Deserializes a data model object from a Gson reader. 
+ * + * @param kind the data kind + * @param jr the JSON reader + * @return the deserialized item + */ + static VersionedData deserializeFromJsonReader(DataKind kind, JsonReader jr) throws SerializationException { + VersionedData item; + try { + if (kind == FEATURES) { + item = gsonInstance().fromJson(jr, FeatureFlag.class); + } else if (kind == SEGMENTS) { + item = gsonInstance().fromJson(jr, Segment.class); + } else { + // This shouldn't happen since we only use this method internally with our predefined data kinds + throw new IllegalArgumentException("unknown data kind"); + } + } catch (RuntimeException e) { + // A variety of unchecked exceptions can be thrown from JSON parsing; treat them all the same + throw new SerializationException(e); + } + return item; + } + + /** + * Deserializes a full set of flag/segment data from a standard JSON object representation + * in the form {"flags": ..., "segments": ...} (which is used in both streaming and polling + * responses). + * + * @param jr the JSON reader + * @return the deserialized data + */ + static FullDataSet parseFullDataSet(JsonReader jr) throws SerializationException { + ImmutableList.Builder> flags = ImmutableList.builder(); + ImmutableList.Builder> segments = ImmutableList.builder(); + + try { + jr.beginObject(); + while (jr.peek() != JsonToken.END_OBJECT) { + String kindName = jr.nextName(); + Class itemClass; + ImmutableList.Builder> listBuilder; + switch (kindName) { + case "flags": + itemClass = DataModel.FeatureFlag.class; + listBuilder = flags; + break; + case "segments": + itemClass = DataModel.Segment.class; + listBuilder = segments; + break; + default: + jr.skipValue(); + continue; + } + jr.beginObject(); + while (jr.peek() != JsonToken.END_OBJECT) { + String key = jr.nextName(); + @SuppressWarnings("unchecked") + Object item = JsonHelpers.deserialize(jr, (Class)itemClass); + listBuilder.add(new AbstractMap.SimpleEntry<>(key, + new ItemDescriptor(((VersionedData)item).getVersion(), item))); + } + 
jr.endObject(); + } + jr.endObject(); + + return new FullDataSet(ImmutableMap.of( + FEATURES, new KeyedItems<>(flags.build()), + SEGMENTS, new KeyedItems<>(segments.build()) + ).entrySet()); + } catch (IOException e) { + throw new SerializationException(e); + } catch (RuntimeException e) { + // A variety of unchecked exceptions can be thrown from JSON parsing; treat them all the same + throw new SerializationException(e); + } + } + + // Custom deserialization logic for Clause because the attribute field is treated differently + // depending on the contextKind field (if contextKind is null, we always parse attribute as a + // literal attribute name and not a reference). + static class ClauseTypeAdapter extends TypeAdapter { + @Override + public void write(JsonWriter out, Clause c) throws IOException { + out.beginObject(); + if (c.getContextKind() != null) { + out.name("contextKind").value(c.getContextKind().toString()); + } + out.name("attribute").value(c.getAttribute() == null ? null : c.getAttribute().toString()); + out.name("op").value(c.getOp() == null ? 
null : c.getOp().name()); + out.name("values").beginArray(); + for (LDValue v: c.getValues()) { + gsonInstanceWithNullsAllowed().toJson(v, LDValue.class, out); + } + out.endArray(); + out.name("negate").value(c.isNegate()); + out.endObject(); + } + + @Override + public Clause read(JsonReader in) throws IOException { + ContextKind contextKind = null; + String attrString = null; + Operator op = null; + List values = new ArrayList<>(); + boolean negate = false; + in.beginObject(); + while (in.hasNext()) { + switch (in.nextName()) { + case "contextKind": + contextKind = ContextKind.of(in.nextString()); + break; + case "attribute": + attrString = in.nextString(); + break; + case "op": + op = Operator.forName(in.nextString()); + break; + case "values": + if (in.peek() == JsonToken.NULL) { + in.skipValue(); + } else { + in.beginArray(); + while (in.hasNext()) { + LDValue value = gsonInstanceWithNullsAllowed().fromJson(in, LDValue.class); + values.add(value); + } + in.endArray(); + } + break; + case "negate": + negate = in.nextBoolean(); + break; + default: + in.skipValue(); + } + } + in.endObject(); + AttributeRef attribute = attributeNameOrPath(attrString, contextKind); + return new Clause(contextKind, attribute, op, values, negate); + } + } + + // Custom deserialization logic for Rollout for a similar reason to Clause. 
+ static class RolloutTypeAdapter extends TypeAdapter { + @Override + public void write(JsonWriter out, Rollout r) throws IOException { + out.beginObject(); + if (r.getContextKind() != null) { + out.name("contextKind").value(r.getContextKind().toString()); + } + out.name("variations").beginArray(); + for (WeightedVariation wv: r.getVariations()) { + gsonInstanceWithNullsAllowed().toJson(wv, WeightedVariation.class, out); + } + out.endArray(); + if (r.getBucketBy() != null) { + out.name("bucketBy").value(r.getBucketBy().toString()); + } + if (r.getKind() != RolloutKind.rollout) { + out.name("kind").value(r.getKind().name()); + } + if (r.getSeed() != null) { + out.name("seed").value(r.getSeed()); + } + out.endObject(); + } + + @Override + public Rollout read(JsonReader in) throws IOException { + ContextKind contextKind = null; + List variations = new ArrayList<>(); + String bucketByString = null; + RolloutKind kind = RolloutKind.rollout; + Integer seed = null; + in.beginObject(); + while (in.hasNext()) { + switch (in.nextName()) { + case "contextKind": + contextKind = ContextKind.of(in.nextString()); + break; + case "variations": + if (in.peek() == JsonToken.NULL) { + in.skipValue(); + } else { + in.beginArray(); + while (in.hasNext()) { + WeightedVariation wv = gsonInstanceWithNullsAllowed().fromJson(in, WeightedVariation.class); + variations.add(wv); + } + in.endArray(); + } + break; + case "bucketBy": + bucketByString = in.nextString(); + break; + case "kind": + kind = RolloutKind.experiment.name().equals(in.nextString()) ? RolloutKind.experiment : + RolloutKind.rollout; + break; + case "seed": + seed = readNullableInt(in); + break; + default: + in.skipValue(); + } + } + in.endObject(); + AttributeRef bucketBy = attributeNameOrPath(bucketByString, contextKind); + return new Rollout(contextKind, variations, bucketBy, kind, seed); + } + } + + // Custom deserialization logic for SegmentRule for a similar reason to Clause. 
+ static class SegmentRuleTypeAdapter extends TypeAdapter { + @Override + public void write(JsonWriter out, SegmentRule sr) throws IOException { + out.beginObject(); + out.name("clauses").beginArray(); + for (Clause c: sr.getClauses()) { + gsonInstanceWithNullsAllowed().toJson(c, Clause.class, out); + } + out.endArray(); + if (sr.getWeight() != null) { + out.name("weight").value(sr.getWeight()); + } + if (sr.getRolloutContextKind() != null) { + out.name("rolloutContextKind").value(sr.getRolloutContextKind().toString()); + } + if (sr.getBucketBy() != null) { + out.name("bucketBy").value(sr.getBucketBy().toString()); + } + out.endObject(); + } + + @Override + public SegmentRule read(JsonReader in) throws IOException { + List clauses = new ArrayList<>(); + Integer weight = null; + ContextKind rolloutContextKind = null; + String bucketByString = null; + in.beginObject(); + while (in.hasNext()) { + switch (in.nextName()) { + case "clauses": + if (in.peek() == JsonToken.NULL) { + in.skipValue(); + } else { + in.beginArray(); + while (in.hasNext()) { + Clause c = gsonInstanceWithNullsAllowed().fromJson(in, Clause.class); + clauses.add(c); + } + in.endArray(); + } + break; + case "weight": + weight = readNullableInt(in); + break; + case "rolloutContextKind": + rolloutContextKind = ContextKind.of(in.nextString()); + break; + case "bucketBy": + bucketByString = in.nextString(); + break; + default: + in.skipValue(); + } + } + in.endObject(); + AttributeRef bucketBy = attributeNameOrPath(bucketByString, rolloutContextKind); + return new SegmentRule(clauses, weight, rolloutContextKind, bucketBy); + } + } + + static Integer readNullableInt(JsonReader in) throws IOException { + if (in.peek() == JsonToken.NULL) { + in.skipValue(); + return null; + } + return in.nextInt(); + } + + static AttributeRef attributeNameOrPath(String attrString, ContextKind contextKind) { + if (attrString == null) { + return null; + } + return contextKind == null ? 
AttributeRef.fromLiteral(attrString) : AttributeRef.fromPath(attrString); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImpl.java new file mode 100644 index 0000000..9eff71c --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImpl.java @@ -0,0 +1,38 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; + +import java.time.Duration; + +final class DataSourceStatusProviderImpl implements DataSourceStatusProvider { + private final EventBroadcasterImpl dataSourceStatusNotifier; + private final DataSourceUpdatesImpl dataSourceUpdates; + + DataSourceStatusProviderImpl( + EventBroadcasterImpl dataSourceStatusNotifier, + DataSourceUpdatesImpl dataSourceUpdates + ) { + this.dataSourceStatusNotifier = dataSourceStatusNotifier; + this.dataSourceUpdates = dataSourceUpdates; + } + + @Override + public Status getStatus() { + return dataSourceUpdates.getLastStatus(); + } + + @Override + public boolean waitFor(State desiredState, Duration timeout) throws InterruptedException { + return dataSourceUpdates.waitFor(desiredState, timeout); + } + + @Override + public void addStatusListener(StatusListener listener) { + dataSourceStatusNotifier.register(listener); + } + + @Override + public void removeStatusListener(StatusListener listener) { + dataSourceStatusNotifier.unregister(listener); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java new file mode 100644 index 0000000..d200c49 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataSourceUpdatesImpl.java @@ -0,0 +1,360 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.base.Joiner; +import 
com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.server.DataModelDependencies.KindAndKey; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.Status; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.StatusListener; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; + +import java.time.Duration; +import java.time.Instant; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import static com.google.common.collect.Iterables.concat; +import static com.google.common.collect.Iterables.transform; +import static com.launchdarkly.sdk.server.DataModel.ALL_DATA_KINDS; +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static java.util.Collections.emptyMap; + +/** + * The data source will push updates into this component. 
We then apply any necessary + * transformations before putting them into the data store; currently that just means sorting + * the data set for init(). We also generate flag change events for any updates or deletions. + *
+ * <p>
+ * This component is also responsible for receiving updates to the data source status, broadcasting + * them to any status listeners, and tracking the length of any period of sustained failure. + * + * @since 4.11.0 + */ +final class DataSourceUpdatesImpl implements DataSourceUpdateSink { + private final DataStore store; + private final EventBroadcasterImpl flagChangeEventNotifier; + private final EventBroadcasterImpl dataSourceStatusNotifier; + private final DataModelDependencies.DependencyTracker dependencyTracker = new DataModelDependencies.DependencyTracker(); + private final DataStoreStatusProvider dataStoreStatusProvider; + private final OutageTracker outageTracker; + private final Object stateLock = new Object(); + private final LDLogger logger; + + private volatile Status currentStatus; + private volatile boolean lastStoreUpdateFailed = false; + volatile Consumer onOutageErrorLog = null; // test instrumentation + + DataSourceUpdatesImpl( + DataStore store, + DataStoreStatusProvider dataStoreStatusProvider, + EventBroadcasterImpl flagChangeEventNotifier, + EventBroadcasterImpl dataSourceStatusNotifier, + ScheduledExecutorService sharedExecutor, + Duration outageLoggingTimeout, + LDLogger baseLogger + ) { + this.store = store; + this.flagChangeEventNotifier = flagChangeEventNotifier; + this.dataSourceStatusNotifier = dataSourceStatusNotifier; + this.dataStoreStatusProvider = dataStoreStatusProvider; + this.outageTracker = new OutageTracker(sharedExecutor, outageLoggingTimeout); + this.logger = baseLogger.subLogger(Loggers.DATA_SOURCE_LOGGER_NAME); + + currentStatus = new Status(State.INITIALIZING, Instant.now(), null); + } + + @Override + public boolean init(FullDataSet allData) { + Map> oldData = null; + + try { + if (hasFlagChangeEventListeners()) { + // Query the existing data if any, so that after the update we can send events for whatever was changed + oldData = new HashMap<>(); + for (DataKind kind: ALL_DATA_KINDS) { + KeyedItems items = 
store.getAll(kind); + oldData.put(kind, ImmutableMap.copyOf(items.getItems())); + } + } + store.init(DataModelDependencies.sortAllCollections(allData)); + lastStoreUpdateFailed = false; + } catch (RuntimeException e) { + reportStoreFailure(e); + return false; + } + + // We must always update the dependency graph even if we don't currently have any event listeners, because if + // listeners are added later, we don't want to have to reread the whole data store to compute the graph + updateDependencyTrackerFromFullDataSet(allData); + + // Now, if we previously queried the old data because someone is listening for flag change events, compare + // the versions of all items and generate events for those (and any other items that depend on them) + if (oldData != null) { + sendChangeEvents(computeChangedItemsForFullDataSet(oldData, fullDataSetToMap(allData))); + } + + return true; + } + + @Override + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + boolean successfullyUpdated; + try { + successfullyUpdated = store.upsert(kind, key, item); + lastStoreUpdateFailed = false; + } catch (RuntimeException e) { + reportStoreFailure(e); + return false; + } + + if (successfullyUpdated) { + dependencyTracker.updateDependenciesFrom(kind, key, item); + if (hasFlagChangeEventListeners()) { + Set affectedItems = new HashSet<>(); + dependencyTracker.addAffectedItems(affectedItems, new KindAndKey(kind, key)); + sendChangeEvents(affectedItems); + } + } + + return true; + } + + @Override + public DataStoreStatusProvider getDataStoreStatusProvider() { + return dataStoreStatusProvider; + } + + @Override + public void updateStatus(State newState, ErrorInfo newError) { + if (newState == null) { + return; + } + + Status statusToBroadcast = null; + + synchronized (stateLock) { + Status oldStatus = currentStatus; + + if (newState == State.INTERRUPTED && oldStatus.getState() == State.INITIALIZING) { + newState = State.INITIALIZING; // see comment on updateStatus in the 
DataSourceUpdates interface + } + + if (newState != oldStatus.getState() || newError != null) { + currentStatus = new Status( + newState, + newState == currentStatus.getState() ? currentStatus.getStateSince() : Instant.now(), + newError == null ? currentStatus.getLastError() : newError + ); + statusToBroadcast = currentStatus; + stateLock.notifyAll(); + } + + outageTracker.trackDataSourceState(newState, newError); + } + + if (statusToBroadcast != null) { + dataSourceStatusNotifier.broadcast(statusToBroadcast); + } + } + + // package-private - called from DataSourceStatusProviderImpl + Status getLastStatus() { + synchronized (stateLock) { + return currentStatus; + } + } + + // package-private - called from DataSourceStatusProviderImpl + boolean waitFor(State desiredState, Duration timeout) throws InterruptedException { + long deadline = System.currentTimeMillis() + timeout.toMillis(); + synchronized (stateLock) { + while (true) { + if (currentStatus.getState() == desiredState) { + return true; + } + if (currentStatus.getState() == State.OFF) { + return false; + } + if (timeout.isZero()) { + stateLock.wait(); + } else { + long now = System.currentTimeMillis(); + if (now >= deadline) { + return false; + } + stateLock.wait(deadline - now); + } + } + } + } + + private boolean hasFlagChangeEventListeners() { + return flagChangeEventNotifier.hasListeners(); + } + + private void sendChangeEvents(Iterable affectedItems) { + for (KindAndKey item: affectedItems) { + if (item.kind == FEATURES) { + flagChangeEventNotifier.broadcast(new FlagChangeEvent(item.key)); + } + } + } + + private void updateDependencyTrackerFromFullDataSet(FullDataSet allData) { + dependencyTracker.reset(); + for (Map.Entry> e0: allData.getData()) { + DataKind kind = e0.getKey(); + for (Map.Entry e1: e0.getValue().getItems()) { + String key = e1.getKey(); + dependencyTracker.updateDependenciesFrom(kind, key, e1.getValue()); + } + } + } + + private Map> fullDataSetToMap(FullDataSet allData) { + Map> ret = 
new HashMap<>(); + for (Map.Entry> e: allData.getData()) { + ret.put(e.getKey(), ImmutableMap.copyOf(e.getValue().getItems())); + } + return ret; + } + + private Set computeChangedItemsForFullDataSet(Map> oldDataMap, + Map> newDataMap) { + Set affectedItems = new HashSet<>(); + for (DataKind kind: ALL_DATA_KINDS) { + Map oldItems = oldDataMap.get(kind); + Map newItems = newDataMap.get(kind); + if (oldItems == null) { + // COVERAGE: there is no way to simulate this condition in unit tests + oldItems = emptyMap(); + } + if (newItems == null) { + newItems = emptyMap(); + } + Set allKeys = ImmutableSet.copyOf(concat(oldItems.keySet(), newItems.keySet())); + for (String key: allKeys) { + ItemDescriptor oldItem = oldItems.get(key); + ItemDescriptor newItem = newItems.get(key); + if (oldItem == null && newItem == null) { // shouldn't be possible due to how we computed allKeys + // COVERAGE: there is no way to simulate this condition in unit tests + continue; + } + if (oldItem == null || newItem == null || oldItem.getVersion() < newItem.getVersion()) { + dependencyTracker.addAffectedItems(affectedItems, new KindAndKey(kind, key)); + } + // Note that comparing the version numbers is sufficient; we don't have to compare every detail of the + // flag or segment configuration, because it's a basic underlying assumption of the entire LD data model + // that if an entity's version number hasn't changed, then the entity hasn't changed (and that if two + // version numbers are different, the higher one is the more recent version). 
+ } + } + return affectedItems; + } + + private void reportStoreFailure(RuntimeException e) { + if (!lastStoreUpdateFailed) { + logger.warn("Unexpected data store error when trying to store an update received from the data source: {}", + LogValues.exceptionSummary(e)); + lastStoreUpdateFailed = true; + } + logger.debug(LogValues.exceptionTrace(e)); + updateStatus(State.INTERRUPTED, ErrorInfo.fromException(ErrorKind.STORE_ERROR, e)); + } + + // Encapsulates our logic for keeping track of the length and cause of data source outages. + private final class OutageTracker { + private final boolean enabled; + private final ScheduledExecutorService sharedExecutor; + private final Duration loggingTimeout; + private final HashMap errorCounts = new HashMap<>(); + + private volatile boolean inOutage; + private volatile ScheduledFuture timeoutFuture; + + OutageTracker(ScheduledExecutorService sharedExecutor, Duration loggingTimeout) { + this.sharedExecutor = sharedExecutor; + this.loggingTimeout = loggingTimeout; + this.enabled = loggingTimeout != null; + } + + void trackDataSourceState(State newState, ErrorInfo newError) { + if (!enabled) { + return; + } + + synchronized (this) { + if (newState == State.INTERRUPTED || newError != null || (newState == State.INITIALIZING && inOutage)) { + // We are in a potentially recoverable outage. If that wasn't the case already, and if we've been configured + // with a timeout for logging the outage at a higher level, schedule that timeout. + if (inOutage) { + // We were already in one - just record this latest error for logging later. + recordError(newError); + } else { + // We weren't already in one, so set the timeout and start recording errors. 
+ inOutage = true; + errorCounts.clear(); + recordError(newError); + timeoutFuture = sharedExecutor.schedule(this::onTimeout, loggingTimeout.toMillis(), TimeUnit.MILLISECONDS); + } + } else { + if (timeoutFuture != null) { + timeoutFuture.cancel(false); + timeoutFuture = null; + } + inOutage = false; + } + } + } + + private void recordError(ErrorInfo newError) { + // Accumulate how many times each kind of error has occurred during the outage - use just the basic + // properties as the key so the map won't expand indefinitely + ErrorInfo basicErrorInfo = new ErrorInfo(newError.getKind(), newError.getStatusCode(), null, null); + errorCounts.compute(basicErrorInfo, (key, oldValue) -> oldValue == null ? 1 : oldValue.intValue() + 1); + } + + private void onTimeout() { + String errorsDesc; + synchronized (this) { + if (timeoutFuture == null || !inOutage) { + // COVERAGE: there is no way to simulate this condition in unit tests + return; + } + timeoutFuture = null; + errorsDesc = Joiner.on(", ").join(transform(errorCounts.entrySet(), DataSourceUpdatesImpl::describeErrorCount)); + } + if (onOutageErrorLog != null) { + onOutageErrorLog.accept(errorsDesc); + } + logger.error( + "A streaming connection to LaunchDarkly has not been established within {} after the connection was interrupted. " + + "The following errors were encountered: {}", + Util.describeDuration(loggingTimeout), + errorsDesc + ); + } + } + + private static String describeErrorCount(Map.Entry entry) { + return entry.getKey() + " (" + entry.getValue() + (entry.getValue() == 1 ? 
" time" : " times") + ")"; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java new file mode 100644 index 0000000..7695ca7 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImpl.java @@ -0,0 +1,42 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataStore; + +final class DataStoreStatusProviderImpl implements DataStoreStatusProvider { + private final DataStore store; + private final DataStoreUpdatesImpl dataStoreUpdates; + + DataStoreStatusProviderImpl( + DataStore store, + DataStoreUpdatesImpl dataStoreUpdates + ) { + this.store = store; + this.dataStoreUpdates = dataStoreUpdates; + } + + @Override + public Status getStatus() { + return dataStoreUpdates.lastStatus.get(); + } + + @Override + public void addStatusListener(StatusListener listener) { + dataStoreUpdates.statusBroadcaster.register(listener); + } + + @Override + public void removeStatusListener(StatusListener listener) { + dataStoreUpdates.statusBroadcaster.unregister(listener); + } + + @Override + public boolean isStatusMonitoringEnabled() { + return store.isStatusMonitoringEnabled(); + } + + @Override + public CacheStats getCacheStats() { + return store.getCacheStats(); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java new file mode 100644 index 0000000..21a1cab --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DataStoreUpdatesImpl.java @@ -0,0 +1,29 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataStoreUpdateSink; + 
+import java.util.concurrent.atomic.AtomicReference; + +class DataStoreUpdatesImpl implements DataStoreUpdateSink { + // package-private because it's convenient to use these from DataStoreStatusProviderImpl + final EventBroadcasterImpl statusBroadcaster; + final AtomicReference lastStatus; + + DataStoreUpdatesImpl( + EventBroadcasterImpl statusBroadcaster + ) { + this.statusBroadcaster = statusBroadcaster; + this.lastStatus = new AtomicReference<>(new DataStoreStatusProvider.Status(true, false)); // initially "available" + } + + @Override + public void updateStatus(DataStoreStatusProvider.Status newStatus) { + if (newStatus != null) { + DataStoreStatusProvider.Status oldStatus = lastStatus.getAndSet(newStatus); + if (!newStatus.equals(oldStatus)) { + statusBroadcaster.broadcast(newStatus); + } + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessorWrapper.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessorWrapper.java new file mode 100644 index 0000000..472c4bb --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultEventProcessorWrapper.java @@ -0,0 +1,82 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.DefaultEventProcessor; +import com.launchdarkly.sdk.internal.events.Event; +import com.launchdarkly.sdk.internal.events.EventsConfiguration; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; + +import java.io.IOException; +import java.util.Optional; + +final class DefaultEventProcessorWrapper implements EventProcessor { + private final DefaultEventProcessor eventProcessor; + final EventsConfiguration eventsConfig; // visible for testing + + 
DefaultEventProcessorWrapper(ClientContext clientContext, EventsConfiguration eventsConfig) { + this.eventsConfig = eventsConfig; + LDLogger baseLogger = clientContext.getBaseLogger(); + LDLogger logger = baseLogger.subLogger(Loggers.EVENTS_LOGGER_NAME); + eventProcessor = new DefaultEventProcessor( + eventsConfig, + ClientContextImpl.get(clientContext).sharedExecutor, + clientContext.getThreadPriority(), + logger + ); + } + + @Override + public void recordEvaluationEvent(LDContext context, String flagKey, int flagVersion, int variation, + LDValue value, EvaluationReason reason, LDValue defaultValue, String prerequisiteOfFlagKey, + boolean requireFullEvent, Long debugEventsUntilDate, boolean excludeFromSummaries, + Long samplingRatio) { + eventProcessor.sendEvent(new Event.FeatureRequest( + System.currentTimeMillis(), + flagKey, + context, + flagVersion, + variation, + value, + defaultValue, + reason, + prerequisiteOfFlagKey, + requireFullEvent, + debugEventsUntilDate, + false, + samplingRatio != null ? 
samplingRatio : 1, + excludeFromSummaries + )); + } + + @Override + public void recordIdentifyEvent(LDContext context) { + eventProcessor.sendEvent(new Event.Identify(System.currentTimeMillis(), context)); + } + + @Override + public void recordCustomEvent(LDContext context, String eventKey, LDValue data, Double metricValue) { + eventProcessor.sendEvent(new Event.Custom(System.currentTimeMillis(), eventKey, context, data, metricValue)); + } + + @Override + public void recordMigrationEvent(MigrationOpTracker tracker) { + Optional event = tracker.createEvent(); + if(event.isPresent()) { + eventProcessor.sendEvent(event.get()); + } + } + + @Override + public void flush() { + eventProcessor.flushAsync(); + } + + @Override + public void close() throws IOException { + eventProcessor.close(); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java new file mode 100644 index 0000000..aad77b1 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/DefaultFeatureRequestor.java @@ -0,0 +1,116 @@ +package com.launchdarkly.sdk.server; + +import static com.launchdarkly.sdk.server.DataModelSerialization.parseFullDataSet; + +import java.io.IOException; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; + +import javax.annotation.Nullable; + +import com.google.common.annotations.VisibleForTesting; +import com.google.gson.stream.JsonReader; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.internal.http.HttpConsts; +import com.launchdarkly.sdk.internal.http.HttpErrors.HttpErrorException; +import com.launchdarkly.sdk.internal.http.HttpHelpers; +import com.launchdarkly.sdk.internal.http.HttpProperties; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import 
com.launchdarkly.sdk.server.subsystems.SerializationException; + +import okhttp3.Cache; +import okhttp3.Headers; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; + +/** + * Implementation of getting flag data via a polling request. + */ +final class DefaultFeatureRequestor implements FeatureRequestor { + private static final long MAX_HTTP_CACHE_SIZE_BYTES = 10 * 1024 * 1024; // 10 MB + + private final OkHttpClient httpClient; + @VisibleForTesting + final URI pollingUri; + private final Headers headers; + private final Path cacheDir; + private final LDLogger logger; + + /** + * Creates a {@link DefaultFeatureRequestor} + * + * @param httpProperties that will be used + * @param baseUri that will be used + * @param payloadFilter identifier that will be used to filter objects in the + * payload, provide null for no filtering + * @param logger to log with + */ + DefaultFeatureRequestor(HttpProperties httpProperties, URI baseUri, @Nullable String payloadFilter, LDLogger logger) { + this.logger = logger; + + URI tempUri = HttpHelpers.concatenateUriPath(baseUri, StandardEndpoints.POLLING_REQUEST_PATH); + if (payloadFilter != null) { + if (!payloadFilter.isEmpty()) { + tempUri = HttpHelpers.addQueryParam(tempUri, HttpConsts.QUERY_PARAM_FILTER, payloadFilter); + } else { + logger.info("Payload filter \"{}\" is not valid, not applying filter.", payloadFilter); + } + } + this.pollingUri = tempUri; + + OkHttpClient.Builder httpBuilder = httpProperties.toHttpClientBuilder(); + this.headers = httpProperties.toHeadersBuilder().build(); + + try { + cacheDir = Files.createTempDirectory("LaunchDarklySDK"); + } catch (IOException e) { + throw new RuntimeException("unable to create cache directory for polling", e); + } + Cache cache = new Cache(cacheDir.toFile(), MAX_HTTP_CACHE_SIZE_BYTES); + httpBuilder.cache(cache); + + httpClient = httpBuilder.build(); + } + + public void close() { + HttpProperties.shutdownHttpClient(httpClient); + 
Util.deleteDirectory(cacheDir); + } + + public FullDataSet getAllData(boolean returnDataEvenIfCached) + throws IOException, HttpErrorException, SerializationException { + Request request = new Request.Builder() + .url(pollingUri.toURL()) + .headers(headers) + .get() + .build(); + + logger.debug("Making request: " + request); + + try (Response response = httpClient.newCall(request).execute()) { + boolean wasCached = response.networkResponse() == null || response.networkResponse().code() == 304; + if (wasCached && !returnDataEvenIfCached) { + logger.debug("Get flag(s) got cached response, will not parse"); + logger.debug("Cache hit count: {} Cache network count: {} ", + httpClient.cache().hitCount(), httpClient.cache().networkCount()); + return null; + } + + logger.debug("Get flag(s) response: {}", response); + logger.debug("Network response: {}", response.networkResponse()); + logger.debug("Cache hit count: {} Cache network count: {}", + httpClient.cache().hitCount(), httpClient.cache().networkCount()); + logger.debug("Cache response: {}", response.cacheResponse()); + + if (!response.isSuccessful()) { + throw new HttpErrorException(response.code()); + } + + JsonReader jr = new JsonReader(response.body().charStream()); + return parseFullDataSet(jr); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResult.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResult.java new file mode 100644 index 0000000..0d0e879 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResult.java @@ -0,0 +1,260 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; + +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; + +/** + * Internal container for the results of an evaluation. 
This consists of: + *
+ * <ul>
+ * <li> an {@link EvaluationDetail} in a type-agnostic form using {@link LDValue}
+ * <li> if appropriate, an additional precomputed {@link EvaluationDetail} for specific Java types
+ * such as Boolean, so that calling a method like boolVariationDetail won't always have to create
+ * a new instance
+ *
  • the boolean forceReasonTracking property (see isForceReasonTracking) + */ +final class EvalResult { + private static final EvaluationDetail WRONG_TYPE_BOOLEAN = wrongTypeWithValue(false); + private static final EvaluationDetail WRONG_TYPE_INTEGER = wrongTypeWithValue((int)0); + private static final EvaluationDetail WRONG_TYPE_DOUBLE = wrongTypeWithValue((double)0); + private static final EvaluationDetail WRONG_TYPE_STRING = wrongTypeWithValue((String)null); + + private final EvaluationDetail anyType; + private final EvaluationDetail asBoolean; + private final EvaluationDetail asInteger; + private final EvaluationDetail asDouble; + private final EvaluationDetail asString; + private final boolean forceReasonTracking; + + /** + * Constructs an instance that wraps the specified EvaluationDetail and also precomputes + * any appropriate type-specific variants (asBoolean, etc.). + * + * @param original the original value + * @return an EvaluatorResult + */ + static EvalResult of(EvaluationDetail original) { + return new EvalResult(original); + } + + /** + * Same as {@link #of(EvaluationDetail)} but specifies the individual properties. + * + * @param value the value + * @param variationIndex the variation index + * @param reason the evaluation reason + * @return an EvaluatorResult + */ + static EvalResult of(LDValue value, int variationIndex, EvaluationReason reason) { + return of(EvaluationDetail.fromValue(value, variationIndex, reason)); + } + + /** + * Constructs an instance for an error result. The value is always null in this case because + * this is a generalized result that wasn't produced by an individual variation() call, so + * we do not know what the application might specify as a default value. 
+ * + * @param errorKind the error kind + * @return an instance + */ + static EvalResult error(ErrorKind errorKind) { + return of(LDValue.ofNull(), EvaluationDetail.NO_VARIATION, EvaluationReason.error(errorKind)); + } + + static EvalResult error(ErrorKind errorKind, LDValue defaultValue) { + return of(defaultValue, EvaluationDetail.NO_VARIATION, EvaluationReason.error(errorKind)); + } + + private EvalResult(EvaluationDetail original) { + this.anyType = original.getValue() == null ? + EvaluationDetail.fromValue(LDValue.ofNull(), original.getVariationIndex(), original.getReason()) : + original; + this.forceReasonTracking = original.getReason().isInExperiment(); + + LDValue value = anyType.getValue(); + int index = anyType.getVariationIndex(); + EvaluationReason reason = anyType.getReason(); + + this.asBoolean = value.getType() == LDValueType.BOOLEAN ? + EvaluationDetail.fromValue(Boolean.valueOf(value.booleanValue()), index, reason) : + WRONG_TYPE_BOOLEAN; + this.asInteger = value.isNumber() ? + EvaluationDetail.fromValue(Integer.valueOf(value.intValue()), index, reason) : + WRONG_TYPE_INTEGER; + this.asDouble = value.isNumber() ? + EvaluationDetail.fromValue(Double.valueOf(value.doubleValue()), index, reason) : + WRONG_TYPE_DOUBLE; + this.asString = value.isString() || value.isNull() ? 
+ EvaluationDetail.fromValue(value.stringValue(), index, reason) : + WRONG_TYPE_STRING; + } + + private EvalResult(EvalResult from, EvaluationReason newReason) { + this.anyType = transformReason(from.anyType, newReason); + this.asBoolean = transformReason(from.asBoolean, newReason); + this.asInteger = transformReason(from.asInteger, newReason); + this.asDouble = transformReason(from.asDouble, newReason); + this.asString = transformReason(from.asString, newReason); + this.forceReasonTracking = from.forceReasonTracking; + } + + private EvalResult(EvalResult from, boolean newForceTracking) { + this.anyType = from.anyType; + this.asBoolean = from.asBoolean; + this.asInteger = from.asInteger; + this.asDouble = from.asDouble; + this.asString = from.asString; + this.forceReasonTracking = newForceTracking; + } + + /** + * Returns the result as an {@code EvaluationDetail} where the value is an {@code LDValue}, + * allowing it to be of any JSON type. + * + * @return the result properties + */ + public EvaluationDetail getAnyType() { + return anyType; + } + + /** + * Returns the result as an {@code EvaluationDetail} where the value is a {@code Boolean}. + * If the result was not a boolean, the returned object has a value of false and a reason + * that is a {@code WRONG_TYPE} error. + *
+ * <p>
+   * @return the evaluation reason
+   */
+  public EvaluationReason getReason() { return anyType.getReason(); }
+
+  /**
+   * Returns true if the variation index is {@link EvaluationDetail#NO_VARIATION}, indicating
+   * that evaluation failed or at least that no variation was selected.
+   * @return true if there is no variation
+   */
+  public boolean isNoVariation() { return anyType.isDefaultValue(); }
+
+  /**
+   * Returns true if we need to send an evaluation reason in event data whenever we get this
+   * result. This is true if any of the following are true: 1. the evaluation reason's
+   * inExperiment property was true, which can happen if the evaluation involved a rollout
+   * or experiment; 2. the evaluation reason was FALLTHROUGH, and the flag's trackEventsFallthrough
+   * property was true; 3. the evaluation reason was RULE_MATCH, and the rule-level trackEvents
+   * property was true. The consequence is that we will tell the event processor "definitely send
+   * an individual event for this evaluation, even if the flag-level trackEvents was not true",
+   * and also we will include the evaluation reason in the event even if the application did not
+   * call a VariationDetail method.
+   * @return true if reason tracking is required for this result
+   */
+  public boolean isForceReasonTracking() { return forceReasonTracking; }
+
+  /**
+   * Returns a transformed copy of this EvalResult with a different evaluation reason.
+   * @param newReason the new evaluation reason
+   * @return a transformed copy
+   */
+  public EvalResult withReason(EvaluationReason newReason) {
+    return newReason.equals(this.anyType.getReason()) ? this : new EvalResult(this, newReason);
+  }
+
+  /**
+   * Returns a transformed copy of this EvalResult with a different value for {@link #isForceReasonTracking()}.
+   * @param newValue the new value for the property
+   * @return a transformed copy
+   */
+  public EvalResult withForceReasonTracking(boolean newValue) {
+    return this.forceReasonTracking == newValue ?
this : new EvalResult(this, newValue); + } + + @Override + public boolean equals(Object other) { + if (other instanceof EvalResult) { + EvalResult o = (EvalResult)other; + return anyType.equals(o.anyType) && forceReasonTracking == o.forceReasonTracking; + } + return false; + } + + @Override + public int hashCode() { + return anyType.hashCode() + (forceReasonTracking ? 1 : 0); + } + + @Override + public String toString() { + if (forceReasonTracking) { + return anyType.toString() + "(forceReasonTracking=true)"; + } + return anyType.toString(); + } + + private static EvaluationDetail transformReason(EvaluationDetail from, EvaluationReason newReason) { + return from == null ? null : + EvaluationDetail.fromValue(from.getValue(), from.getVariationIndex(), newReason); + } + + private static EvaluationDetail wrongTypeWithValue(T value) { + return EvaluationDetail.fromValue(value, NO_VARIATION, EvaluationReason.error(ErrorKind.WRONG_TYPE)); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResultAndFlag.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResultAndFlag.java new file mode 100644 index 0000000..0cdb7a4 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvalResultAndFlag.java @@ -0,0 +1,22 @@ +package com.launchdarkly.sdk.server; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +class EvalResultAndFlag { + private final EvalResult result; + private final DataModel.FeatureFlag flag; + + EvalResultAndFlag(@NotNull EvalResult result, @Nullable DataModel.FeatureFlag flag) { + this.result = result; + this.flag = flag; + } + + EvalResult getResult() { + return result; + } + + DataModel.FeatureFlag getFlag() { + return flag; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationOptions.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationOptions.java new file mode 100644 index 0000000..b28629e --- 
/dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationOptions.java @@ -0,0 +1,35 @@ +package com.launchdarkly.sdk.server; + +/** + * Container class for options that can be provided along with the evaluation invocation to influence various + * behavior of the evaluation. + */ +final class EvaluationOptions { + final boolean recordEvents; + final boolean includeReasonsWithEvents; + + /** + * @param recordEvents when true, events will be recorded while the evaluation is performed + * @param includeReasonsWithEvents when true, any events that are recorded will include reasons + */ + private EvaluationOptions(boolean recordEvents, boolean includeReasonsWithEvents) { + this.recordEvents = recordEvents; + this.includeReasonsWithEvents = includeReasonsWithEvents; + + } + + /** + * During evaluation, no events will be recorded. + */ + public static final EvaluationOptions NO_EVENTS = new EvaluationOptions(false, false); + + /** + * During evaluation, events will be recorded, but they will not include reasons. + */ + public static final EvaluationOptions EVENTS_WITHOUT_REASONS = new EvaluationOptions(true, false); + + /** + * During evaluation, events will be recorded and those events will include reasons. 
+   */
+  public static final EvaluationOptions EVENTS_WITH_REASONS = new EvaluationOptions(true, true);
+}
diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationRecorder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationRecorder.java
new file mode 100644
index 0000000..8dfd100
--- /dev/null
+++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluationRecorder.java
@@ -0,0 +1,25 @@
+package com.launchdarkly.sdk.server;
+
+import com.launchdarkly.sdk.EvaluationReason;
+import com.launchdarkly.sdk.LDContext;
+import com.launchdarkly.sdk.LDValue;
+
+/**
+ * This interface exists to provide abstraction of event recording during the evaluation process so that it
+ * can be customized by the {@link EvaluationOptions} provided at the time of evaluation. This interface also
+ * helps organize the structure of recording events and helps ensure consistency.
+ */
+interface EvaluationRecorder {
+  default void recordEvaluation(DataModel.FeatureFlag flag, LDContext context, EvalResult result, LDValue defaultValue) {
+    // default is no op
+  }
+  default void recordPrerequisiteEvaluation(DataModel.FeatureFlag flag, DataModel.FeatureFlag prereqOfFlag, LDContext context, EvalResult result) {
+    // default is no op
+  }
+  default void recordEvaluationError(DataModel.FeatureFlag flag, LDContext context, LDValue defaultValue, EvaluationReason.ErrorKind errorKind) {
+    // default is no op
+  }
+  default void recordEvaluationUnknownFlagError(String flagKey, LDContext context, LDValue defaultValue, EvaluationReason.ErrorKind errorKind) {
+    // default is no op
+  }
+}
diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Evaluator.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Evaluator.java
new file mode 100644
index 0000000..b605c16
--- /dev/null
+++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Evaluator.java
@@ -0,0 +1,575 @@
+package
com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.EvaluationReason.Kind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.DataModel.Target; +import com.launchdarkly.sdk.server.DataModel.VariationOrRollout; +import com.launchdarkly.sdk.server.DataModel.WeightedVariation; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static com.launchdarkly.sdk.server.EvaluatorBucketing.computeBucketValue; +import static com.launchdarkly.sdk.server.EvaluatorHelpers.contextKeyIsInTargetList; +import static com.launchdarkly.sdk.server.EvaluatorHelpers.contextKeyIsInTargetLists; +import static com.launchdarkly.sdk.server.EvaluatorHelpers.matchClauseByKind; +import static com.launchdarkly.sdk.server.EvaluatorHelpers.matchClauseWithoutSegments; +import static com.launchdarkly.sdk.server.EvaluatorHelpers.maybeNegate; + +/** + * Encapsulates the feature flag evaluation logic. 
The Evaluator has no knowledge of the rest of the SDK environment; + * if it needs to retrieve flags or segments that are referenced by a flag, it does so through a read-only interface + * that is provided in the constructor. It also produces evaluation records (to be used in event data) as appropriate + * for any referenced prerequisite flags. + */ +class Evaluator { + // + // IMPLEMENTATION NOTES ABOUT THIS FILE + // + // Flag evaluation is the hottest code path in the SDK; large applications may evaluate flags at a VERY high + // volume, so every little bit of optimization we can achieve here could add up to quite a bit of overhead we + // are not making the customer incur. Strategies that are used here for that purpose include: + // + // 1. Whenever possible, we are reusing precomputed instances of EvalResult; see DataModelPreprocessing and + // EvaluatorHelpers. + // + // 2. If prerequisite evaluations happen as a side effect of an evaluation, rather than building and returning + // a list of these, we deliver them one at a time via the PrerequisiteEvaluationSink callback mechanism. + // + // 3. If there's a piece of state that needs to be tracked across multiple methods during an evaluation, and + // it's not feasible to just pass it as a method parameter, consider adding it as a field in the mutable + // EvaluatorState object (which we will always have one of) rather than creating a new object to contain it. + // + // 4. Whenever possible, avoid using "for (variable: list)" here because it always creates an iterator object. + // Instead, use the tedious old "get the size, iterate with a counter" approach. + // + // 5. Avoid using lambdas/closures here, because these generally cause a heap object to be allocated for + // variables captured in the closure each time they are used. + // + + /** + * This key cannot exist in LaunchDarkly because it contains invalid characters. 
We use it in tests as a way to
+   * simulate an unexpected RuntimeException during flag evaluations. We check for it by reference equality, so
+   * the tests must use this exact constant.
+   */
+  static final String INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION = "$ test error flag $";
+  static final RuntimeException EXPECTED_EXCEPTION_FROM_INVALID_FLAG = new RuntimeException("deliberate test error");
+
+  private final Getters getters;
+  private final LDLogger logger;
+
+  /**
+   * An abstraction of getting flags or segments by key. This ensures that Evaluator cannot modify the data store,
+   * and simplifies testing.
+   */
+  static interface Getters {
+
+    /**
+     * @param key of the flag to get
+     * @return the flag, or null if a flag with the given key doesn't exist
+     */
+    @Nullable
+    FeatureFlag getFlag(String key);
+
+    /**
+     * @param key of the segment to get
+     * @return the segment, or null if a segment with the given key doesn't exist.
+     */
+    @Nullable
+    Segment getSegment(String key);
+
+    BigSegmentStoreWrapper.BigSegmentsQueryResult getBigSegments(String key);
+  }
+
+  /**
+   * Represents errors that should terminate evaluation, for situations where it's simpler to use throw/catch
+   * than to return an error result back up a call chain.
+   */
+  @SuppressWarnings("serial")
+  static class EvaluationException extends RuntimeException {
+    final ErrorKind errorKind;
+
+    EvaluationException(ErrorKind errorKind, String message) {
+      // Bug fix: the message was previously discarded, so getMessage() returned null when this
+      // exception was caught and logged in Evaluator.evaluate(), and the detailed circular-reference
+      // messages built in checkPrerequisites()/matchAnySegment() were lost. Pass it to the
+      // RuntimeException constructor so it is preserved.
+      super(message);
+      this.errorKind = errorKind;
+    }
+  }
+
+  /**
+   * This object holds mutable state that Evaluator may need during an evaluation.
+ */ + private static class EvaluatorState { + private Map bigSegmentsMembership = null; + private EvaluationReason.BigSegmentsStatus bigSegmentsStatus = null; + private FeatureFlag originalFlag = null; + private List prerequisiteStack = null; + private List segmentStack = null; + } + + Evaluator(Getters getters, LDLogger logger) { + this.getters = getters; + this.logger = logger; + } + + /** + * The client's entry point for evaluating a flag. No other Evaluator methods should be exposed. + * + * @param flag an existing feature flag; any other referenced flags or segments will be queried via {@link Getters} + * @param context the evaluation context + * @param recorder records information as evaluation runs + * @return an {@link EvalResult} - guaranteed non-null + */ + EvalResult evaluate(FeatureFlag flag, LDContext context, @Nonnull EvaluationRecorder recorder) { + if (flag.getKey() == INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION) { + throw EXPECTED_EXCEPTION_FROM_INVALID_FLAG; + } + + EvaluatorState state = new EvaluatorState(); + state.originalFlag = flag; + + try { + EvalResult result = evaluateInternal(flag, context, recorder, state); + + if (state.bigSegmentsStatus != null) { + return result.withReason( + result.getReason().withBigSegmentsStatus(state.bigSegmentsStatus) + ); + } + return result; + } catch (EvaluationException e) { + logger.error("Could not evaluate flag \"{}\": {}", flag.getKey(), e.getMessage()); + return EvalResult.error(e.errorKind); + } + } + + private EvalResult evaluateInternal(FeatureFlag flag, LDContext context, @Nonnull EvaluationRecorder recorder, EvaluatorState state) { + if (!flag.isOn()) { + return EvaluatorHelpers.offResult(flag); + } + + EvalResult prereqFailureResult = checkPrerequisites(flag, context, recorder, state); + if (prereqFailureResult != null) { + return prereqFailureResult; + } + + // Check to see if targets match + EvalResult targetMatchResult = checkTargets(flag, context); + if (targetMatchResult != null) { + return 
targetMatchResult; + } + + // Now walk through the rules and see if any match + List rules = flag.getRules(); // guaranteed non-null + int nRules = rules.size(); + for (int i = 0; i < nRules; i++) { + Rule rule = rules.get(i); + if (ruleMatchesContext(flag, rule, context, state)) { + return computeRuleMatch(flag, context, rule, i); + } + } + // Walk through the fallthrough and see if it matches + return getValueForVariationOrRollout(flag, flag.getFallthrough(), context, + flag.preprocessed == null ? null : flag.preprocessed.fallthroughResults, + EvaluationReason.fallthrough()); + } + + // Checks prerequisites if any; returns null if successful, or an EvalResult if we have to + // short-circuit due to a prerequisite failure. + private EvalResult checkPrerequisites(FeatureFlag flag, LDContext context, @Nonnull EvaluationRecorder recorder, EvaluatorState state) { + List prerequisites = flag.getPrerequisites(); // guaranteed non-null + int nPrerequisites = prerequisites.size(); + if (nPrerequisites == 0) { + return null; + } + + try { + // We use the state object to guard against circular references in prerequisites. To avoid + // the overhead of creating the state.prerequisiteStack list in the most common case where + // there's only a single level prerequisites, we treat state.originalFlag as the first + // element in the stack. 
+ if (flag != state.originalFlag) { + if (state.prerequisiteStack == null) { + state.prerequisiteStack = new ArrayList<>(); + } + state.prerequisiteStack.add(flag.getKey()); + } + + for (int i = 0; i < nPrerequisites; i++) { + Prerequisite prereq = prerequisites.get(i); + String prereqKey = prereq.getKey(); + + if (prereqKey.equals(state.originalFlag.getKey()) || + (flag != state.originalFlag && prereqKey.equals(flag.getKey())) || + (state.prerequisiteStack != null && state.prerequisiteStack.contains(prereqKey))) { + throw new EvaluationException(ErrorKind.MALFORMED_FLAG, + "prerequisite relationship to \"" + prereqKey + "\" caused a circular reference;" + + " this is probably a temporary condition due to an incomplete update"); + } + + boolean prereqOk = true; + FeatureFlag prereqFeatureFlag = getters.getFlag(prereq.getKey()); + if (prereqFeatureFlag == null) { + logger.error("Could not retrieve prerequisite flag \"{}\" when evaluating \"{}\"", prereq.getKey(), flag.getKey()); + prereqOk = false; + } else { + EvalResult prereqEvalResult = evaluateInternal(prereqFeatureFlag, context, recorder, state); + // Note that if the prerequisite flag is off, we don't consider it a match no matter what its + // off variation was. But we still need to evaluate it in order to generate an event. 
+ if (!prereqFeatureFlag.isOn() || prereqEvalResult.getVariationIndex() != prereq.getVariation()) { + prereqOk = false; + } + recorder.recordPrerequisiteEvaluation(prereqFeatureFlag, flag, context, prereqEvalResult); + } + if (!prereqOk) { + return EvaluatorHelpers.prerequisiteFailedResult(flag, prereq); + } + } + return null; // all prerequisites were satisfied + } finally { + if (state.prerequisiteStack != null && !state.prerequisiteStack.isEmpty()) { + state.prerequisiteStack.remove(state.prerequisiteStack.size() - 1); + } + } + } + + private static EvalResult checkTargets( + FeatureFlag flag, + LDContext context + ) { + List contextTargets = flag.getContextTargets(); // guaranteed non-null + List userTargets = flag.getTargets(); // guaranteed non-null + int nContextTargets = contextTargets.size(); + int nUserTargets = userTargets.size(); + + if (nContextTargets == 0) { + // old-style data has only targets for users + if (nUserTargets != 0) { + LDContext userContext = context.getIndividualContext(ContextKind.DEFAULT); + if (userContext != null) { + for (int i = 0; i < nUserTargets; i++) { + Target t = userTargets.get(i); + if (t.getValues().contains(userContext.getKey())) { // getValues() is guaranteed non-null + return EvaluatorHelpers.targetMatchResult(flag, t); + } + } + } + } + return null; + } + + // new-style data has ContextTargets, which may include placeholders for user targets that are in Targets + for (int i = 0; i < nContextTargets; i++) { + Target t = contextTargets.get(i); + if (t.getContextKind() == null || t.getContextKind().isDefault()) { + LDContext userContext = context.getIndividualContext(ContextKind.DEFAULT); + if (userContext == null) { + continue; + } + for (int j = 0; j < nUserTargets; j++) { + Target ut = userTargets.get(j); + if (ut.getVariation() == t.getVariation()) { + if (ut.getValues().contains(userContext.getKey())) { + return EvaluatorHelpers.targetMatchResult(flag, t); + } + break; + } + } + } else { + if 
(contextKeyIsInTargetList(context, t.getContextKind(), t.getValues())) { + return EvaluatorHelpers.targetMatchResult(flag, t); + } + } + } + return null; + } + + private EvalResult getValueForVariationOrRollout( + FeatureFlag flag, + VariationOrRollout vr, + LDContext context, + DataModelPreprocessing.EvalResultFactoryMultiVariations precomputedResults, + EvaluationReason reason + ) { + int variation = -1; + boolean inExperiment = false; + Integer maybeVariation = vr.getVariation(); + if (maybeVariation != null) { + variation = maybeVariation.intValue(); + } else { + Rollout rollout = vr.getRollout(); + if (rollout != null && !rollout.getVariations().isEmpty()) { + float bucket = computeBucketValue( + rollout.isExperiment(), + rollout.getSeed(), + context, + rollout.getContextKind(), + flag.getKey(), + rollout.getBucketBy(), + flag.getSalt() + ); + boolean contextWasFound = bucket >= 0; // see comment on computeBucketValue + float sum = 0F; + List variations = rollout.getVariations(); // guaranteed non-null + int nVariations = variations.size(); + for (int i = 0; i < nVariations; i++) { + WeightedVariation wv = variations.get(i); + sum += (float) wv.getWeight() / 100000F; + if (bucket < sum) { + variation = wv.getVariation(); + inExperiment = vr.getRollout().isExperiment() && !wv.isUntracked() && contextWasFound; + break; + } + } + if (variation < 0) { + // The user's bucket value was greater than or equal to the end of the last bucket. This could happen due + // to a rounding error, or due to the fact that we are scaling to 100000 rather than 99999, or the flag + // data could contain buckets that don't actually add up to 100000. Rather than returning an error in + // this case (or changing the scaling, which would potentially change the results for *all* users), we + // will simply put the user in the last bucket. 
+ WeightedVariation lastVariation = rollout.getVariations().get(rollout.getVariations().size() - 1); + variation = lastVariation.getVariation(); + inExperiment = vr.getRollout().isExperiment() && !lastVariation.isUntracked(); + } + } + } + + if (variation < 0) { + logger.error("Data inconsistency in feature flag \"{}\": variation/rollout object with no variation or rollout", flag.getKey()); + return EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG); + } + // Normally, we will always have precomputedResults + if (precomputedResults != null) { + return precomputedResults.forVariation(variation, inExperiment); + } + // If for some reason we don't, synthesize an equivalent result + return EvalResult.of(EvaluatorHelpers.evaluationDetailForVariation( + flag, variation, inExperiment ? experimentize(reason) : reason)); + } + + private static EvaluationReason experimentize(EvaluationReason reason) { + if (reason.getKind() == Kind.FALLTHROUGH) { + return EvaluationReason.fallthrough(true); + } else if (reason.getKind() == Kind.RULE_MATCH) { + return EvaluationReason.ruleMatch(reason.getRuleIndex(), reason.getRuleId(), true); + } + return reason; + } + + private boolean ruleMatchesContext(FeatureFlag flag, Rule rule, LDContext context, EvaluatorState state) { + List clauses = rule.getClauses(); // guaranteed non-null + int nClauses = clauses.size(); + for (int i = 0; i < nClauses; i++) { + Clause clause = clauses.get(i); + if (!clauseMatchesContext(clause, context, state)) { + return false; + } + } + return true; + } + + private boolean clauseMatchesContext(Clause clause, LDContext context, EvaluatorState state) { + if (clause.getOp() == Operator.segmentMatch) { + return maybeNegate(clause, matchAnySegment(clause.getValues(), context, state)); + } + AttributeRef attr = clause.getAttribute(); + if (attr == null) { + throw new EvaluationException(ErrorKind.MALFORMED_FLAG, "rule clause did not specify an attribute"); + } + if (!attr.isValid()) { + throw new 
EvaluationException(ErrorKind.MALFORMED_FLAG, + "invalid attribute reference \"" + attr.getError() + "\""); + } + if (attr.getDepth() == 1 && attr.getComponent(0).equals("kind")) { + return maybeNegate(clause, matchClauseByKind(clause, context)); + } + LDContext actualContext = context.getIndividualContext(clause.getContextKind()); + if (actualContext == null) { + return false; + } + LDValue contextValue = actualContext.getValue(attr); + if (contextValue.isNull()) { + return false; + } + + if (contextValue.getType() == LDValueType.ARRAY) { + int nValues = contextValue.size(); + for (int i = 0; i < nValues; i++) { + LDValue value = contextValue.get(i); + if (matchClauseWithoutSegments(clause, value)) { + return maybeNegate(clause, true); + } + } + return maybeNegate(clause, false); + } else if (contextValue.getType() != LDValueType.OBJECT) { + return maybeNegate(clause, matchClauseWithoutSegments(clause, contextValue)); + } + return false; + } + + private boolean matchAnySegment(List values, LDContext context, EvaluatorState state) { + // For the segmentMatch operator, the values list is really a list of segment keys. We + // return a match if any of these segments matches the context. + int nValues = values.size(); + for (int i = 0; i < nValues; i++) { + LDValue clauseValue = values.get(i); + if (!clauseValue.isString()) { + continue; + } + String segmentKey = clauseValue.stringValue(); + if (state.segmentStack != null) { + // Clauses within a segment can reference other segments, so we don't want to get stuck in a cycle. 
+ if (state.segmentStack.contains(segmentKey)) { + throw new EvaluationException(ErrorKind.MALFORMED_FLAG, + "segment rule referencing segment \"" + segmentKey + "\" caused a circular reference;" + + " this is probably a temporary condition due to an incomplete update"); + } + } + Segment segment = getters.getSegment(segmentKey); + if (segment != null) { + if (segmentMatchesContext(segment, context, state)) { + return true; + } + } + } + return false; + } + + private boolean segmentMatchesContext(Segment segment, LDContext context, EvaluatorState state) { + if (segment.isUnbounded()) { + if (segment.getGeneration() == null) { + // Big Segment queries can only be done if the generation is known. If it's unset, that + // probably means the data store was populated by an older SDK that doesn't know about the + // generation property and therefore dropped it from the JSON data. We'll treat that as a + // "not configured" condition. + state.bigSegmentsStatus = EvaluationReason.BigSegmentsStatus.NOT_CONFIGURED; + return false; + } + LDContext matchContext = context.getIndividualContext(segment.getUnboundedContextKind()); + if (matchContext == null) { + return false; + } + String key = matchContext.getKey(); + BigSegmentStoreTypes.Membership membershipData = + state.bigSegmentsMembership == null ? null : state.bigSegmentsMembership.get(key); + if (membershipData == null) { + BigSegmentStoreWrapper.BigSegmentsQueryResult queryResult = getters.getBigSegments(key); + if (queryResult == null) { + // The SDK hasn't been configured to be able to use big segments + state.bigSegmentsStatus = EvaluationReason.BigSegmentsStatus.NOT_CONFIGURED; + } else { + membershipData = queryResult.membership; + state.bigSegmentsStatus = queryResult.status; + if (state.bigSegmentsMembership == null) { + state.bigSegmentsMembership = new HashMap<>(); + } + state.bigSegmentsMembership.put(key, membershipData); + } + } + Boolean membershipResult = membershipData == null ? 
null : + membershipData.checkMembership(makeBigSegmentRef(segment)); + if (membershipResult != null) { + return membershipResult.booleanValue(); + } + } else { + if (contextKeyIsInTargetList(context, ContextKind.DEFAULT, segment.getIncluded())) { + return true; + } + if (contextKeyIsInTargetLists(context, segment.getIncludedContexts())) { + return true; + } + if (contextKeyIsInTargetList(context, ContextKind.DEFAULT, segment.getExcluded())) { + return false; + } + if (contextKeyIsInTargetLists(context, segment.getExcludedContexts())) { + return false; + } + } + List rules = segment.getRules(); // guaranteed non-null + if (!rules.isEmpty()) { + // Evaluating rules means we might be doing recursive segment matches, so we'll push the current + // segment key onto the stack for cycle detection. + if (state.segmentStack == null) { + state.segmentStack = new ArrayList<>(); + } + state.segmentStack.add(segment.getKey()); + int nRules = rules.size(); + try { + for (int i = 0; i < nRules; i++) { + SegmentRule rule = rules.get(i); + if (segmentRuleMatchesContext(rule, context, state, segment.getKey(), segment.getSalt())) { + return true; + } + } + } finally { + state.segmentStack.remove(state.segmentStack.size() - 1); + } + } + return false; + } + + private boolean segmentRuleMatchesContext( + SegmentRule segmentRule, + LDContext context, + EvaluatorState state, + String segmentKey, + String salt + ) { + List clauses = segmentRule.getClauses(); // guaranteed non-null + int nClauses = clauses.size(); + for (int i = 0; i < nClauses; i++) { + Clause c = clauses.get(i); + if (!clauseMatchesContext(c, context, state)) { + return false; + } + } + + // If the Weight is absent, this rule matches + if (segmentRule.getWeight() == null) { + return true; + } + + // All of the clauses are met. 
See if the context buckets in + double bucket = computeBucketValue( + false, + null, + context, + segmentRule.getRolloutContextKind(), + segmentKey, + segmentRule.getBucketBy(), + salt + ); + double weight = (double) segmentRule.getWeight() / 100000.0; + return bucket < weight; + } + + private EvalResult computeRuleMatch(FeatureFlag flag, LDContext context, Rule rule, int ruleIndex) { + if (rule.preprocessed != null) { + return getValueForVariationOrRollout(flag, rule, context, rule.preprocessed.allPossibleResults, null); + } + EvaluationReason reason = EvaluationReason.ruleMatch(ruleIndex, rule.getId()); + return getValueForVariationOrRollout(flag, rule, context, null, reason); + } + + static String makeBigSegmentRef(Segment segment) { + return String.format("%s.g%d", segment.getKey(), segment.getGeneration()); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java new file mode 100644 index 0000000..2a96663 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorBucketing.java @@ -0,0 +1,76 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; + +import org.apache.commons.codec.digest.DigestUtils; + +/** + * Encapsulates the logic for percentage rollouts. + */ +abstract class EvaluatorBucketing { + private EvaluatorBucketing() {} + + private static final float LONG_SCALE = (float) 0xFFFFFFFFFFFFFFFL; + + // Computes a bucket value for a rollout or experiment. If an error condition prevents + // us from computing a valid bucket value, we return 0, which will cause the evaluator + // to select the first bucket. 
A special case is if no context of the desired kind is + // found, in which case we return the special value -1; this similarly will cause the + // first bucket to be chosen (since it is less than the end value of the bucket, just + // as 0 is), but also tells the evaluator that inExperiment must be set to false. + static float computeBucketValue( + boolean isExperiment, + Integer seed, + LDContext context, + ContextKind contextKind, + String flagOrSegmentKey, + AttributeRef attr, + String salt + ) { + LDContext matchContext = context.getIndividualContext(contextKind); + if (matchContext == null) { + return -1; + } + LDValue contextValue; + if (isExperiment || attr == null) { + contextValue = LDValue.of(matchContext.getKey()); + } else { + if (!attr.isValid()) { + return 0; + } + contextValue = matchContext.getValue(attr); + if (contextValue.isNull()) { + return 0; + } + } + + String idHash = getBucketableStringValue(contextValue); + if (idHash == null) { + return 0; + } + + String prefix; + if (seed != null) { + prefix = seed.toString(); + } else { + prefix = flagOrSegmentKey + "." + salt; + } + String hash = DigestUtils.sha1Hex(prefix + "." + idHash).substring(0, 15); + long longVal = Long.parseLong(hash, 16); + return (float) longVal / LONG_SCALE; + } + + private static String getBucketableStringValue(LDValue userValue) { + switch (userValue.getType()) { + case STRING: + return userValue.stringValue(); + case NUMBER: + return userValue.isInt() ? 
String.valueOf(userValue.intValue()) : null; + default: + return null; + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java new file mode 100644 index 0000000..2e49369 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorHelpers.java @@ -0,0 +1,146 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.SegmentTarget; +import com.launchdarkly.sdk.server.DataModel.Target; +import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorHelpers.contextKeyIsInTargetList; + +/** + * Low-level helpers for producing various kinds of evaluation results. We also put any + * helpers here that are used by Evaluator if they are static, i.e. if they can be + * implemented without reference to the Evaluator instance's own state, so as to keep the + * Evaluator logic smaller and easier to follow. + *

    + * For all of the methods that return an {@link EvalResult}, the behavior is as follows: + * First we check if the flag data contains a preprocessed value for this kind of result; if + * so, we return that same EvalResult instance, for efficiency. That will normally always be + * the case, because preprocessing happens as part of deserializing a flag. But if somehow + * no preprocessed value is available, we construct one less efficiently on the fly. (The + * reason we can't absolutely guarantee that the preprocessed data is available, by putting + * it in a constructor, is because of how deserialization works: Gson doesn't pass values to + * a constructor, it sets fields directly, so we have to run our preprocessing logic after.) + */ +abstract class EvaluatorHelpers { + static EvalResult offResult(FeatureFlag flag) { + if (flag.preprocessed != null) { + return flag.preprocessed.offResult; + } + return EvalResult.of(evaluationDetailForOffVariation(flag, EvaluationReason.off())); + } + + static EvalResult targetMatchResult(FeatureFlag flag, Target target) { + if (target.preprocessed != null) { + return target.preprocessed.targetMatchResult; + } + return EvalResult.of(evaluationDetailForVariation( + flag, target.getVariation(), EvaluationReason.targetMatch())); + } + + static EvalResult prerequisiteFailedResult(FeatureFlag flag, Prerequisite prereq) { + if (prereq.preprocessed != null) { + return prereq.preprocessed.prerequisiteFailedResult; + } + return EvalResult.of(evaluationDetailForOffVariation( + flag, EvaluationReason.prerequisiteFailed(prereq.getKey()))); + } + + static EvaluationDetail evaluationDetailForOffVariation(FeatureFlag flag, EvaluationReason reason) { + Integer offVariation = flag.getOffVariation(); + if (offVariation == null) { // off variation unspecified - return default value + return EvaluationDetail.fromValue(LDValue.ofNull(), NO_VARIATION, reason); + } + return evaluationDetailForVariation(flag, offVariation, reason); + } + + static 
EvaluationDetail evaluationDetailForVariation(FeatureFlag flag, int variation, EvaluationReason reason) { + if (variation < 0 || variation >= flag.getVariations().size()) { + return EvaluationDetail.fromValue(LDValue.ofNull(), NO_VARIATION, + EvaluationReason.error(ErrorKind.MALFORMED_FLAG)); + } + return EvaluationDetail.fromValue( + LDValue.normalize(flag.getVariations().get(variation)), + variation, + reason); + } + + static boolean maybeNegate(Clause clause, boolean b) { + return clause.isNegate() ? !b : b; + } + + // Performs an operator test between a single context value and all of the clause values, for any + // operator except segmentMatch. + static boolean matchClauseWithoutSegments(Clause clause, LDValue contextValue) { + Operator op = clause.getOp(); + if (op != null) { + ClausePreprocessed preprocessed = clause.preprocessed; + if (op == Operator.in) { + // see if we have precomputed a Set for fast equality matching + Set vs = preprocessed == null ? null : preprocessed.valuesSet; + if (vs != null) { + return vs.contains(contextValue); + } + } + List values = clause.getValues(); + List preprocessedValues = + preprocessed == null ? null : preprocessed.valuesExtra; + int n = values.size(); + for (int i = 0; i < n; i++) { + // the preprocessed list, if present, will always have the same size as the values list + ClausePreprocessed.ValueData p = preprocessedValues == null ? null : preprocessedValues.get(i); + LDValue v = values.get(i); + if (EvaluatorOperators.apply(op, contextValue, v, p)) { + return true; + } + } + } + return false; + } + + static boolean matchClauseByKind(Clause clause, LDContext context) { + // If attribute is "kind", then we treat operator and values as a match expression against a list + // of all individual kinds in the context. That is, for a multi-kind context with kinds of "org" + // and "user", it is a match if either of those strings is a match with Operator and Values. 
+ for (int i = 0; i < context.getIndividualContextCount(); i++) { + if (matchClauseWithoutSegments(clause, LDValue.of( + context.getIndividualContext(i).getKind().toString()))) { + return true; + } + } + return false; + } + + static boolean contextKeyIsInTargetList(LDContext context, ContextKind contextKind, Collection keys) { + if (keys.isEmpty()) { + return false; + } + LDContext matchContext = context.getIndividualContext(contextKind); + return matchContext != null && keys.contains(matchContext.getKey()); + } + + static boolean contextKeyIsInTargetLists(LDContext context, List targets) { + int nTargets = targets.size(); + for (int i = 0; i < nTargets; i++) { + SegmentTarget t = targets.get(i); + if (contextKeyIsInTargetList(context, t.getContextKind(), t.getValues())) { + return true; + } + } + return false; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorInterface.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorInterface.java new file mode 100644 index 0000000..60d4b90 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorInterface.java @@ -0,0 +1,38 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; + +/** + * An Evaluator is able to calculate evaluation results for flags against the provided context. + */ +public interface EvaluatorInterface { + + /** + * Evaluates the provided flag. + * + * @param method the top level customer facing method that led to this invocation + * @param flagKey of the flag that will be evaluated + * @param context to use during the evaluation + * @param defaultValue the value that will be returned in the result if an issue prevents the evaluator from + * successfully calculating an evaluation result. + * @param requireType that will be asserted against the evaluator's result. 
If the assertion fails, the default + * value is used in the returned result. + * @param options that are used to control more specific behavior of the evaluation + * @return the evaluation result and flag object + */ + EvalResultAndFlag evalAndFlag(String method, String flagKey, LDContext context, LDValue defaultValue, + LDValueType requireType, EvaluationOptions options); + + /** + * Evaluates all flags. + *

    + * It is up to each implementation whether events will be logged during evaluation. + * + * @param context to use during the evaluation + * @param options optional {@link FlagsStateOption} values affecting how the state is computed + * @return a {@link FeatureFlagsState} object (will never be null; see {@link FeatureFlagsState#isValid()} + */ + FeatureFlagsState allFlagsState(LDContext context, FlagsStateOption... options); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java new file mode 100644 index 0000000..635e0ac --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorOperators.java @@ -0,0 +1,124 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed; + +import java.time.Instant; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; +import java.util.regex.Pattern; + +import static com.launchdarkly.sdk.server.EvaluatorTypeConversion.valueToDateTime; +import static com.launchdarkly.sdk.server.EvaluatorTypeConversion.valueToRegex; +import static com.launchdarkly.sdk.server.EvaluatorTypeConversion.valueToSemVer; + +/** + * Defines the behavior of all operators that can be used in feature flag rules and segment rules. 
+ */ +abstract class EvaluatorOperators { + private EvaluatorOperators() {} + + private static interface OperatorFn { + boolean match(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed); + } + + private static final Map OPERATORS = new HashMap<>(); + static { + OPERATORS.put(Operator.in, EvaluatorOperators::applyIn); + OPERATORS.put(Operator.startsWith, EvaluatorOperators::applyStartsWith); + OPERATORS.put(Operator.endsWith, EvaluatorOperators::applyEndsWith); + OPERATORS.put(Operator.matches, EvaluatorOperators::applyMatches); + OPERATORS.put(Operator.contains, EvaluatorOperators::applyContains); + OPERATORS.put(Operator.lessThan, numericComparison(delta -> delta < 0)); + OPERATORS.put(Operator.lessThanOrEqual, numericComparison(delta -> delta <= 0)); + OPERATORS.put(Operator.greaterThan, numericComparison(delta -> delta > 0)); + OPERATORS.put(Operator.greaterThanOrEqual, numericComparison(delta -> delta >= 0)); + OPERATORS.put(Operator.before, dateComparison(delta -> delta < 0)); + OPERATORS.put(Operator.after, dateComparison(delta -> delta > 0)); + OPERATORS.put(Operator.semVerEqual, semVerComparison(delta -> delta == 0)); + OPERATORS.put(Operator.semVerLessThan, semVerComparison(delta -> delta < 0)); + OPERATORS.put(Operator.semVerGreaterThan, semVerComparison(delta -> delta > 0)); + // Operator.segmentMatch is deliberately not included here, because it is implemented + // separately in Evaluator. 
+ } + + static boolean apply( + DataModel.Operator op, + LDValue userValue, + LDValue clauseValue, + ClausePreprocessed.ValueData preprocessed + ) { + OperatorFn fn = OPERATORS.get(op); + return fn != null && fn.match(userValue, clauseValue, preprocessed); + } + + static boolean applyIn(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) { + return userValue.equals(clauseValue); + } + + static boolean applyStartsWith(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) { + return userValue.isString() && clauseValue.isString() && userValue.stringValue().startsWith(clauseValue.stringValue()); + } + + static boolean applyEndsWith(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) { + return userValue.isString() && clauseValue.isString() && userValue.stringValue().endsWith(clauseValue.stringValue()); + } + + static boolean applyMatches(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) { + // If preprocessed is non-null, it means we've already tried to parse the clause value as a regex, + // in which case if preprocessed.parsedRegex is null it was not a valid regex. + Pattern clausePattern = preprocessed == null ? valueToRegex(clauseValue) : preprocessed.parsedRegex; + return clausePattern != null && userValue.isString() && + clausePattern.matcher(userValue.stringValue()).find(); + } + + static boolean applyContains(LDValue userValue, LDValue clauseValue, ClausePreprocessed.ValueData preprocessed) { + return userValue.isString() && clauseValue.isString() && userValue.stringValue().contains(clauseValue.stringValue()); + } + + static OperatorFn numericComparison(Function comparisonTest) { + return (userValue, clauseValue, preprocessed) -> { + if (!userValue.isNumber() || !clauseValue.isNumber()) { + return false; + } + double n1 = userValue.doubleValue(); + double n2 = clauseValue.doubleValue(); + int delta = n1 == n2 ? 0 : (n1 < n2 ? 
-1 : 1); + return comparisonTest.apply(delta); + }; + } + + static OperatorFn dateComparison(Function comparisonTest) { + return (userValue, clauseValue, preprocessed) -> { + // If preprocessed is non-null, it means we've already tried to parse the clause value as a date/time, + // in which case if preprocessed.parsedDate is null it was not a valid date/time. + Instant clauseDate = preprocessed == null ? valueToDateTime(clauseValue) : preprocessed.parsedDate; + if (clauseDate == null) { + return false; + } + Instant userDate = valueToDateTime(userValue); + if (userDate == null) { + return false; + } + return comparisonTest.apply(userDate.compareTo(clauseDate)); + }; + } + + static OperatorFn semVerComparison(Function comparisonTest) { + return (userValue, clauseValue, preprocessed) -> { + // If preprocessed is non-null, it means we've already tried to parse the clause value as a version, + // in which case if preprocessed.parsedSemVer is null it was not a valid version. + SemanticVersion clauseVer = preprocessed == null ? 
valueToSemVer(clauseValue) : preprocessed.parsedSemVer; + if (clauseVer == null) { + return false; + } + SemanticVersion userVer = valueToSemVer(userValue); + if (userVer == null) { + return false; + } + return comparisonTest.apply(userVer.compareTo(clauseVer)); + }; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorTypeConversion.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorTypeConversion.java new file mode 100644 index 0000000..f23dd90 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorTypeConversion.java @@ -0,0 +1,48 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDValue; + +import java.time.Instant; +import java.time.ZonedDateTime; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +abstract class EvaluatorTypeConversion { + private EvaluatorTypeConversion() {} + + static Instant valueToDateTime(LDValue value) { + if (value.isNumber()) { + return Instant.ofEpochMilli(value.longValue()); + } else if (value.isString()) { + try { + return ZonedDateTime.parse(value.stringValue()).toInstant(); + } catch (Throwable t) { + return null; + } + } else { + return null; + } + } + + static Pattern valueToRegex(LDValue value) { + if (!value.isString()) { + return null; + } + try { + return Pattern.compile(value.stringValue()); + } catch (PatternSyntaxException e) { + return null; + } + } + + static SemanticVersion valueToSemVer(LDValue value) { + if (!value.isString()) { + return null; + } + try { + return SemanticVersion.parse(value.stringValue(), true); + } catch (SemanticVersion.InvalidVersionException e) { + return null; + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorWithHooks.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorWithHooks.java new file mode 100644 index 0000000..fc6e5e7 --- /dev/null +++ 
b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EvaluatorWithHooks.java @@ -0,0 +1,74 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; +import com.launchdarkly.sdk.server.integrations.EvaluationSeriesContext; +import com.launchdarkly.sdk.server.integrations.Hook; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * An {@link EvaluatorInterface} that will invoke the evaluation series methods of the provided {@link Hook} when + * evaluations are made. + */ +class EvaluatorWithHooks implements EvaluatorInterface { + + private final EvaluatorInterface underlyingEvaluator; + private final List hooks; + private final LDLogger logger; + + /** + * @param underlyingEvaluator that will do the actual flag evaluation + * @param hooks that will be invoked at various stages of the evaluation series + * @param hooksLogger that will be used to log + */ + EvaluatorWithHooks(EvaluatorInterface underlyingEvaluator, List hooks, LDLogger hooksLogger) { + this.underlyingEvaluator = underlyingEvaluator; + this.hooks = hooks; + this.logger = hooksLogger; + } + + @Override + public EvalResultAndFlag evalAndFlag(String method, String featureKey, LDContext context, LDValue defaultValue, LDValueType requireType, EvaluationOptions options) { + // Each hook will have an opportunity to provide series data to carry along to later stages. This list + // is to track that data. 
+ List seriesDataList = new ArrayList<>(hooks.size()); + + EvaluationSeriesContext seriesContext = new EvaluationSeriesContext(method, featureKey, context, defaultValue); + for (int i = 0; i < hooks.size(); i++) { + Hook currentHook = hooks.get(i); + try { + Map seriesData = currentHook.beforeEvaluation(seriesContext, Collections.emptyMap()); + seriesDataList.add(Collections.unmodifiableMap(seriesData)); // make data immutable + } catch (Exception e) { + seriesDataList.add(Collections.emptyMap()); // since the provided hook failed to execute, we default the series data to an empty map in this case + logger.error("During evaluation of flag \"{}\". Stage \"BeforeEvaluation\" of hook \"{}\" reported error: {}", featureKey, currentHook.getMetadata().getName(), e.toString()); + } + } + + EvalResultAndFlag result = underlyingEvaluator.evalAndFlag(method, featureKey, context, defaultValue, requireType, options); + + // Invoke hooks in reverse order and give them back the series data they gave us. + for (int i = hooks.size() - 1; i >= 0; i--) { + Hook currentHook = hooks.get(i); + try { + currentHook.afterEvaluation(seriesContext, seriesDataList.get(i), result.getResult().getAnyType()); + } catch (Exception e) { + logger.error("During evaluation of flag \"{}\". Stage \"AfterEvaluation\" of hook \"{}\" reported error: {}", featureKey, currentHook.getMetadata().getName(), e.toString()); + } + } + + return result; + } + + @Override + public FeatureFlagsState allFlagsState(LDContext context, FlagsStateOption... options) { + // We do not support hooks for when all flags are evaluated. Perhaps in the future that will be added. 
+ return underlyingEvaluator.allFlagsState(context, options); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EventBroadcasterImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EventBroadcasterImpl.java new file mode 100644 index 0000000..c76463f --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/EventBroadcasterImpl.java @@ -0,0 +1,117 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; + +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.ExecutorService; +import java.util.function.BiConsumer; + +/** + * A generic mechanism for registering event listeners and broadcasting events to them. The SDK maintains an + * instance of this for each available type of listener (flag change, data store status, etc.). They are all + * intended to share a single executor service; notifications are submitted individually to this service for + * each listener. + * + * @param the listener interface class + * @param the event class + */ +class EventBroadcasterImpl { + private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); + private final BiConsumer broadcastAction; + private final ExecutorService executor; + private final LDLogger logger; + + /** + * Creates an instance. 
+ * + * @param broadcastAction a lambda that calls the appropriate listener method for an event + * @param executor the executor to use for running notification tasks on a worker thread; if this + * is null (which should only be the case in test code) then broadcasting an event will be a no-op + */ + EventBroadcasterImpl( + BiConsumer broadcastAction, + ExecutorService executor, + LDLogger logger + ) { + this.broadcastAction = broadcastAction; + this.executor = executor; + this.logger = logger; + } + + static EventBroadcasterImpl forFlagChangeEvents( + ExecutorService executor, LDLogger logger) { + return new EventBroadcasterImpl<>(FlagChangeListener::onFlagChange, executor, logger); + } + + static EventBroadcasterImpl + forDataSourceStatus(ExecutorService executor, LDLogger logger) { + return new EventBroadcasterImpl<>(DataSourceStatusProvider.StatusListener::dataSourceStatusChanged, + executor, logger); + } + + static EventBroadcasterImpl + forDataStoreStatus(ExecutorService executor, LDLogger logger) { + return new EventBroadcasterImpl<>(DataStoreStatusProvider.StatusListener::dataStoreStatusChanged, + executor, logger); + } + + static EventBroadcasterImpl + forBigSegmentStoreStatus(ExecutorService executor, LDLogger logger) { + return new EventBroadcasterImpl<>(BigSegmentStoreStatusProvider.StatusListener::bigSegmentStoreStatusChanged, + executor, logger); + } + + /** + * Registers a listener for this type of event. This method is thread-safe. + * + * @param listener the listener to register + */ + void register(ListenerT listener) { + listeners.add(listener); + } + + /** + * Unregisters a listener. This method is thread-safe. + * + * @param listener the listener to unregister + */ + void unregister(ListenerT listener) { + listeners.remove(listener); + } + + /** + * Returns true if any listeners are currently registered. This method is thread-safe. 
+ * + * @return true if there are listeners + */ + boolean hasListeners() { + return !listeners.isEmpty(); + } + + /** + * Broadcasts an event to all available listeners. + * + * @param event the event to broadcast + */ + void broadcast(EventT event) { + if (executor == null) { + return; + } + for (ListenerT l: listeners) { + executor.execute(() -> { + try { + broadcastAction.accept(l, event); + } catch (Exception e) { + logger.warn("Unexpected error from listener ({}): {}", l.getClass(), LogValues.exceptionSummary(e)); + logger.debug("{}", LogValues.exceptionTrace(e)); + } + }); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java new file mode 100644 index 0000000..e48f037 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureFlagsState.java @@ -0,0 +1,388 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; +import com.google.gson.TypeAdapter; +import com.google.gson.annotations.JsonAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.json.JsonSerializable; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static com.launchdarkly.sdk.server.JsonHelpers.gsonInstanceWithNullsAllowed; + +/** + * A snapshot of the state of all feature flags with regard to a specific user, generated by + * calling {@link LDClientInterface#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}. + *

    + * LaunchDarkly defines a standard JSON encoding for this object, suitable for + * bootstrapping + * the LaunchDarkly JavaScript browser SDK. You can convert it to JSON in any of these ways: + *

      + *
    1. With {@link com.launchdarkly.sdk.json.JsonSerialization}. + *
    2. With Gson, if and only if you configure your {@code Gson} instance with + * {@link com.launchdarkly.sdk.json.LDGson}. + *
    3. With Jackson, if and only if you configure your {@code ObjectMapper} instance with + * {@link com.launchdarkly.sdk.json.LDJackson}. + *
    + * + * @since 4.3.0 + */ +@JsonAdapter(FeatureFlagsState.JsonSerialization.class) +public final class FeatureFlagsState implements JsonSerializable { + private final ImmutableMap flagMetadata; + private final boolean valid; + + static class FlagMetadata { + final LDValue value; + final Integer variation; + final EvaluationReason reason; + final Integer version; + final boolean trackEvents; + final boolean trackReason; + final Long debugEventsUntilDate; + + FlagMetadata(LDValue value, Integer variation, EvaluationReason reason, Integer version, + boolean trackEvents, boolean trackReason, Long debugEventsUntilDate) { + this.value = LDValue.normalize(value); + this.variation = variation; + this.reason = reason; + this.version = version; + this.trackEvents = trackEvents; + this.trackReason = trackReason; + this.debugEventsUntilDate = debugEventsUntilDate; + } + + @Override + public boolean equals(Object other) { + if (other instanceof FlagMetadata) { + FlagMetadata o = (FlagMetadata)other; + return value.equals(o.value) && + Objects.equals(variation, o.variation) && + Objects.equals(reason, o.reason) && + Objects.equals(version, o.version) && + trackEvents == o.trackEvents && + trackReason == o.trackReason && + Objects.equals(debugEventsUntilDate, o.debugEventsUntilDate); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(variation, version, trackEvents, trackReason, debugEventsUntilDate); + } + } + + private FeatureFlagsState(ImmutableMap flagMetadata, boolean valid) { + this.flagMetadata = flagMetadata; + this.valid = valid; + } + + /** + * Returns a {@link Builder} for creating instances. + *

    + * Application code will not normally use this builder, since the SDK creates its own instances. + * However, it may be useful in testing, to simulate values that might be returned by + * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}. + * + * @param options the same {@link FlagsStateOption}s, if any, that would be passed to + * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)} + * @return a builder object + * @since 5.6.0 + */ + public static Builder builder(FlagsStateOption... options) { + return new Builder(options); + } + + /** + * Returns true if this object contains a valid snapshot of feature flag state, or false if the + * state could not be computed (for instance, because the client was offline or there was no user). + * @return true if the state is valid + */ + public boolean isValid() { + return valid; + } + + /** + * Returns the value of an individual feature flag at the time the state was recorded. + * @param key the feature flag key + * @return the flag's JSON value; {@link LDValue#ofNull()} if the flag returned the default value; + * {@code null} if there was no such flag + */ + public LDValue getFlagValue(String key) { + FlagMetadata data = flagMetadata.get(key); + return data == null ? null : data.value; + } + + /** + * Returns the evaluation reason for an individual feature flag at the time the state was recorded. + * @param key the feature flag key + * @return an {@link EvaluationReason}; null if reasons were not recorded, or if there was no such flag + */ + public EvaluationReason getFlagReason(String key) { + FlagMetadata data = flagMetadata.get(key); + return data == null ? null : data.reason; + } + + /** + * Returns a map of flag keys to flag values. If a flag would have evaluated to the default value, + * its value will be null. + *

    + * The returned map is unmodifiable. + *

    + * Do not use this method if you are passing data to the front end to "bootstrap" the JavaScript client. + * Instead, serialize the FeatureFlagsState object to JSON using {@code Gson.toJson()} or {@code Gson.toJsonTree()}. + * @return an immutable map of flag keys to JSON values + */ + public Map toValuesMap() { + return Maps.transformValues(flagMetadata, v -> v.value); + } + + @Override + public boolean equals(Object other) { + if (other instanceof FeatureFlagsState) { + FeatureFlagsState o = (FeatureFlagsState)other; + return flagMetadata.equals(o.flagMetadata) && + valid == o.valid; + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(flagMetadata, valid); + } + + /** + * A builder for a {@link FeatureFlagsState} instance. + *

    + * Application code will not normally use this builder, since the SDK creates its own instances. + * However, it may be useful in testing, to simulate values that might be returned by + * {@link LDClient#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}. + * + * @since 5.6.0 + */ + public static class Builder { + private ImmutableMap.Builder flagMetadata = ImmutableMap.builder(); + private final boolean saveReasons; + private final boolean detailsOnlyForTrackedFlags; + private boolean valid = true; + + private Builder(FlagsStateOption... options) { + saveReasons = FlagsStateOption.hasOption(options, FlagsStateOption.WITH_REASONS); + detailsOnlyForTrackedFlags = FlagsStateOption.hasOption(options, FlagsStateOption.DETAILS_ONLY_FOR_TRACKED_FLAGS); + } + + /** + * Sets the {@link FeatureFlagsState#isValid()} property. This is true by default. + * + * @param valid the new property value + * @return the builder + */ + public Builder valid(boolean valid) { + this.valid = valid; + return this; + } + + /** + * Adds data to the builder representing the result of a feature flag evaluation. + *

    + * The {@code flagVersion}, {@code trackEvents}, and {@code debugEventsUntilDate} parameters are + * normally generated internally by the SDK; they are used if the {@link FeatureFlagsState} data + * has been passed to front-end code, to control how analytics events are generated by the front + * end. If you are using this builder in back-end test code, those values are unimportant. + * + * @param flagKey the feature flag key + * @param value the evaluated value + * @param variationIndex the evaluated variation index + * @param reason the evaluation reason + * @param flagVersion the current flag version + * @param trackEvents true if full event tracking is turned on for this flag + * @param debugEventsUntilDate if set, event debugging is turned until this time (millisecond timestamp) + * @return the builder + */ + public Builder add( + String flagKey, + LDValue value, + Integer variationIndex, + EvaluationReason reason, + int flagVersion, + boolean trackEvents, + Long debugEventsUntilDate + ) { + return add(flagKey, value, variationIndex, reason, flagVersion, trackEvents, false, debugEventsUntilDate); + } + + /** + * Adds data to the builder representing the result of a feature flag evaluation. + *

    + * The {@code flagVersion}, {@code trackEvents}, and {@code debugEventsUntilDate} parameters are + * normally generated internally by the SDK; they are used if the {@link FeatureFlagsState} data + * has been passed to front-end code, to control how analytics events are generated by the front + * end. If you are using this builder in back-end test code, those values are unimportant. + * + * @param flagKey the feature flag key + * @param value the evaluated value + * @param variationIndex the evaluated variation index + * @param reason the evaluation reason + * @param flagVersion the current flag version + * @param trackEvents true if full event tracking is turned on for this flag + * @param trackReason true if evaluation reasons must be included due to experimentation + * @param debugEventsUntilDate if set, event debugging is turned until this time (millisecond timestamp) + * @return the builder + */ + public Builder add( + String flagKey, + LDValue value, + Integer variationIndex, + EvaluationReason reason, + int flagVersion, + boolean trackEvents, + boolean trackReason, + Long debugEventsUntilDate + ) { + final boolean flagIsTracked = trackEvents || + (debugEventsUntilDate != null && debugEventsUntilDate > System.currentTimeMillis()); + final boolean wantDetails = !detailsOnlyForTrackedFlags || flagIsTracked; + FlagMetadata data = new FlagMetadata( + value, + variationIndex, + (saveReasons && wantDetails) || trackReason ? reason : null, + wantDetails ? Integer.valueOf(flagVersion) : null, + trackEvents, + trackReason, + debugEventsUntilDate + ); + flagMetadata.put(flagKey, data); + return this; + } + + Builder addFlag(DataModel.FeatureFlag flag, EvalResult eval) { + return add( + flag.getKey(), + eval.getValue(), + eval.isNoVariation() ? 
null : eval.getVariationIndex(), + eval.getReason(), + flag.getVersion(), + flag.isTrackEvents() || eval.isForceReasonTracking(), + eval.isForceReasonTracking(), + flag.getDebugEventsUntilDate() + ); + } + + /** + * Returns an object created from the builder state. + * + * @return an immutable {@link FeatureFlagsState} + */ + public FeatureFlagsState build() { + return new FeatureFlagsState(flagMetadata.build(), valid); + } + } + + static class JsonSerialization extends TypeAdapter { + @Override + public void write(JsonWriter out, FeatureFlagsState state) throws IOException { + out.beginObject(); + + for (Map.Entry entry: state.flagMetadata.entrySet()) { + out.name(entry.getKey()); + gsonInstanceWithNullsAllowed().toJson(entry.getValue().value, LDValue.class, out); + } + + out.name("$flagsState"); + out.beginObject(); + for (Map.Entry entry: state.flagMetadata.entrySet()) { + out.name(entry.getKey()); + FlagMetadata meta = entry.getValue(); + out.beginObject(); + // Here we're serializing FlagMetadata properties individually because if we rely on + // Gson's reflection mechanism, it won't reliably drop null properties (that only works + // if the destination really is Gson, not if a Jackson adapter is being used). 
+ if (meta.variation != null) { + out.name("variation"); + out.value(meta.variation.intValue()); + } + if (meta.reason != null) { + out.name("reason"); + gsonInstanceWithNullsAllowed().toJson(meta.reason, EvaluationReason.class, out); + } + if (meta.version != null) { + out.name("version"); + out.value(meta.version.intValue()); + } + if (meta.trackEvents) { + out.name("trackEvents"); + out.value(meta.trackEvents); + } + if (meta.trackReason) { + out.name("trackReason"); + out.value(meta.trackReason); + } + if (meta.debugEventsUntilDate != null) { + out.name("debugEventsUntilDate"); + out.value(meta.debugEventsUntilDate.longValue()); + } + out.endObject(); + } + out.endObject(); + + out.name("$valid"); + out.value(state.valid); + + out.endObject(); + } + + // There isn't really a use case for deserializing this, but we have to implement it + @Override + public FeatureFlagsState read(JsonReader in) throws IOException { + Map flagValues = new HashMap<>(); + Map flagMetadataWithoutValues = new HashMap<>(); + boolean valid = true; + in.beginObject(); + while (in.hasNext()) { + String name = in.nextName(); + if (name.equals("$flagsState")) { + in.beginObject(); + while (in.hasNext()) { + String metaName = in.nextName(); + FlagMetadata meta = gsonInstanceWithNullsAllowed().fromJson(in, FlagMetadata.class); + flagMetadataWithoutValues.put(metaName, meta); + } + in.endObject(); + } else if (name.equals("$valid")) { + valid = in.nextBoolean(); + } else { + LDValue value = gsonInstanceWithNullsAllowed().fromJson(in, LDValue.class); + flagValues.put(name, value); + } + } + in.endObject(); + ImmutableMap.Builder allFlagMetadata = ImmutableMap.builder(); + for (Map.Entry e: flagValues.entrySet()) { + FlagMetadata m0 = flagMetadataWithoutValues.get(e.getKey()); + if (m0 != null) { + FlagMetadata m1 = new FlagMetadata( + e.getValue(), + m0.variation, + m0.reason, + m0.version, + m0.trackEvents, + m0.trackReason, + m0.debugEventsUntilDate + ); + allFlagMetadata.put(e.getKey(), m1); 
+ } + } + return new FeatureFlagsState(allFlagMetadata.build(), valid); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java new file mode 100644 index 0000000..71c79c3 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FeatureRequestor.java @@ -0,0 +1,26 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.internal.http.HttpErrors.HttpErrorException; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +import java.io.Closeable; +import java.io.IOException; + +/** + * Internal abstraction for polling requests. Currently this is only used by PollingProcessor, and + * the only implementation is DefaultFeatureRequestor, but using an interface allows us to mock out + * the HTTP behavior and test the rest of PollingProcessor separately. 
+ */ +interface FeatureRequestor extends Closeable { + /** + * Makes a request to the LaunchDarkly server-side SDK polling endpoint, + * + * @param returnDataEvenIfCached true if the method should return non-nil data no matter what; + * false if it should return {@code null} when the latest data is already in the cache + * @return the data, or {@code null} as above + * @throws IOException for network errors + * @throws HttpErrorException for HTTP error responses + */ + FullDataSet getAllData(boolean returnDataEvenIfCached) throws IOException, HttpErrorException; +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java new file mode 100644 index 0000000..3f9fb1a --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagTrackerImpl.java @@ -0,0 +1,67 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; +import com.launchdarkly.sdk.server.interfaces.FlagTracker; +import com.launchdarkly.sdk.server.interfaces.FlagValueChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagValueChangeListener; + +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; + +final class FlagTrackerImpl implements FlagTracker { + private final EventBroadcasterImpl flagChangeBroadcaster; + private final BiFunction evaluateFn; + + FlagTrackerImpl( + EventBroadcasterImpl flagChangeBroadcaster, + BiFunction evaluateFn + ) { + this.flagChangeBroadcaster = flagChangeBroadcaster; + this.evaluateFn = evaluateFn; + } + + @Override + public void addFlagChangeListener(FlagChangeListener listener) { + flagChangeBroadcaster.register(listener); + } + + @Override + public void removeFlagChangeListener(FlagChangeListener listener) 
{ + flagChangeBroadcaster.unregister(listener); + } + + @Override + public FlagChangeListener addFlagValueChangeListener(String flagKey, LDContext context, FlagValueChangeListener listener) { + FlagValueChangeAdapter adapter = new FlagValueChangeAdapter(flagKey, context, listener); + addFlagChangeListener(adapter); + return adapter; + } + + private final class FlagValueChangeAdapter implements FlagChangeListener { + private final String flagKey; + private final LDContext context; + private final FlagValueChangeListener listener; + private final AtomicReference value; + + FlagValueChangeAdapter(String flagKey, LDContext context, FlagValueChangeListener listener) { + this.flagKey = flagKey; + this.context = context; + this.listener = listener; + this.value = new AtomicReference<>(evaluateFn.apply(flagKey, context)); + } + + @Override + public void onFlagChange(FlagChangeEvent event) { + if (event.getKey().equals(flagKey)) { + LDValue newValue = evaluateFn.apply(flagKey, context); + LDValue oldValue = value.getAndSet(newValue); + if (!newValue.equals(oldValue)) { + listener.onFlagValueChange(new FlagValueChangeEvent(flagKey, oldValue, newValue)); + } + } + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java new file mode 100644 index 0000000..8204ba9 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/FlagsStateOption.java @@ -0,0 +1,49 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; + +/** + * Optional parameters that can be passed to {@link LDClientInterface#allFlagsState(com.launchdarkly.sdk.LDContext, FlagsStateOption...)}. 
+ * @since 4.3.0 + */ +public final class FlagsStateOption { + private final String description; + + private FlagsStateOption(String description) { + this.description = description; + } + + @Override + public String toString() { + return description; + } + + /** + * Specifies that only flags marked for use with the client-side SDK should be included in the state object. + * By default, all flags are included. + */ + public static final FlagsStateOption CLIENT_SIDE_ONLY = new FlagsStateOption("CLIENT_SIDE_ONLY"); + + /** + * Specifies that {@link EvaluationReason} data should be captured in the state object. By default, it is not. + */ + public static final FlagsStateOption WITH_REASONS = new FlagsStateOption("WITH_REASONS"); + + /** + * Specifies that any flag metadata that is normally only used for event generation - such as flag versions and + * evaluation reasons - should be omitted for any flag that does not have event tracking or debugging turned on. + * This reduces the size of the JSON data if you are passing the flag state to the front end. 
+ * @since 4.4.0 + */ + public static final FlagsStateOption DETAILS_ONLY_FOR_TRACKED_FLAGS = new FlagsStateOption("DETAILS_ONLY_FOR_TRACKED_FLAGS"); + + static boolean hasOption(FlagsStateOption[] options, FlagsStateOption option) { + for (FlagsStateOption o: options) { + if (o == option) { + return true; + } + } + return false; + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java new file mode 100644 index 0000000..4753084 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InMemoryDataStore.java @@ -0,0 +1,121 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +/** + * A thread-safe, versioned store for feature flags and related data based on a + * {@link HashMap}. This is the default implementation of {@link DataStore}. + * + * As of version 5.0.0, this is package-private; applications must use the factory method + * {@link Components#inMemoryDataStore()}. 
+ */ +class InMemoryDataStore implements DataStore { + private volatile ImmutableMap> allData = ImmutableMap.of(); + private volatile boolean initialized = false; + private Object writeLock = new Object(); + + @Override + public void init(FullDataSet allData) { + synchronized (writeLock) { + ImmutableMap.Builder> newData = ImmutableMap.builder(); + for (Map.Entry> entry: allData.getData()) { + newData.put(entry.getKey(), ImmutableMap.copyOf(entry.getValue().getItems())); + } + this.allData = newData.build(); // replaces the entire map atomically + this.initialized = true; + } + } + + @Override + public ItemDescriptor get(DataKind kind, String key) { + Map items = allData.get(kind); + if (items == null) { + return null; + } + return items.get(key); + } + + @Override + public KeyedItems getAll(DataKind kind) { + Map items = allData.get(kind); + if (items == null) { + return new KeyedItems<>(null); + } + return new KeyedItems<>(ImmutableList.copyOf(items.entrySet())); + } + + @Override + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + synchronized (writeLock) { + Map existingItems = this.allData.get(kind); + ItemDescriptor oldItem = null; + if (existingItems != null) { + oldItem = existingItems.get(key); + if (oldItem != null && oldItem.getVersion() >= item.getVersion()) { + return false; + } + } + // The following logic is necessary because ImmutableMap.Builder doesn't support overwriting an existing key + ImmutableMap.Builder> newData = ImmutableMap.builder(); + for (Map.Entry> e: this.allData.entrySet()) { + if (!e.getKey().equals(kind)) { + newData.put(e.getKey(), e.getValue()); + } + } + if (existingItems == null) { + newData.put(kind, ImmutableMap.of(key, item)); + } else { + ImmutableMap.Builder itemsBuilder = ImmutableMap.builder(); + if (oldItem == null) { + itemsBuilder.putAll(existingItems); + } else { + for (Map.Entry e: existingItems.entrySet()) { + if (!e.getKey().equals(key)) { + itemsBuilder.put(e.getKey(), e.getValue()); + } 
+ } + } + itemsBuilder.put(key, item); + newData.put(kind, itemsBuilder.build()); + } + this.allData = newData.build(); // replaces the entire map atomically + return true; + } + } + + @Override + public boolean isInitialized() { + return initialized; + } + + @Override + public boolean isStatusMonitoringEnabled() { + return false; + } + + @Override + public CacheStats getCacheStats() { + return null; + } + + /** + * Does nothing; this class does not have any resources to release + * + * @throws IOException will never happen + */ + @Override + public void close() throws IOException { + return; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InputValidatingEvaluator.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InputValidatingEvaluator.java new file mode 100644 index 0000000..526e151 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/InputValidatingEvaluator.java @@ -0,0 +1,302 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import java.util.Map; + +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.subsystems.EventProcessor.NO_VERSION; + +/** + * This is an evaluator that handles error cases related to 
initialization, parameter validation, evaluation result + * type assertion, and runtime exceptions. + */ +class InputValidatingEvaluator implements EvaluatorInterface { + + private final Evaluator evaluator; + private final DataStore store; + private final LDLogger logger; + + // these are created at construction to avoid recreation during each evaluation + private final EvaluationRecorder evaluationEventRecorderWithDetails; + private final EvaluationRecorder evaluationEventRecorderWithoutDetails; + static final EvaluationRecorder NO_OP_EVALUATION_EVENT_RECORDER = new EvaluationRecorder() { + }; + + /** + * Creates an {@link InputValidatingEvaluator} + * + * @param store will be used to get flag data + * @param segmentStore will be used to get segment data + * @param eventProcessor will be used to record events during evaluations as necessary + * @param logger for logging messages and errors during evaluations + */ + InputValidatingEvaluator(DataStore store, BigSegmentStoreWrapper segmentStore, @Nonnull EventProcessor eventProcessor, LDLogger logger) { + this.evaluator = new Evaluator(new Evaluator.Getters() { + public DataModel.FeatureFlag getFlag(String key) { + return InputValidatingEvaluator.getFlag(store, key); + } + + public DataModel.Segment getSegment(String key) { + return InputValidatingEvaluator.getSegment(store, key); + } + + public BigSegmentStoreWrapper.BigSegmentsQueryResult getBigSegments(String key) { + return segmentStore == null ? 
null : segmentStore.getUserMembership(key); + } + + }, logger); + + this.store = store; + this.logger = logger; + + // these are created at construction to avoid recreation during each evaluation + this.evaluationEventRecorderWithDetails = makeEvaluationRecorder(eventProcessor, true); + this.evaluationEventRecorderWithoutDetails = makeEvaluationRecorder(eventProcessor, false); + } + + @Override + public EvalResultAndFlag evalAndFlag(String method, String flagKey, LDContext context, LDValue defaultValue, + @Nullable LDValueType requireType, EvaluationOptions options) { + // this implementation does not care for the method parameter + + // map options to appropriate event sink + EvaluationRecorder sink; + if (options == EvaluationOptions.EVENTS_WITH_REASONS) { + sink = evaluationEventRecorderWithDetails; + } else if (options == EvaluationOptions.EVENTS_WITHOUT_REASONS) { + sink = evaluationEventRecorderWithoutDetails; + } else { + sink = NO_OP_EVALUATION_EVENT_RECORDER; + } + + return evaluate(flagKey, context, defaultValue, requireType, sink); + } + + /** + * This function evaluates using the provided information and handles error cases related to initialization, + * parameter validation, evaluation result type assertion, and runtime exceptions. + * + * @param flagKey key of the flag that will be evaluated + * @param context the evaluation context + * @param defaultValue the default value that will be returned in the case where the evaluator is unable to positively + * evaluate the flag. This may be because the flag is unknown, invalid context usage, or several + * other potential reasons. 
+ * @param requireType if not null, a value type assertion will be made + * @param recorder the recorder that will record during evaluation + * @return an {@link EvalResultAndFlag} - guaranteed non-null + */ + EvalResultAndFlag evaluate(String flagKey, LDContext context, LDValue defaultValue, + @Nullable LDValueType requireType, EvaluationRecorder recorder) { + if (!store.isInitialized()) { + logger.warn("Evaluation called before client initialized for feature flag \"{}\"; data store unavailable, returning default value", flagKey); + recorder.recordEvaluationUnknownFlagError(flagKey, context, defaultValue, ErrorKind.CLIENT_NOT_READY); + return new EvalResultAndFlag(EvalResult.error(ErrorKind.CLIENT_NOT_READY, defaultValue), null); + } + + if (context == null) { + logger.warn("Null context when evaluating flag \"{}\"; returning default value", flagKey); + return new EvalResultAndFlag(EvalResult.error(ErrorKind.USER_NOT_SPECIFIED, defaultValue), null); + } + if (!context.isValid()) { + logger.warn("Invalid context when evaluating flag \"{}\"; returning default value: " + context.getError(), flagKey); + return new EvalResultAndFlag(EvalResult.error(ErrorKind.USER_NOT_SPECIFIED, defaultValue), null); + } + + FeatureFlag featureFlag = null; + try { + featureFlag = getFlag(store, flagKey); + if (featureFlag == null) { + logger.info("Unknown feature flag \"{}\"; returning default value", flagKey); + recorder.recordEvaluationUnknownFlagError(flagKey, context, defaultValue, ErrorKind.FLAG_NOT_FOUND); + return new EvalResultAndFlag(EvalResult.error(ErrorKind.FLAG_NOT_FOUND, defaultValue), null); + } + + EvalResult result = evaluator.evaluate(featureFlag, context, recorder); + if (result.isNoVariation()) { + result = EvalResult.of(defaultValue, result.getVariationIndex(), result.getReason()); + } else { + LDValue value = result.getValue(); // guaranteed not to be an actual Java null, but can be LDValue.ofNull() + if (requireType != null && + !value.isNull() && + 
value.getType() != requireType) { + logger.error("Feature flag \"{}\"; evaluation expected result as {}, but got {}", flagKey, defaultValue.getType(), value.getType()); + recorder.recordEvaluationError(featureFlag, context, defaultValue, ErrorKind.WRONG_TYPE); + return new EvalResultAndFlag(EvalResult.error(ErrorKind.WRONG_TYPE, defaultValue), featureFlag); + } + } + + recorder.recordEvaluation(featureFlag, context, result, defaultValue); + return new EvalResultAndFlag(result, featureFlag); + + } catch (Exception e) { + logger.error("Encountered exception while evaluating feature flag \"{}\": {}", flagKey, + LogValues.exceptionSummary(e)); + logger.debug("{}", LogValues.exceptionTrace(e)); + if (featureFlag == null) { + recorder.recordEvaluationUnknownFlagError(flagKey, context, defaultValue, ErrorKind.EXCEPTION); + } else { + recorder.recordEvaluationError(featureFlag, context, defaultValue, ErrorKind.EXCEPTION); + } + return new EvalResultAndFlag(EvalResult.of(defaultValue, NO_VARIATION, EvaluationReason.exception(e)), null); + } + } + + public FeatureFlagsState allFlagsState(LDContext context, FlagsStateOption... options) { + FeatureFlagsState.Builder builder = FeatureFlagsState.builder(options); + + if (!store.isInitialized()) { + logger.warn("allFlagsState() was called before client initialized; data store unavailable, returning no data"); + return builder.valid(false).build(); + } + + if (context == null) { + logger.warn("allFlagsState() was called with null context! 
returning no data"); + return builder.valid(false).build(); + } + if (!context.isValid()) { + logger.warn("allFlagsState() was called with invalid context: " + context.getError()); + return builder.valid(false).build(); + } + + boolean clientSideOnly = FlagsStateOption.hasOption(options, FlagsStateOption.CLIENT_SIDE_ONLY); + DataStoreTypes.KeyedItems flags; + try { + flags = store.getAll(FEATURES); + } catch (Exception e) { + logger.error("Exception from data store when evaluating all flags: {}", LogValues.exceptionSummary(e)); + logger.debug(e.toString(), LogValues.exceptionTrace(e)); + return builder.valid(false).build(); + } + + for (Map.Entry entry : flags.getItems()) { + if (entry.getValue().getItem() == null) { + continue; // deleted flag placeholder + } + DataModel.FeatureFlag flag = (DataModel.FeatureFlag) entry.getValue().getItem(); + if (clientSideOnly && !flag.isClientSide()) { + continue; + } + try { + // Note: a no op evaluation event recorder is provided as we don't want the all flag state to generate + // any evaluation events. + EvalResult result = evaluator.evaluate(flag, context, NO_OP_EVALUATION_EVENT_RECORDER); + builder.addFlag(flag, result); + } catch (Exception e) { + logger.error("Exception caught for feature flag \"{}\" when evaluating all flags: {}", flag.getKey(), + LogValues.exceptionSummary(e)); + logger.debug(e.toString(), LogValues.exceptionTrace(e)); + builder.addFlag(flag, EvalResult.of(LDValue.ofNull(), NO_VARIATION, EvaluationReason.exception(e))); + } + } + return builder.build(); + } + + private static DataModel.FeatureFlag getFlag(DataStore store, String key) { + DataStoreTypes.ItemDescriptor item = store.get(FEATURES, key); + return item == null ? null : (DataModel.FeatureFlag) item.getItem(); + } + + private static DataModel.Segment getSegment(DataStore store, String key) { + DataStoreTypes.ItemDescriptor item = store.get(SEGMENTS, key); + return item == null ? 
null : (DataModel.Segment) item.getItem(); + } + + /** + * This function will create an {@link EvaluationRecorder} that uses the provided processor internally and + * adjusts behavior based on the other parameters provided. + * + * @param processor that will be used internally + * @param withReasons controls whether to include reasons when recording the events + * @return the {@link EvaluationRecorder} + */ + private static EvaluationRecorder makeEvaluationRecorder(EventProcessor processor, boolean withReasons) { + return new EvaluationRecorder() { + @Override + public void recordEvaluation(FeatureFlag flag, LDContext context, EvalResult result, LDValue defaultValue) { + processor.recordEvaluationEvent( + context, + flag.getKey(), + flag.getVersion(), + result.getVariationIndex(), + result.getValue(), + (withReasons || result.isForceReasonTracking()) ? result.getReason() : null, + defaultValue, + null, + flag.isTrackEvents() || result.isForceReasonTracking(), + flag.getDebugEventsUntilDate(), + flag.isExcludeFromSummaries(), + flag.getSamplingRatio() + ); + } + + @Override + public void recordPrerequisiteEvaluation(FeatureFlag flag, FeatureFlag prereqOfFlag, LDContext context, EvalResult result) { + processor.recordEvaluationEvent( + context, + flag.getKey(), + flag.getVersion(), + result.getVariationIndex(), + result.getValue(), + (withReasons || result.isForceReasonTracking()) ? result.getReason() : null, + LDValue.ofNull(), // note this default value ofNull is special because pre-req evals don't have defaulting + prereqOfFlag.getKey(), + flag.isTrackEvents() || result.isForceReasonTracking(), + flag.getDebugEventsUntilDate(), + flag.isExcludeFromSummaries(), + flag.getSamplingRatio() + ); + } + + @Override + public void recordEvaluationError(FeatureFlag flag, LDContext context, LDValue defaultValue, ErrorKind errorKind) { + processor.recordEvaluationEvent( + context, + flag.getKey(), + flag.getVersion(), + NO_VARIATION, + defaultValue, + withReasons ? 
EvaluationReason.error(errorKind) : null, + defaultValue, + null, + flag.isTrackEvents(), + flag.getDebugEventsUntilDate(), + flag.isExcludeFromSummaries(), + flag.getSamplingRatio() + ); + } + + @Override + public void recordEvaluationUnknownFlagError(String flagKey, LDContext context, LDValue defaultValue, ErrorKind errorKind) { + processor.recordEvaluationEvent( + context, + flagKey, + NO_VERSION, + NO_VARIATION, + defaultValue, + withReasons ? EvaluationReason.error(errorKind) : null, + defaultValue, + null, + false, + null, + false, + null + ); + } + }; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java new file mode 100644 index 0000000..1555444 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/JsonHelpers.java @@ -0,0 +1,127 @@ +package com.launchdarkly.sdk.server; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.TypeAdapter; +import com.google.gson.TypeAdapterFactory; +import com.google.gson.reflect.TypeToken; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import java.io.IOException; + +abstract class JsonHelpers { + private JsonHelpers() {} + + private static final Gson gsonWithNullsAllowed = new GsonBuilder().serializeNulls().create(); + private static final Gson gsonWithNullsSuppressed = new GsonBuilder().create(); + + /** + * Returns a shared instance of Gson with default configuration. This should not be used for serializing + * event data, since it does not have any of the configurable behavior related to private attributes. + * Code in _unit tests_ should _not_ use this method, because the tests can be run from other projects + * in an environment where the classpath contains a shaded copy of Gson instead of regular Gson. 
+ * + * @see #gsonWithNullsAllowed + */ + static Gson gsonInstance() { + return gsonWithNullsSuppressed; + } + + /** + * Returns a shared instance of Gson with default configuration except that properties with null values + * are not automatically dropped. We use this in contexts where we want to exactly reproduce + * whatever the serializer for a type is outputting. + * + * @see #gsonInstance() + */ + static Gson gsonInstanceWithNullsAllowed() { + return gsonWithNullsAllowed; + } + + /** + * Deserializes an object from JSON. We should use this helper method instead of directly calling + * gson.fromJson() to minimize reliance on details of the framework we're using, and to ensure that we + * consistently use our wrapper exception. + * + * @param json the serialized JSON string + * @param objectClass class of object to create + * @return the deserialized object + * @throws SerializationException if Gson throws an exception + */ + static T deserialize(String json, Class objectClass) throws SerializationException { + try { + return gsonInstance().fromJson(json, objectClass); + } catch (Exception e) { + throw new SerializationException(e); + } + } + + /** + * Deserializes an object from a JSON stream. + * + * @param reader the JSON reader + * @param objectClass class of object to create + * @return the deserialized object + * @throws SerializationException if Gson throws an exception + */ + static T deserialize(JsonReader reader, Class objectClass) throws SerializationException { + try { + return gsonInstance().fromJson(reader, objectClass); + } catch (Exception e) { + throw new SerializationException(e); + } + } + + /** + * Serializes an object to JSON. We should use this helper method instead of directly calling + * gson.toJson() to minimize reliance on details of the framework we're using (except when we need to use + * gsonInstanceForEventsSerialization, since our event serialization logic isn't well suited to using a + * simple abstraction). 
+ * + * @param o the object to serialize + * @return the serialized JSON string + */ + static String serialize(Object o) { + return gsonInstance().toJson(o); + } + + /** + * Implement this interface on any internal class that needs to do some kind of post-processing after + * being unmarshaled from JSON. You must also add the annotation {@code JsonAdapter(JsonHelpers.PostProcessingDeserializableTypeAdapterFactory)} + * to the class for this to work. + */ + static interface PostProcessingDeserializable { + void afterDeserialized(); + } + + static class PostProcessingDeserializableTypeAdapterFactory implements TypeAdapterFactory { + @Override + public TypeAdapter create(Gson gson, TypeToken type) { + return new PostProcessingDeserializableTypeAdapter<>(gson.getDelegateAdapter(this, type)); + } + } + + private static class PostProcessingDeserializableTypeAdapter extends TypeAdapter { + private final TypeAdapter baseAdapter; + + PostProcessingDeserializableTypeAdapter(TypeAdapter baseAdapter) { + this.baseAdapter = baseAdapter; + } + + @Override + public void write(JsonWriter out, T value) throws IOException { + baseAdapter.write(out, value); + } + + @Override + public T read(JsonReader in) throws IOException { + T instance = baseAdapter.read(in); + if (instance instanceof PostProcessingDeserializable) { + ((PostProcessingDeserializable)instance).afterDeserialized(); + } + return instance; + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDClient.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDClient.java new file mode 100644 index 0000000..4e6bda3 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDClient.java @@ -0,0 +1,526 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import 
com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; +import com.launchdarkly.sdk.internal.http.HttpHelpers; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; +import com.launchdarkly.sdk.server.interfaces.FlagTracker; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; +import org.apache.commons.codec.binary.Hex; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.security.InvalidKeyException; +import java.security.NoSuchAlgorithmException; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static com.google.common.base.Preconditions.checkNotNull; +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; + +/** + * A client for the LaunchDarkly API. Client instances are thread-safe. 
Applications should instantiate + * a single {@code LDClient} for the lifetime of their application. + */ +public final class LDClient implements LDClientInterface { + private static final String HMAC_ALGORITHM = "HmacSHA256"; + + private final String sdkKey; + private final boolean offline; + @VisibleForTesting + final EvaluatorInterface evaluator; + final EvaluatorInterface migrationEvaluator; + final EventProcessor eventProcessor; + final DataSource dataSource; + final DataStore dataStore; + private final BigSegmentStoreStatusProvider bigSegmentStoreStatusProvider; + private final BigSegmentStoreWrapper bigSegmentStoreWrapper; + private final DataSourceUpdateSink dataSourceUpdates; + private final DataStoreStatusProviderImpl dataStoreStatusProvider; + private final DataSourceStatusProviderImpl dataSourceStatusProvider; + private final FlagTrackerImpl flagTracker; + private final EventBroadcasterImpl flagChangeBroadcaster; + private final ScheduledExecutorService sharedExecutor; + private final LDLogger baseLogger; + private final LDLogger evaluationLogger; + + private static final int EXCESSIVE_INIT_WAIT_MILLIS = 60000; + + /** + * Creates a new client instance that connects to LaunchDarkly with the default configuration. + *

    + * If you need to specify any custom SDK options, use {@link LDClient#LDClient(String, LDConfig)} + * instead. + *

    + * Applications should instantiate a single instance for the lifetime of the application. In + * unusual cases where an application needs to evaluate feature flags from different LaunchDarkly + * projects or environments, you may create multiple clients, but they should still be retained + * for the lifetime of the application rather than created per request or per thread. + *

    + * The client will begin attempting to connect to LaunchDarkly as soon as you call the constructor. + * The constructor will return when it successfully connects, or when the default timeout of 5 seconds + * expires, whichever comes first. If it has not succeeded in connecting when the timeout elapses, + * you will receive the client in an uninitialized state where feature flags will return default + * values; it will still continue trying to connect in the background. You can detect whether + * initialization has succeeded by calling {@link #isInitialized()}. If you prefer to customize + * this behavior, use {@link LDClient#LDClient(String, LDConfig)} instead. + *

    + * For rules regarding the throwing of unchecked exceptions for error conditions, see + * {@link LDClient#LDClient(String, LDConfig)}. + * + * @param sdkKey the SDK key for your LaunchDarkly environment + * @throws IllegalArgumentException if a parameter contained a grossly malformed value; + * for security reasons, in case of an illegal SDK key, the exception message does + * not include the key + * @throws NullPointerException if a non-nullable parameter was null + * @see LDClient#LDClient(String, LDConfig) + */ + public LDClient(String sdkKey) { + // COVERAGE: this constructor cannot be called in unit tests because it uses the default base + // URI and will attempt to make a live connection to LaunchDarkly. + this(sdkKey, LDConfig.DEFAULT); + } + + private static DataModel.FeatureFlag getFlag(DataStore store, String key) { + ItemDescriptor item = store.get(FEATURES, key); + return item == null ? null : (DataModel.FeatureFlag) item.getItem(); + } + + private static DataModel.Segment getSegment(DataStore store, String key) { + ItemDescriptor item = store.get(SEGMENTS, key); + return item == null ? null : (DataModel.Segment) item.getItem(); + } + + /** + * Creates a new client to connect to LaunchDarkly with a custom configuration. + *

    + * This constructor can be used to configure advanced SDK features; see {@link LDConfig.Builder}. + *

    + * Applications should instantiate a single instance for the lifetime of the application. In + * unusual cases where an application needs to evaluate feature flags from different LaunchDarkly + * projects or environments, you may create multiple clients, but they should still be retained + * for the lifetime of the application rather than created per request or per thread. + *

    + * Unless it is configured to be offline with {@link LDConfig.Builder#offline(boolean)} or + * {@link Components#externalUpdatesOnly()}, the client will begin attempting to connect to + * LaunchDarkly as soon as you call the constructor. The constructor will return when it successfully + * connects, or when the timeout set by {@link LDConfig.Builder#startWait(java.time.Duration)} (default: + * 5 seconds) expires, whichever comes first. If it has not succeeded in connecting when the timeout + * elapses, you will receive the client in an uninitialized state where feature flags will return + * default values; it will still continue trying to connect in the background. You can detect + * whether initialization has succeeded by calling {@link #isInitialized()}. + *

    + * If you prefer to have the constructor return immediately, and then wait for initialization to finish + * at some other point, you can use {@link #getDataSourceStatusProvider()} as follows: + *

    
    +   *     LDConfig config = new LDConfig.Builder()
    +   *         .startWait(Duration.ZERO)
    +   *         .build();
    +   *     LDClient client = new LDClient(sdkKey, config);
    +   *
    +   *     // later, when you want to wait for initialization to finish:
    +   *     boolean inited = client.getDataSourceStatusProvider().waitFor(
    +   *         DataSourceStatusProvider.State.VALID, Duration.ofSeconds(10));
    +   *     if (!inited) {
    +   *         // do whatever is appropriate if initialization has timed out
    +   *     }
    +   * 
    + *

    + * This constructor can throw unchecked exceptions if it is immediately apparent that + * the SDK cannot work with these parameters. For instance, if the SDK key contains a + * non-printable character that cannot be used in an HTTP header, it will throw an + * {@link IllegalArgumentException} since the SDK key is normally sent to LaunchDarkly + * in an HTTP header and no such value could possibly be valid. Similarly, a null + * value for a non-nullable parameter may throw a {@link NullPointerException}. The + * constructor will not throw an exception for any error condition that could only be + * detected after making a request to LaunchDarkly (such as an SDK key that is simply + * wrong despite being valid ASCII, so it is invalid but not illegal); those are logged + * and treated as an unsuccessful initialization, as described above. + * + * @param sdkKey the SDK key for your LaunchDarkly environment + * @param config a client configuration object + * @throws IllegalArgumentException if a parameter contained a grossly malformed value; + * for security reasons, in case of an illegal SDK key, the exception message does + * not include the key + * @throws NullPointerException if a non-nullable parameter was null + * @see LDClient#LDClient(String, LDConfig) + */ + public LDClient(String sdkKey, LDConfig config) { + checkNotNull(config, "config must not be null"); + this.sdkKey = checkNotNull(sdkKey, "sdkKey must not be null"); + if (!HttpHelpers.isAsciiHeaderValue(sdkKey)) { + throw new IllegalArgumentException("SDK key contained an invalid character"); + } + this.offline = config.offline; + + this.sharedExecutor = createSharedExecutor(config); + + final ClientContextImpl context = ClientContextImpl.fromConfig( + sdkKey, + config, + sharedExecutor + ); + this.baseLogger = context.getBaseLogger(); + this.evaluationLogger = this.baseLogger.subLogger(Loggers.EVALUATION_LOGGER_NAME); + + this.eventProcessor = config.events.build(context); + + EventBroadcasterImpl 
bigSegmentStoreStatusNotifier = + EventBroadcasterImpl.forBigSegmentStoreStatus(sharedExecutor, baseLogger); + BigSegmentsConfiguration bigSegmentsConfig = config.bigSegments.build(context); + if (bigSegmentsConfig.getStore() != null) { + bigSegmentStoreWrapper = new BigSegmentStoreWrapper(bigSegmentsConfig, bigSegmentStoreStatusNotifier, sharedExecutor, + this.baseLogger.subLogger(Loggers.BIG_SEGMENTS_LOGGER_NAME)); + } else { + bigSegmentStoreWrapper = null; + } + bigSegmentStoreStatusProvider = new BigSegmentStoreStatusProviderImpl(bigSegmentStoreStatusNotifier, bigSegmentStoreWrapper); + + EventBroadcasterImpl dataStoreStatusNotifier = + EventBroadcasterImpl.forDataStoreStatus(sharedExecutor, baseLogger); + DataStoreUpdatesImpl dataStoreUpdates = new DataStoreUpdatesImpl(dataStoreStatusNotifier); + this.dataStore = config.dataStore.build(context.withDataStoreUpdateSink(dataStoreUpdates)); + + EvaluatorInterface evaluator = new InputValidatingEvaluator(dataStore, bigSegmentStoreWrapper, eventProcessor, evaluationLogger); + + // decorate evaluator with hooks if hooks were provided + if (config.hooks.getHooks().isEmpty()) { + this.evaluator = evaluator; + this.migrationEvaluator = new MigrationStageEnforcingEvaluator(evaluator, evaluationLogger); + } else { + this.evaluator = new EvaluatorWithHooks(evaluator, config.hooks.getHooks(), this.baseLogger.subLogger(Loggers.HOOKS_LOGGER_NAME)); + this.migrationEvaluator = new EvaluatorWithHooks(new MigrationStageEnforcingEvaluator(evaluator, evaluationLogger), config.hooks.getHooks(), this.baseLogger.subLogger(Loggers.HOOKS_LOGGER_NAME)); + } + + this.flagChangeBroadcaster = EventBroadcasterImpl.forFlagChangeEvents(sharedExecutor, baseLogger); + this.flagTracker = new FlagTrackerImpl(flagChangeBroadcaster, + (key, ctx) -> jsonValueVariation(key, ctx, LDValue.ofNull())); + + this.dataStoreStatusProvider = new DataStoreStatusProviderImpl(this.dataStore, dataStoreUpdates); + + EventBroadcasterImpl dataSourceStatusNotifier = 
+ EventBroadcasterImpl.forDataSourceStatus(sharedExecutor, baseLogger); + DataSourceUpdatesImpl dataSourceUpdates = new DataSourceUpdatesImpl( + dataStore, + dataStoreStatusProvider, + flagChangeBroadcaster, + dataSourceStatusNotifier, + sharedExecutor, + context.getLogging().getLogDataSourceOutageAsErrorAfter(), + baseLogger + ); + this.dataSourceUpdates = dataSourceUpdates; + this.dataSource = config.dataSource.build(context.withDataSourceUpdateSink(dataSourceUpdates)); + this.dataSourceStatusProvider = new DataSourceStatusProviderImpl(dataSourceStatusNotifier, dataSourceUpdates); + + Future startFuture = dataSource.start(); + if (!config.startWait.isZero() && !config.startWait.isNegative()) { + if (!(dataSource instanceof ComponentsImpl.NullDataSource)) { + baseLogger.info("Waiting up to {} milliseconds for LaunchDarkly client to start...", + config.startWait.toMillis()); + if (config.startWait.toMillis() > EXCESSIVE_INIT_WAIT_MILLIS) { + baseLogger.warn("LaunchDarkly client created with start wait time of {} milliseconds. 
We recommend a timeout of less than {} milliseconds.", config.startWait.toMillis(), EXCESSIVE_INIT_WAIT_MILLIS); + } + } + try { + startFuture.get(config.startWait.toMillis(), TimeUnit.MILLISECONDS); + } catch (TimeoutException e) { + baseLogger.error("Timeout encountered waiting for LaunchDarkly client initialization"); + } catch (Exception e) { + baseLogger.error("Exception encountered waiting for LaunchDarkly client initialization: {}", + LogValues.exceptionSummary(e)); + baseLogger.debug("{}", LogValues.exceptionTrace(e)); + } + if (!dataSource.isInitialized()) { + baseLogger.warn("LaunchDarkly client was not successfully initialized"); + } + } + } + + @Override + public boolean isInitialized() { + return dataSource.isInitialized(); + } + + @Override + public void track(String eventName, LDContext context) { + trackData(eventName, context, LDValue.ofNull()); + } + + @Override + public void trackMigration(MigrationOpTracker tracker) { + eventProcessor.recordMigrationEvent(tracker); + } + + @Override + public void trackData(String eventName, LDContext context, LDValue data) { + if (context == null) { + baseLogger.warn("Track called with null context!"); + } else if (!context.isValid()) { + baseLogger.warn("Track called with invalid context: " + context.getError()); + } else { + eventProcessor.recordCustomEvent(context, eventName, data, null); + } + } + + @Override + public void trackMetric(String eventName, LDContext context, LDValue data, double metricValue) { + if (context == null) { + baseLogger.warn("Track called with null context!"); + } else if (!context.isValid()) { + baseLogger.warn("Track called with invalid context: " + context.getError()); + } else { + eventProcessor.recordCustomEvent(context, eventName, data, metricValue); + } + } + + @Override + public void identify(LDContext context) { + if (context == null) { + baseLogger.warn("Identify called with null context!"); + } else if (!context.isValid()) { + baseLogger.warn("Identify called with invalid 
context: " + context.getError()); + } else { + eventProcessor.recordIdentifyEvent(context); + } + } + + @Override + public FeatureFlagsState allFlagsState(LDContext context, FlagsStateOption... options) { + if (isOffline()) { + evaluationLogger.debug("allFlagsState() was called when client is in offline mode."); + } + + return evaluator.allFlagsState(context, options); + } + + @Override + public boolean boolVariation(String featureKey, LDContext context, boolean defaultValue) { + return evaluator.evalAndFlag("LDClient.boolVariation", featureKey, context, LDValue.of(defaultValue), LDValueType.BOOLEAN, + EvaluationOptions.EVENTS_WITHOUT_REASONS).getResult().getValue().booleanValue(); + } + + @Override + public int intVariation(String featureKey, LDContext context, int defaultValue) { + return evaluator.evalAndFlag("LDClient.intVariation", featureKey, context, LDValue.of(defaultValue), LDValueType.NUMBER, + EvaluationOptions.EVENTS_WITHOUT_REASONS).getResult().getValue().intValue(); + } + + @Override + public double doubleVariation(String featureKey, LDContext context, double defaultValue) { + return evaluator.evalAndFlag("LDClient.doubleVariation", featureKey, context, LDValue.of(defaultValue), LDValueType.NUMBER, + EvaluationOptions.EVENTS_WITHOUT_REASONS).getResult().getValue().doubleValue(); + } + + @Override + public String stringVariation(String featureKey, LDContext context, String defaultValue) { + return evaluator.evalAndFlag("LDClient.stringVariation", featureKey, context, LDValue.of(defaultValue), LDValueType.STRING, + EvaluationOptions.EVENTS_WITHOUT_REASONS).getResult().getValue().stringValue(); + + } + + @Override + public LDValue jsonValueVariation(String featureKey, LDContext context, LDValue defaultValue) { + return evaluator.evalAndFlag("LDClient.jsonValueVariation", featureKey, context, LDValue.normalize(defaultValue), null, + EvaluationOptions.EVENTS_WITHOUT_REASONS).getResult().getValue(); + } + + @Override + public EvaluationDetail 
boolVariationDetail(String featureKey, LDContext context, boolean defaultValue) { + return evaluator.evalAndFlag("LDClient.boolVariationDetail", featureKey, context, LDValue.of(defaultValue), LDValueType.BOOLEAN, + EvaluationOptions.EVENTS_WITH_REASONS).getResult().getAsBoolean(); + } + + @Override + public EvaluationDetail intVariationDetail(String featureKey, LDContext context, int defaultValue) { + return evaluator.evalAndFlag("LDClient.intVariationDetail", featureKey, context, LDValue.of(defaultValue), LDValueType.NUMBER, + EvaluationOptions.EVENTS_WITH_REASONS).getResult().getAsInteger(); + } + + @Override + public EvaluationDetail doubleVariationDetail(String featureKey, LDContext context, double defaultValue) { + return evaluator.evalAndFlag("LDClient.doubleVariationDetail", featureKey, context, LDValue.of(defaultValue), LDValueType.NUMBER, + EvaluationOptions.EVENTS_WITH_REASONS).getResult().getAsDouble(); + } + + @Override + public EvaluationDetail stringVariationDetail(String featureKey, LDContext context, String defaultValue) { + return evaluator.evalAndFlag("LDClient.stringVariationDetail", featureKey, context, LDValue.of(defaultValue), LDValueType.STRING, + EvaluationOptions.EVENTS_WITH_REASONS).getResult().getAsString(); + } + + @Override + public EvaluationDetail jsonValueVariationDetail(String featureKey, LDContext context, LDValue defaultValue) { + return evaluator.evalAndFlag("LDClient.jsonValueVariationDetail", featureKey, context, LDValue.normalize(defaultValue), null, + EvaluationOptions.EVENTS_WITH_REASONS).getResult().getAnyType(); + } + + @Override + public MigrationVariation migrationVariation(String key, LDContext context, MigrationStage defaultStage) { + // The migration evaluator is decorated with logic that will enforce the result is for a recognized migration + // stage or an error result is returned with the default stage value. 
This decorator was added as part of + // the Hooks implementation to ensure that the Hook would be given the result after that migration stage + // enforcement. + EvalResultAndFlag res = migrationEvaluator.evalAndFlag("LDClient.migrationVariation", key, context, LDValue.of(defaultStage.toString()), LDValueType.STRING, + EvaluationOptions.EVENTS_WITHOUT_REASONS); + + // since evaluation result inner types are boxed primitives, it is necessary to still make this mapping to the + // MigrationState type. + EvaluationDetail resDetail = res.getResult().getAsString(); + MigrationStage stageChecked = MigrationStage.of(resDetail.getValue(), defaultStage); + + long checkRatio = 1; + + if (res.getFlag() != null && + res.getFlag().getMigration() != null && + res.getFlag().getMigration().getCheckRatio() != null) { + checkRatio = res.getFlag().getMigration().getCheckRatio(); + } + + MigrationOpTracker tracker = new MigrationOpTracker(key, res.getFlag(), resDetail, defaultStage, + stageChecked, context, checkRatio, baseLogger); + return new MigrationVariation(stageChecked, tracker); + } + + @Override + public boolean isFlagKnown(String featureKey) { + if (!isInitialized()) { + if (dataStore.isInitialized()) { + baseLogger.warn("isFlagKnown called before client initialized for feature flag \"{}\"; using last known values from data store", featureKey); + } else { + baseLogger.warn("isFlagKnown called before client initialized for feature flag \"{}\"; data store unavailable, returning false", featureKey); + return false; + } + } + + try { + if (getFlag(dataStore, featureKey) != null) { + return true; + } + } catch (Exception e) { + baseLogger.error("Encountered exception while calling isFlagKnown for feature flag \"{}\": {}", featureKey, + LogValues.exceptionSummary(e)); + baseLogger.debug("{}", LogValues.exceptionTrace(e)); + } + + return false; + } + + @Override + public FlagTracker getFlagTracker() { + return flagTracker; + } + + @Override + public BigSegmentStoreStatusProvider 
getBigSegmentStoreStatusProvider() { + return bigSegmentStoreStatusProvider; + } + + @Override + public DataStoreStatusProvider getDataStoreStatusProvider() { + return dataStoreStatusProvider; + } + + @Override + public LDLogger getLogger() { + return baseLogger; + } + + @Override + public DataSourceStatusProvider getDataSourceStatusProvider() { + return dataSourceStatusProvider; + } + + /** + * Shuts down the client and releases any resources it is using. + *

    + * Unless it is offline, the client will attempt to deliver any pending analytics events before + * closing. + */ + @Override + public void close() throws IOException { + baseLogger.info("Closing LaunchDarkly Client"); + this.dataStore.close(); + this.eventProcessor.close(); + this.dataSource.close(); + this.dataSourceUpdates.updateStatus(DataSourceStatusProvider.State.OFF, null); + if (this.bigSegmentStoreWrapper != null) { + this.bigSegmentStoreWrapper.close(); + } + this.sharedExecutor.shutdownNow(); + } + + @Override + public void flush() { + this.eventProcessor.flush(); + } + + @Override + public boolean isOffline() { + return offline; + } + + @Override + public String secureModeHash(LDContext context) { + if (context == null || !context.isValid()) { + return null; + } + try { + Mac mac = Mac.getInstance(HMAC_ALGORITHM); + mac.init(new SecretKeySpec(sdkKey.getBytes(), HMAC_ALGORITHM)); + return Hex.encodeHexString(mac.doFinal(context.getFullyQualifiedKey().getBytes("UTF8"))); + } catch (InvalidKeyException | UnsupportedEncodingException | NoSuchAlgorithmException e) { + // COVERAGE: there is no way to cause these errors in a unit test. + baseLogger.error("Could not generate secure mode hash: {}", LogValues.exceptionSummary(e)); + baseLogger.debug("{}", LogValues.exceptionTrace(e)); + } + return null; + } + + /** + * Returns the current version string of the client library. + * + * @return a version string conforming to Semantic Versioning (http://semver.org) + */ + @Override + public String version() { + return Version.SDK_VERSION; + } + + // This executor is used for a variety of SDK tasks such as flag change events, checking the data store + // status after an outage, and the poll task in polling mode. These are all tasks that we do not expect + // to be executing frequently so that it is acceptable to use a single thread to execute them one at a + // time rather than a thread pool, thus reducing the number of threads spawned by the SDK. 
This also + // has the benefit of producing predictable delivery order for event listener notifications. + private ScheduledExecutorService createSharedExecutor(LDConfig config) { + ThreadFactory threadFactory = new ThreadFactoryBuilder() + .setDaemon(true) + .setNameFormat("LaunchDarkly-tasks-%d") + .setPriority(config.threadPriority) + .build(); + return Executors.newSingleThreadScheduledExecutor(threadFactory); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDConfig.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDConfig.java new file mode 100644 index 0000000..f0fac29 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/LDConfig.java @@ -0,0 +1,396 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.BigSegmentsStatus; +import com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder; +import com.launchdarkly.sdk.server.integrations.HooksConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder; +import com.launchdarkly.sdk.server.integrations.WrapperInfoBuilder; +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; +import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration; +import com.launchdarkly.sdk.server.interfaces.ServiceEndpoints; +import com.launchdarkly.sdk.server.interfaces.WrapperInfo; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; +import com.launchdarkly.sdk.server.subsystems.HookConfiguration; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration; + +import java.time.Duration; + +/** + * This class exposes advanced configuration options for the {@link 
LDClient}. Instances of this class must be constructed with a {@link com.launchdarkly.sdk.server.LDConfig.Builder}. + */ +public final class LDConfig { + /** + * The default value for {@link Builder#startWait(Duration)}: 5 seconds. + */ + public static final Duration DEFAULT_START_WAIT = Duration.ofSeconds(5); + + protected static final LDConfig DEFAULT = new Builder().build(); + + final ApplicationInfo applicationInfo; + final ComponentConfigurer bigSegments; + final ComponentConfigurer dataSource; + final ComponentConfigurer dataStore; + final boolean diagnosticOptOut; + final ComponentConfigurer events; + final HookConfiguration hooks; + final ComponentConfigurer http; + final ComponentConfigurer logging; + final ServiceEndpoints serviceEndpoints; + final boolean offline; + final Duration startWait; + final int threadPriority; + final WrapperInfo wrapperInfo; + + protected LDConfig(Builder builder) { + if (builder.offline) { + this.dataSource = Components.externalUpdatesOnly(); + this.events = Components.noEvents(); + } else { + this.dataSource = builder.dataSource == null ? Components.streamingDataSource() : builder.dataSource; + this.events = builder.events == null ? Components.sendEvents() : builder.events; + } + this.applicationInfo = (builder.applicationInfoBuilder == null ? Components.applicationInfo() : + builder.applicationInfoBuilder) + .createApplicationInfo(); + this.bigSegments = builder.bigSegments == null ? Components.bigSegments(null) : builder.bigSegments; + this.dataStore = builder.dataStore == null ? Components.inMemoryDataStore() : builder.dataStore; + this.diagnosticOptOut = builder.diagnosticOptOut; + this.hooks = (builder.hooksConfigurationBuilder == null ? Components.hooks() : builder.hooksConfigurationBuilder).build(); + this.http = builder.http == null ? Components.httpConfiguration() : builder.http; + this.logging = builder.logging == null ? 
Components.logging() : builder.logging; + this.offline = builder.offline; + this.serviceEndpoints = (builder.serviceEndpointsBuilder == null ? Components.serviceEndpoints() : + builder.serviceEndpointsBuilder) + .createServiceEndpoints(); + this.startWait = builder.startWait; + this.threadPriority = builder.threadPriority; + this.wrapperInfo = builder.wrapperBuilder != null ? builder.wrapperBuilder.build() : null; + } + + /** + * A builder that helps construct + * {@link com.launchdarkly.sdk.server.LDConfig} objects. Builder calls can be chained, enabling the + * following pattern: + *

    +   * LDConfig config = new LDConfig.Builder()
    +   *      .connectTimeoutMillis(3)
    +   *      .socketTimeoutMillis(3)
    +   *      .build()
    +   * 
    + */ + public static class Builder { + private ApplicationInfoBuilder applicationInfoBuilder = null; + private ComponentConfigurer bigSegments = null; + private ComponentConfigurer dataSource = null; + private ComponentConfigurer dataStore = null; + private boolean diagnosticOptOut = false; + private ComponentConfigurer events = null; + private HooksConfigurationBuilder hooksConfigurationBuilder = null; + private ComponentConfigurer http = null; + private ComponentConfigurer logging = null; + private ServiceEndpointsBuilder serviceEndpointsBuilder = null; + private boolean offline = false; + private Duration startWait = DEFAULT_START_WAIT; + private int threadPriority = Thread.MIN_PRIORITY; + private WrapperInfoBuilder wrapperBuilder = null; + + /** + * Creates a builder with all configuration parameters set to the default + */ + public Builder() { + } + + /** + * Creates a {@link LDConfig.Builder} from the provided {@link LDConfig} + * + * @param config to be used to initialize the builder + * @return the builder + */ + public static Builder fromConfig(LDConfig config) { + Builder newBuilder = new Builder(); + newBuilder.applicationInfoBuilder = ApplicationInfoBuilder.fromApplicationInfo(config.applicationInfo); + newBuilder.bigSegments = config.bigSegments; + newBuilder.dataSource = config.dataSource; + newBuilder.dataStore = config.dataStore; + newBuilder.diagnosticOptOut = config.diagnosticOptOut; + newBuilder.events = config.events; + newBuilder.hooksConfigurationBuilder = ComponentsImpl.HooksConfigurationBuilderImpl.fromHooksConfiguration(config.hooks); + newBuilder.http = config.http; + newBuilder.logging = config.logging; + + newBuilder.serviceEndpointsBuilder = ComponentsImpl.ServiceEndpointsBuilderImpl + .fromServiceEndpoints(config.serviceEndpoints); + newBuilder.offline = config.offline; + newBuilder.startWait = config.startWait; + newBuilder.threadPriority = config.threadPriority; + newBuilder.wrapperBuilder = config.wrapperInfo != null ? 
+ ComponentsImpl.WrapperInfoBuilderImpl.fromInfo(config.wrapperInfo) : null; + return newBuilder; + } + + /** + * Sets the SDK's application metadata, which may be used in LaunchDarkly analytics or other product features, + * but does not affect feature flag evaluations. + *

    + * This object is normally a configuration builder obtained from {@link Components#applicationInfo()}, + * which has methods for setting individual logging-related properties. + * + * @param applicationInfoBuilder a configuration builder object returned by {@link Components#applicationInfo()} + * @return the builder + * @since 5.8.0 + */ + public Builder applicationInfo(ApplicationInfoBuilder applicationInfoBuilder) { + this.applicationInfoBuilder = applicationInfoBuilder; + return this; + } + + /** + * Sets the configuration of the SDK's Big Segments feature. + *

    + * Big Segments are a specific type of user segments. For more information, read the + * LaunchDarkly documentation + * . + *

    + * If you are using this feature, you will normally specify a database implementation that + * matches how the LaunchDarkly Relay Proxy is configured, since the Relay Proxy manages the + * Big Segment data. + *

    + * By default, there is no implementation and Big Segments cannot be evaluated. In this case, + * any flag evaluation that references a Big Segment will behave as if no users are included in + * any Big Segments, and the {@link EvaluationReason} associated with any such flag evaluation + * will have a {@link BigSegmentsStatus} of {@link BigSegmentsStatus#NOT_CONFIGURED}. + * + *

    
    +     *     // This example uses the Redis integration
    +     *     LDConfig config = LDConfig.builder()
    +     *         .bigSegments(Components.bigSegments(Redis.dataStore().prefix("app1"))
    +     *             .userCacheSize(2000))
    +     *         .build();
    +     * 
    + * + * @param bigSegmentsConfigurer the Big Segments configuration builder + * @return the builder + * @since 5.7.0 + * @see Components#bigSegments(ComponentConfigurer) + */ + public Builder bigSegments(ComponentConfigurer bigSegmentsConfigurer) { + this.bigSegments = bigSegmentsConfigurer; + return this; + } + + /** + * Sets the implementation of the component that receives feature flag data from LaunchDarkly, + * using a factory object. Depending on the implementation, the factory may be a builder that + * allows you to set other configuration options as well. + *

    + * The default is {@link Components#streamingDataSource()}. You may instead use + * {@link Components#pollingDataSource()}, or a test fixture such as + * {@link com.launchdarkly.sdk.server.integrations.FileData#dataSource()}. See those methods + * for details on how to configure them. + * + * @param dataSourceConfigurer the data source configuration builder + * @return the main configuration builder + * @since 4.12.0 + */ + public Builder dataSource(ComponentConfigurer dataSourceConfigurer) { + this.dataSource = dataSourceConfigurer; + return this; + } + + /** + * Sets the implementation of the data store to be used for holding feature flags and + * related data received from LaunchDarkly, using a factory object. The default is + * {@link Components#inMemoryDataStore()}; for database integrations, use + * {@link Components#persistentDataStore(ComponentConfigurer)}. + * + * @param dataStoreConfigurer the data store configuration builder + * @return the main configuration builder + * @since 4.12.0 + */ + public Builder dataStore(ComponentConfigurer dataStoreConfigurer) { + this.dataStore = dataStoreConfigurer; + return this; + } + + /** + * Set to true to opt out of sending diagnostics data. + *

    + * Unless {@code diagnosticOptOut} is set to true, the client will send some diagnostics data to the + * LaunchDarkly servers in order to assist in the development of future SDK improvements. These diagnostics + * consist of an initial payload containing some details of SDK in use, the SDK's configuration, and the platform + * the SDK is being run on; as well as payloads sent periodically with information on irregular occurrences such + * as dropped events. + * + * @see com.launchdarkly.sdk.server.integrations.EventProcessorBuilder#diagnosticRecordingInterval(Duration) + * + * @param diagnosticOptOut true if you want to opt out of sending any diagnostics data + * @return the builder + * @since 4.12.0 + */ + public Builder diagnosticOptOut(boolean diagnosticOptOut) { + this.diagnosticOptOut = diagnosticOptOut; + return this; + } + + /** + * Sets the implementation of {@link EventProcessor} to be used for processing analytics events. + *

    + * The default is {@link Components#sendEvents()} with no custom options. You may instead call + * {@link Components#sendEvents()} and then set custom options for event processing; or, disable + * events with {@link Components#noEvents()}; or, choose to use a custom implementation (for + * instance, a test fixture). + * + * @param eventsConfigurer the events configuration builder + * @return the main configuration builder + * @since 4.12.0 + * @see Components#sendEvents() + * @see Components#noEvents() + */ + public Builder events(ComponentConfigurer eventsConfigurer) { + this.events = eventsConfigurer; + return this; + } + + /** + * Sets the SDK's hooks configuration, using a builder. This is normally a obtained from + * {@link Components#hooks()} ()}, which has methods for setting individual other hook + * related properties. + * + * @param hooksConfiguration the hooks configuration builder + * @return the main configuration builder + * @see Components#hooks() + */ + public Builder hooks(HooksConfigurationBuilder hooksConfiguration) { + this.hooksConfigurationBuilder = hooksConfiguration; + return this; + } + + /** + * Sets the SDK's networking configuration, using a configuration builder. This builder is + * obtained from {@link Components#httpConfiguration()}, and has methods for setting individual + * HTTP-related properties. + * + * @param httpConfigurer the HTTP configuration builder + * @return the main configuration builder + * @since 4.13.0 + * @see Components#httpConfiguration() + */ + public Builder http(ComponentConfigurer httpConfigurer) { + this.http = httpConfigurer; + return this; + } + + /** + * Sets the SDK's logging configuration, using a factory object. This object is normally a + * configuration builder obtained from {@link Components#logging()}, which has methods + * for setting individual logging-related properties. 
+ * + * @param loggingConfigurer the logging configuration builder + * @return the main configuration builder + * @since 5.0.0 + * @see Components#logging() + */ + public Builder logging(ComponentConfigurer loggingConfigurer) { + this.logging = loggingConfigurer; + return this; + } + + /** + * Set whether this client is offline. + *

    + * In offline mode, the SDK will not make network connections to LaunchDarkly for any purpose. Feature + * flag data will only be available if it already exists in the data store, and analytics events will + * not be sent. + *

    + * This is equivalent to calling {@code dataSource(Components.externalUpdatesOnly())} and + * {@code events(Components.noEvents())}. It overrides any other values you may have set for + * {@link #dataSource(ComponentConfigurer)} or {@link #events(ComponentConfigurer)}. + * + * @param offline when set to true no calls to LaunchDarkly will be made + * @return the builder + */ + public Builder offline(boolean offline) { + this.offline = offline; + return this; + } + + /** + * Sets the base service URIs used by SDK components. + *

    + * This object is normally a configuration builder obtained from {@link Components#serviceEndpoints()}, + * which has methods for setting each external endpoint to a custom URI. + * + * @param serviceEndpointsBuilder a configuration builder object returned by {@link Components#serviceEndpoints()} + * @return the builder + * @since 5.9.0 + */ + public Builder serviceEndpoints(ServiceEndpointsBuilder serviceEndpointsBuilder) { + this.serviceEndpointsBuilder = serviceEndpointsBuilder; + return this; + } + + /** + * Set how long the constructor will block awaiting a successful connection to LaunchDarkly. + * Setting this to a zero or negative duration will not block and cause the constructor to return immediately. + *

    + * The default is {@link #DEFAULT_START_WAIT}. + * + * @param startWait maximum time to wait; null to use the default + * @return the builder + */ + public Builder startWait(Duration startWait) { + this.startWait = startWait == null ? DEFAULT_START_WAIT : startWait; + return this; + } + + /** + * Set the priority to use for all threads created by the SDK. + *

    + * By default, the SDK's worker threads use {@code Thread.MIN_PRIORITY} so that they will yield to + * application threads if the JVM is busy. You may increase this if you want the SDK to be prioritized + * over some other low-priority tasks. + *

    + * Values outside the range of [{@code Thread.MIN_PRIORITY}, {@code Thread.MAX_PRIORITY}] will be set + * to the minimum or maximum. + * + * @param threadPriority the priority for SDK threads + * @return the builder + * @since 5.0.0 + */ + public Builder threadPriority(int threadPriority) { + this.threadPriority = Math.max(Thread.MIN_PRIORITY, Math.min(Thread.MAX_PRIORITY, threadPriority)); + return this; + } + + /** + * Set the wrapper information. + *

    + * This is intended for use with wrapper SDKs from LaunchDarkly. + *

    + * If the WrapperBuilder is set, then it will replace the wrapper information from the HttpPropertiesBuilder. + * Additionally, any wrapper SDK may overwrite any application developer provided wrapper information. + * + * @param wrapperBuilder the wrapper builder + * @return the builder + * @since 7.1.0 + */ + public Builder wrapper(WrapperInfoBuilder wrapperBuilder) { + this.wrapperBuilder = wrapperBuilder; + return this; + } + + /** + * Builds the configured {@link com.launchdarkly.sdk.server.LDConfig} object. + * + * @return the {@link com.launchdarkly.sdk.server.LDConfig} configured by this builder + */ + public LDConfig build() { + return new LDConfig(this); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Loggers.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Loggers.java new file mode 100644 index 0000000..823aa6a --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Loggers.java @@ -0,0 +1,27 @@ +package com.launchdarkly.sdk.server; + +/** + * Static logger instances to be shared by implementation code in the main {@code com.launchdarkly.sdk.server} + * package. + *

    + * The goal here is 1. to centralize logger references rather than having many calls to + * {@code LoggerFactory.getLogger()} all over the code, and 2. to encourage usage of a basic set of + * logger names that are not tied to class names besides the main LDClient class. Most class names in + * the SDK are package-private implementation details that are not meaningful to users, so in terms of + * both being able to see the relevant area of functionality at a glance when reading a log and also + * convenience in defining SLF4J logger name filters, it is preferable to use these stable names. + *

    + * Code in other packages such as {@code com.launchdarkly.sdk.server.integrations} cannot use these + * package-private fields, but should still use equivalent logger names as appropriate. + */ +abstract class Loggers { + private Loggers() {} + + static final String BASE_LOGGER_NAME = LDClient.class.getName(); + static final String BIG_SEGMENTS_LOGGER_NAME = "BigSegments"; + static final String DATA_SOURCE_LOGGER_NAME = "DataSource"; + static final String DATA_STORE_LOGGER_NAME = "DataStore"; + static final String EVALUATION_LOGGER_NAME = "Evaluation"; + static final String EVENTS_LOGGER_NAME = "Events"; + static final String HOOKS_LOGGER_NAME = "Hooks"; +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOp.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOp.java new file mode 100644 index 0000000..bb96a58 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOp.java @@ -0,0 +1,20 @@ +package com.launchdarkly.sdk.server; + +/** + * The type of migration operation. 
+ */ +public enum MigrationOp { + READ("read"), + WRITE("write"); + + private final String strValue; + + MigrationOp(final String strValue) { + this.strValue = strValue; + } + + @Override + public String toString() { + return strValue; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOpTracker.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOpTracker.java new file mode 100644 index 0000000..ac0b5f2 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOpTracker.java @@ -0,0 +1,307 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.Event; +import com.launchdarkly.sdk.internal.events.Sampler; +import com.launchdarkly.sdk.server.interfaces.ConsistencyCheck; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.time.Duration; +import java.util.Optional; + +/** + * Used to track information related to a migration operation. + */ +public class MigrationOpTracker { + + private boolean oldError = false; + private boolean newError = false; + + private boolean oldInvoked = false; + + private boolean newInvoked = false; + + private Duration oldLatency = null; + private Duration newLatency = null; + + private MigrationOp operation = null; + + private ConsistencyCheck consistencyCheck = ConsistencyCheck.NOT_CHECKED; + + private final DataModel.FeatureFlag flag; + + private final MigrationStage stage; + + private final MigrationStage defaultStage; + + private final EvaluationDetail evaluationDetail; + + private final LDContext context; + + private final String flagKey; + + private final long checkRatio; + + private final LDLogger logger; + + /** + * Interface for specifying the callback function for the consistency check. 
+ * This should remain a SAM (Single Abstract Method) to facilitate a lambda callback. + */ + public interface Checker { + boolean check(); + } + + MigrationOpTracker( + @NotNull String flagKey, + @Nullable DataModel.FeatureFlag flag, + @NotNull EvaluationDetail evaluationDetail, + @NotNull MigrationStage defaultStage, + @NotNull MigrationStage stage, + @NotNull LDContext context, + long checkRatio, + @NotNull LDLogger logger) { + this.flag = flag; + this.stage = stage; + this.defaultStage = defaultStage; + this.evaluationDetail = evaluationDetail; + this.context = context; + this.flagKey = flagKey; + this.checkRatio = checkRatio; + this.logger = logger; + } + + /** + * Sets the migration related operation associated with these tracking measurements. + * + * @param op the operation being tracked + */ + public synchronized void op(@NotNull MigrationOp op) { + operation = op; + } + + /** + * Report that an error has occurred for the specified origin. + * + * @param origin the origin of the error + */ + public synchronized void error(@NotNull MigrationOrigin origin) { + switch (origin) { + case OLD: + oldError = true; + break; + case NEW: + newError = true; + break; + } + } + + /** + * Check the consistency of a read result. This method should be invoked if the `check` function + * is defined for the migration and both reads ("new"/"old") were done. + *

    + * The function will use the checkRatio to determine if the check should be executed, and it + * will record the result. + *

    + * If the consistency check function throws an exception, then no measurement for consistency will be included + * in the generated migration op event. + *

    + * Example calling the check function from the migration config. + *

    +   * if (checker != null &&
    +   *   oldResult.success &&
    +   *   newResult.success
    +   * ) {
    +   *   // Temporary variables for the lambda invocation.
    +   *   MigrationResult&lt;TReadResult&gt; finalNewResult = newResult;
    +   *   MigrationResult&lt;TReadResult&gt; finalOldResult = oldResult;
    +   *
    +   *   tracker.consistency(() -> checker.check(finalOldResult.result,
    +   *   finalNewResult.result));
    +   * }
    +   * 
    + * + * @param checker The function which executes the check. This is not the `check` function from the + * migration options, but instead should be a parameter-less function that calls that function. + */ + public synchronized void consistency(@NotNull Checker checker) { + if (Sampler.shouldSample(checkRatio)) { + try { + consistencyCheck = checker.check() ? ConsistencyCheck.CONSISTENT : ConsistencyCheck.INCONSISTENT; + } catch(Exception e) { + logger.error("Exception when executing consistency check function for migration '{}' the consistency" + + " check will not be included in the generated migration op event. Exception: {}", flagKey, e); + } + } + } + + /** + * Report the latency of an operation. + * + * @param origin the origin the latency is being reported for + * @param duration the latency of the operation + */ + public synchronized void latency(@NotNull MigrationOrigin origin, @NotNull Duration duration) { + switch (origin) { + case OLD: + oldLatency = duration; + break; + case NEW: + newLatency = duration; + break; + } + } + + /** + * Call this to report that an origin was invoked (executed). There are some situations where the + * expectation is that both the old and new implementation will be used, but with writes + * it is possible that the non-authoritative will not execute. Reporting the execution allows + * for more accurate analytics. 
+ * + * @param origin the origin that was invoked + */ + public synchronized void invoked(@NotNull MigrationOrigin origin) { + switch (origin) { + case OLD: + oldInvoked = true; + break; + case NEW: + newInvoked = true; + break; + } + } + + private boolean invokedForOrigin(MigrationOrigin origin) { + if (origin == MigrationOrigin.OLD) { + return oldInvoked; + } + return newInvoked; + } + + private Duration latencyForOrigin(MigrationOrigin origin) { + if (origin == MigrationOrigin.OLD) { + return oldLatency; + } + return newLatency; + } + + private boolean errorForOrigin(MigrationOrigin origin) { + if (origin == MigrationOrigin.OLD) { + return oldError; + } + return newError; + } + + private boolean checkOriginEventConsistency(MigrationOrigin origin) { + if (invokedForOrigin(origin)) { + return true; + } + + // The origin was not invoked so any measurements involving it represent an inconsistency. + + String logTag = String.format("For migration op(%s) flagKey(%s):", operation, flagKey); + + if (latencyForOrigin(origin) != null) { + logger.error("{} Latency was recorded for {}, but that origin was not invoked.", logTag, origin); + return false; + } + + if (errorForOrigin(origin)) { + logger.error("{} Error reported for {}, but that origin was not invoked.", logTag, origin); + return false; + } + + if (this.consistencyCheck != ConsistencyCheck.NOT_CHECKED) { + logger.error("{} Consistency check was done, but {} was not invoked." + + " Both \"old\" and \"new\" must be invoked to do a comparison.", logTag, origin); + return false; + } + return true; + } + + /** + * Check for inconsistencies in the data used to generate an event. + * Log any inconsistencies found. 
+ */ + private boolean checkEventConsistency() { + return checkOriginEventConsistency(MigrationOrigin.OLD) && + checkOriginEventConsistency(MigrationOrigin.NEW); + } + + synchronized Optional createEvent() { + if(flagKey.isEmpty()) { + logger.error("The migration was executed against an empty flag key and no event will be created."); + return Optional.empty(); + } + if (operation == null) { + logger.error("The operation must be set, using \"op\" before an event can be created."); + return Optional.empty(); + } + if (!newInvoked && !oldInvoked) { + logger.error("The migration invoked neither the \"old\" or \"new\" implementation" + + " and an event cannot be generated."); + return Optional.empty(); + } + if (!context.isValid()) { + logger.error("The migration was not done against a valid context and cannot generate an event."); + return Optional.empty(); + } + + if(!checkEventConsistency()) { + return Optional.empty(); + } + + long samplingRatio = 1; + if (flag != null && flag.getSamplingRatio() != null) { + samplingRatio = flag.getSamplingRatio(); + } + + int flagVersion = -1; + flagVersion = flag != null ? flag.getVersion() : -1; + + Event.MigrationOp.InvokedMeasurement invokedMeasurement = + new Event.MigrationOp.InvokedMeasurement(oldInvoked, newInvoked); + + + Event.MigrationOp.LatencyMeasurement latencyMeasurement = null; + if (oldLatency != null | newLatency != null) { + latencyMeasurement = new Event.MigrationOp.LatencyMeasurement( + oldLatency != null ? oldLatency.toMillis() : null, + newLatency != null ? 
newLatency.toMillis() : null); + } + + Event.MigrationOp.ConsistencyMeasurement consistencyMeasurement = null; + if (consistencyCheck != ConsistencyCheck.NOT_CHECKED) { + consistencyMeasurement = new Event.MigrationOp.ConsistencyMeasurement( + consistencyCheck == ConsistencyCheck.CONSISTENT, + checkRatio); + } + + Event.MigrationOp.ErrorMeasurement errorMeasurement = null; + + if (oldError || newError) { + errorMeasurement = new Event.MigrationOp.ErrorMeasurement(oldError, newError); + } + + return Optional.of(new Event.MigrationOp( + System.currentTimeMillis(), + context, + flagKey, + evaluationDetail.getVariationIndex(), + flagVersion, + LDValue.of(stage.toString()), + LDValue.of(defaultStage.toString()), + evaluationDetail.getReason(), + samplingRatio, + operation.toString(), + invokedMeasurement, + consistencyMeasurement, + latencyMeasurement, + errorMeasurement + )); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOrigin.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOrigin.java new file mode 100644 index 0000000..acd559a --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationOrigin.java @@ -0,0 +1,16 @@ +package com.launchdarkly.sdk.server; + +/** + * The origin/source for a migration step. + */ +public enum MigrationOrigin { + /** + * The "old" implementation. + */ + OLD, + + /** + * The "new" implementation. + */ + NEW +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStage.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStage.java new file mode 100644 index 0000000..4bac91a --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStage.java @@ -0,0 +1,78 @@ +package com.launchdarkly.sdk.server; + +import java.util.Arrays; + +/** + * Stage denotes one of six possible stages a technology migration could be a + * part of, progressing through the following order. + *

    + * Off DualWrite Shadow Live RampDown Complete + */ +public enum MigrationStage { + /** + * Off - migration hasn't started, "old" is authoritative for reads and writes + */ + OFF("off"), + + /** + * DualWrite - write to both "old" and "new", "old" is authoritative for reads + */ + DUAL_WRITE("dualwrite"), + + /** + * Shadow - both "new" and "old" versions run with a preference for "old" + */ + SHADOW("shadow"), + + /** + * Live - both "new" and "old" versions run with a preference for "new" + */ + LIVE("live"), + + /** + * RampDown - only read from "new", write to "old" and "new" + */ + RAMP_DOWN("rampdown"), + + /** + * Complete - migration is done + */ + COMPLETE("complete"); + + private final String strValue; + + MigrationStage(final String strValue) { + this.strValue = strValue; + } + + @Override + public String toString() { + return strValue; + } + + /** + * Check if the provided string is a migration stage. + * + * @param strValue The string to check. + * @return True if the string represents a migration stage. + */ + public static boolean isStage(String strValue) { + return Arrays.stream(MigrationStage.values()).anyMatch(item -> item.strValue.equals(strValue)); + } + + /** + * Convert a string into a migration stage. + *

    + * If the string is not a stage, then the provided default will be returned. + * + * @param strValue The string to convert. + * @param defaultStage The default value to use if the string does not represent a migration stage. + * @return The converted migration stage. + */ + public static MigrationStage of(String strValue, MigrationStage defaultStage) { + return Arrays.stream(MigrationStage.values()) + .filter(item -> item.strValue.equals(strValue)) + .findFirst() + .orElse(defaultStage); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStageEnforcingEvaluator.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStageEnforcingEvaluator.java new file mode 100644 index 0000000..bdf2be3 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationStageEnforcingEvaluator.java @@ -0,0 +1,42 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; + +/** + * This class exists to enforce that migration variation results are stages from the {@link MigrationStage} enum. 
+ */ +class MigrationStageEnforcingEvaluator implements EvaluatorInterface { + + private final EvaluatorInterface underlyingEvaluator; + private final LDLogger logger; + + MigrationStageEnforcingEvaluator(EvaluatorInterface underlyingEvaluator, LDLogger logger) { + this.underlyingEvaluator = underlyingEvaluator; + this.logger = logger; + } + + @Override + public EvalResultAndFlag evalAndFlag(String method, String flagKey, LDContext context, LDValue defaultValue, LDValueType requireType, EvaluationOptions options) { + EvalResultAndFlag res = underlyingEvaluator.evalAndFlag(method, flagKey, context, defaultValue, requireType, options); + + EvaluationDetail resDetail = res.getResult().getAsString(); + String resStageString = resDetail.getValue(); + if (!MigrationStage.isStage(resStageString)) { + logger.error("Unrecognized MigrationState for \"{}\"; returning default value.", flagKey); + return new EvalResultAndFlag(EvalResult.error(EvaluationReason.ErrorKind.WRONG_TYPE, defaultValue), res.getFlag()); + } + + return res; + } + + @Override + public FeatureFlagsState allFlagsState(LDContext context, FlagsStateOption... options) { + // this decorator is a pass through for the all flag state case + return underlyingEvaluator.allFlagsState(context, options); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationVariation.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationVariation.java new file mode 100644 index 0000000..d41ef98 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/MigrationVariation.java @@ -0,0 +1,36 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import org.jetbrains.annotations.NotNull; + +/** + * Result of an {@link LDClient#migrationVariation(String, LDContext, MigrationStage)} call. + *

    + * This includes the stage of the migration as well as a tracker. + */ +public final class MigrationVariation { + private final MigrationStage stage; + private final MigrationOpTracker tracker; + + public MigrationVariation(@NotNull MigrationStage stage, @NotNull MigrationOpTracker tracker) { + this.stage = stage; + this.tracker = tracker; + } + + /** + * The result of the flag evaluation. This will be either one of the flag's variations or + * the default value that was passed to {@link LDClient#migrationVariation(String, LDContext, MigrationStage)}. + * @return The migration stage. + */ + public MigrationStage getStage() { + return this.stage; + } + + /** + * A tracker which can be used to generate analytics for the migration. + * @return The tracker. + */ + public MigrationOpTracker getTracker() { + return this.tracker; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/NoOpEventProcessor.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/NoOpEventProcessor.java new file mode 100644 index 0000000..6068496 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/NoOpEventProcessor.java @@ -0,0 +1,47 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; + +import java.io.IOException; + +/** + * An {@link EventProcessor} that does nothing when invoked. 
+ */ +class NoOpEventProcessor implements EventProcessor { + + @Override + public void recordEvaluationEvent(LDContext context, String flagKey, int flagVersion, int variation, LDValue value, + EvaluationReason reason, LDValue defaultValue, String prerequisiteOfFlagKey, + boolean requireFullEvent, Long debugEventsUntilDate, boolean excludeFromSummaries, + Long samplingRatio) { + // no-op + } + + @Override + public void recordIdentifyEvent(LDContext context) { + // no-op + } + + @Override + public void recordCustomEvent(LDContext context, String eventKey, LDValue data, Double metricValue) { + // no-op + } + + @Override + public void recordMigrationEvent(MigrationOpTracker tracker) { + // no-op + } + + @Override + public void flush() { + // no-op + } + + @Override + public void close() throws IOException { + // no-op + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreStatusManager.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreStatusManager.java new file mode 100644 index 0000000..2d8d4cc --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreStatusManager.java @@ -0,0 +1,110 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.Status; + +import java.io.Closeable; +import java.util.concurrent.Callable; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +/** + * Used internally to encapsulate the data store status broadcasting mechanism for PersistentDataStoreWrapper. + *

    + * This is currently only used by PersistentDataStoreWrapper, but encapsulating it in its own class helps with + * clarity and also lets us reuse this logic in tests. + */ +final class PersistentDataStoreStatusManager implements Closeable { + static final int POLL_INTERVAL_MS = 500; // visible for testing + + private final Consumer statusUpdater; + private final ScheduledExecutorService scheduler; + private final Callable statusPollFn; + private final boolean refreshOnRecovery; + private final LDLogger logger; + private volatile boolean lastAvailable; + private volatile ScheduledFuture pollerFuture; + + PersistentDataStoreStatusManager( + boolean refreshOnRecovery, + boolean availableNow, + Callable statusPollFn, + Consumer statusUpdater, + ScheduledExecutorService sharedExecutor, + LDLogger logger + ) { + this.refreshOnRecovery = refreshOnRecovery; + this.lastAvailable = availableNow; + this.statusPollFn = statusPollFn; + this.statusUpdater = statusUpdater; + this.scheduler = sharedExecutor; + this.logger = logger; + } + + public void close() { + synchronized (this) { + if (pollerFuture != null) { + pollerFuture.cancel(true); + pollerFuture = null; + } + } + } + + void updateAvailability(boolean available) { + synchronized (this) { + if (lastAvailable == available) { + return; + } + lastAvailable = available; + } + + Status status = new Status(available, available && refreshOnRecovery); + + if (available) { + logger.warn("Persistent store is available again"); + } + + statusUpdater.accept(status); + + // If the store has just become unavailable, start a poller to detect when it comes back. If it has + // become available, stop any polling we are currently doing. 
+ if (available) { + synchronized (this) { + if (pollerFuture != null) { + pollerFuture.cancel(false); + pollerFuture = null; + } + } + } else { + logger.warn("Detected persistent store unavailability; updates will be cached until it recovers"); + + // Start polling until the store starts working again + Runnable pollerTask = new Runnable() { + public void run() { + try { + if (statusPollFn.call()) { + updateAvailability(true); + } + } catch (Exception e) { + logger.error("Unexpected error from data store status function: {}", LogValues.exceptionSummary(e)); + logger.debug(LogValues.exceptionTrace(e)); + } + } + }; + synchronized (this) { + if (pollerFuture == null) { + pollerFuture = scheduler.scheduleAtFixedRate( + pollerTask, + POLL_INTERVAL_MS, + POLL_INTERVAL_MS, + TimeUnit.MILLISECONDS + ); + } + } + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java new file mode 100644 index 0000000..b58adf3 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapper.java @@ -0,0 +1,463 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.base.Optional; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.common.collect.ImmutableList; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; +import com.google.common.util.concurrent.UncheckedExecutionException; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import 
com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreUpdateSink; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import java.io.IOException; +import java.time.Duration; +import java.util.AbstractMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.atomic.AtomicBoolean; + +import static com.google.common.collect.Iterables.concat; +import static com.google.common.collect.Iterables.filter; +import static com.google.common.collect.Iterables.isEmpty; + +/** + * Package-private implementation of {@link DataStore} that delegates the basic functionality to an + * instance of {@link PersistentDataStore}. It provides optional caching behavior and other logic that + * would otherwise be repeated in every data store implementation. This makes it easier to create new + * database integrations by implementing only the database-specific logic. + *

    + * This class is only constructed by {@link PersistentDataStoreBuilder}. + */ +final class PersistentDataStoreWrapper implements DataStore { + private final PersistentDataStore core; + private final LoadingCache> itemCache; + private final LoadingCache> allCache; + private final LoadingCache initCache; + private final PersistentDataStoreStatusManager statusManager; + private final boolean cacheIndefinitely; + private final Set cachedDataKinds = new HashSet<>(); // this map is used in pollForAvailability() + private final AtomicBoolean inited = new AtomicBoolean(false); + private final ListeningExecutorService cacheExecutor; + private final LDLogger logger; + + PersistentDataStoreWrapper( + final PersistentDataStore core, + Duration cacheTtl, + PersistentDataStoreBuilder.StaleValuesPolicy staleValuesPolicy, + boolean recordCacheStats, + DataStoreUpdateSink dataStoreUpdates, + ScheduledExecutorService sharedExecutor, + LDLogger logger + ) { + this.core = core; + this.logger = logger; + + if (cacheTtl.isZero()) { + itemCache = null; + allCache = null; + initCache = null; + cacheExecutor = null; + cacheIndefinitely = false; + } else { + cacheIndefinitely = cacheTtl.isNegative(); + CacheLoader> itemLoader = new CacheLoader>() { + @Override + public Optional load(CacheKey key) throws Exception { + return Optional.fromNullable(getAndDeserializeItem(key.kind, key.key)); + } + }; + CacheLoader> allLoader = new CacheLoader>() { + @Override + public KeyedItems load(DataKind kind) throws Exception { + return getAllAndDeserialize(kind); + } + }; + CacheLoader initLoader = new CacheLoader() { + @Override + public Boolean load(String key) throws Exception { + return core.isInitialized(); + } + }; + + if (staleValuesPolicy == PersistentDataStoreBuilder.StaleValuesPolicy.REFRESH_ASYNC) { + cacheExecutor = MoreExecutors.listeningDecorator(sharedExecutor); + + // Note that the REFRESH_ASYNC mode is only used for itemCache, not allCache, since retrieving all flags is + // less 
frequently needed and we don't want to incur the extra overhead. + itemLoader = CacheLoader.asyncReloading(itemLoader, cacheExecutor); + } else { + cacheExecutor = null; + } + + itemCache = newCacheBuilder(cacheTtl, staleValuesPolicy, recordCacheStats).build(itemLoader); + allCache = newCacheBuilder(cacheTtl, staleValuesPolicy, recordCacheStats).build(allLoader); + initCache = newCacheBuilder(cacheTtl, staleValuesPolicy, recordCacheStats).build(initLoader); + } + statusManager = new PersistentDataStoreStatusManager( + !cacheIndefinitely, + true, + this::pollAvailabilityAfterOutage, + dataStoreUpdates::updateStatus, + sharedExecutor, + logger + ); + } + + private static CacheBuilder newCacheBuilder( + Duration cacheTtl, + PersistentDataStoreBuilder.StaleValuesPolicy staleValuesPolicy, + boolean recordCacheStats + ) { + CacheBuilder builder = CacheBuilder.newBuilder(); + boolean isInfiniteTtl = cacheTtl.isNegative(); + if (!isInfiniteTtl) { + if (staleValuesPolicy == PersistentDataStoreBuilder.StaleValuesPolicy.EVICT) { + // We are using an "expire after write" cache. This will evict stale values and block while loading the latest + // from the underlying data store. + builder = builder.expireAfterWrite(cacheTtl); + } else { + // We are using a "refresh after write" cache. This will not automatically evict stale values, allowing them + // to be returned if failures occur when updating them. 
+ builder = builder.refreshAfterWrite(cacheTtl); + } + } + if (recordCacheStats) { + builder = builder.recordStats(); + } + return builder; + } + + @Override + public void close() throws IOException { + statusManager.close(); + core.close(); + } + + @Override + public boolean isInitialized() { + if (inited.get()) { + return true; + } + boolean result; + try { + if (initCache != null) { + result = initCache.get(""); + } else { + result = core.isInitialized(); + } + } catch (Exception e) { + result = false; + } + if (result) { + inited.set(true); + } + return result; + } + + @Override + public void init(FullDataSet allData) { + synchronized (cachedDataKinds) { + cachedDataKinds.clear(); + for (Map.Entry> e: allData.getData()) { + cachedDataKinds.add(e.getKey()); + } + } + ImmutableList.Builder>> allBuilder = ImmutableList.builder(); + for (Map.Entry> e0: allData.getData()) { + DataKind kind = e0.getKey(); + KeyedItems items = serializeAll(kind, e0.getValue()); + allBuilder.add(new AbstractMap.SimpleEntry<>(kind, items)); + } + RuntimeException failure = initCore(new FullDataSet<>(allBuilder.build())); + if (itemCache != null && allCache != null) { + itemCache.invalidateAll(); + allCache.invalidateAll(); + if (failure != null && !cacheIndefinitely) { + // Normally, if the underlying store failed to do the update, we do not want to update the cache - + // the idea being that it's better to stay in a consistent state of having old data than to act + // like we have new data but then suddenly fall back to old data when the cache expires. However, + // if the cache TTL is infinite, then it makes sense to update the cache always. 
+ throw failure; + } + for (Map.Entry> e0: allData.getData()) { + DataKind kind = e0.getKey(); + KeyedItems immutableItems = new KeyedItems<>(ImmutableList.copyOf(e0.getValue().getItems())); + allCache.put(kind, immutableItems); + for (Map.Entry e1: e0.getValue().getItems()) { + itemCache.put(CacheKey.forItem(kind, e1.getKey()), Optional.of(e1.getValue())); + } + } + } + if (failure == null || cacheIndefinitely) { + inited.set(true); + } + if (failure != null) { + throw failure; + } + } + + private RuntimeException initCore(FullDataSet allData) { + try { + core.init(allData); + processError(null); + return null; + } catch (RuntimeException e) { + processError(e); + return e; + } + } + + @Override + public ItemDescriptor get(DataKind kind, String key) { + try { + ItemDescriptor ret = itemCache != null ? itemCache.get(CacheKey.forItem(kind, key)).orNull() : + getAndDeserializeItem(kind, key); + processError(null); + return ret; + } catch (Exception e) { + processError(e); + throw getAsRuntimeException(e); + } + } + + @Override + public KeyedItems getAll(DataKind kind) { + try { + KeyedItems ret; + ret = allCache != null ? allCache.get(kind) : getAllAndDeserialize(kind); + processError(null); + return ret; + } catch (Exception e) { + processError(e); + throw getAsRuntimeException(e); + } + } + + private static RuntimeException getAsRuntimeException(Exception e) { + Throwable t = (e instanceof ExecutionException || e instanceof UncheckedExecutionException) + ? e.getCause() // this is a wrapped exception thrown by a cache + : e; + return t instanceof RuntimeException ? 
(RuntimeException)t : new RuntimeException(t); + } + + @Override + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + synchronized (cachedDataKinds) { + cachedDataKinds.add(kind); + } + SerializedItemDescriptor serializedItem = serialize(kind, item); + boolean updated = false; + RuntimeException failure = null; + try { + updated = core.upsert(kind, key, serializedItem); + processError(null); + } catch (RuntimeException e) { + // Normally, if the underlying store failed to do the update, we do not want to update the cache - + // the idea being that it's better to stay in a consistent state of having old data than to act + // like we have new data but then suddenly fall back to old data when the cache expires. However, + // if the cache TTL is infinite, then it makes sense to update the cache always. + processError(e); + if (!cacheIndefinitely) + { + throw e; + } + failure = e; + } + if (itemCache != null) { + CacheKey cacheKey = CacheKey.forItem(kind, key); + if (failure == null) { + if (updated) { + itemCache.put(cacheKey, Optional.of(item)); + } else { + // there was a concurrent modification elsewhere - update the cache to get the new state + itemCache.refresh(cacheKey); + } + } else { + Optional oldItem = itemCache.getIfPresent(cacheKey); + if (oldItem == null || !oldItem.isPresent() || oldItem.get().getVersion() < item.getVersion()) { + itemCache.put(cacheKey, Optional.of(item)); + } + } + } + if (allCache != null) { + // If the cache has a finite TTL, then we should remove the "all items" cache entry to force + // a reread the next time All is called. However, if it's an infinite TTL, we need to just + // update the item within the existing "all items" entry (since we want things to still work + // even if the underlying store is unavailable). 
+ if (cacheIndefinitely) { + KeyedItems cachedAll = allCache.getIfPresent(kind); + allCache.put(kind, updateSingleItem(cachedAll, key, item)); + } else { + allCache.invalidate(kind); + } + } + if (failure != null) { + throw failure; + } + return updated; + } + + @Override + public boolean isStatusMonitoringEnabled() { + return true; + } + + @Override + public CacheStats getCacheStats() { + if (itemCache == null || allCache == null) { + return null; + } + com.google.common.cache.CacheStats itemStats = itemCache.stats(); + com.google.common.cache.CacheStats allStats = allCache.stats(); + return new CacheStats( + itemStats.hitCount() + allStats.hitCount(), + itemStats.missCount() + allStats.missCount(), + itemStats.loadSuccessCount() + allStats.loadSuccessCount(), + itemStats.loadExceptionCount() + allStats.loadExceptionCount(), + itemStats.totalLoadTime() + allStats.totalLoadTime(), + itemStats.evictionCount() + allStats.evictionCount()); + } + + private ItemDescriptor getAndDeserializeItem(DataKind kind, String key) { + SerializedItemDescriptor maybeSerializedItem = core.get(kind, key); + return maybeSerializedItem == null ? 
null : deserialize(kind, maybeSerializedItem); + } + + private KeyedItems getAllAndDeserialize(DataKind kind) { + KeyedItems allItems = core.getAll(kind); + if (isEmpty(allItems.getItems())) { + return new KeyedItems(null); + } + ImmutableList.Builder> b = ImmutableList.builder(); + for (Map.Entry e: allItems.getItems()) { + b.add(new AbstractMap.SimpleEntry<>(e.getKey(), deserialize(kind, e.getValue()))); + } + return new KeyedItems<>(b.build()); + } + + private SerializedItemDescriptor serialize(DataKind kind, ItemDescriptor itemDesc) { + boolean isDeleted = itemDesc.getItem() == null; + return new SerializedItemDescriptor(itemDesc.getVersion(), isDeleted, kind.serialize(itemDesc)); + } + + private KeyedItems serializeAll(DataKind kind, KeyedItems items) { + ImmutableList.Builder> itemsBuilder = ImmutableList.builder(); + for (Map.Entry e: items.getItems()) { + itemsBuilder.add(new AbstractMap.SimpleEntry<>(e.getKey(), serialize(kind, e.getValue()))); + } + return new KeyedItems<>(itemsBuilder.build()); + } + + private ItemDescriptor deserialize(DataKind kind, SerializedItemDescriptor serializedItemDesc) { + if (serializedItemDesc.isDeleted() || serializedItemDesc.getSerializedItem() == null) { + return ItemDescriptor.deletedItem(serializedItemDesc.getVersion()); + } + ItemDescriptor deserializedItem = kind.deserialize(serializedItemDesc.getSerializedItem()); + if (serializedItemDesc.getVersion() == 0 || serializedItemDesc.getVersion() == deserializedItem.getVersion() + || deserializedItem.getItem() == null) { + return deserializedItem; + } + // If the store gave us a version number that isn't what was encoded in the object, trust it + return new ItemDescriptor(serializedItemDesc.getVersion(), deserializedItem.getItem()); + } + + private KeyedItems updateSingleItem(KeyedItems items, String key, ItemDescriptor item) { + // This is somewhat inefficient but it's preferable to use immutable data structures in the cache. 
+ return new KeyedItems<>( + ImmutableList.copyOf(concat( + items == null ? ImmutableList.of() : filter(items.getItems(), e -> !e.getKey().equals(key)), + ImmutableList.>of(new AbstractMap.SimpleEntry<>(key, item)) + ) + )); + } + + private void processError(Throwable error) { + if (error == null) { + // If we're waiting to recover after a failure, we'll let the polling routine take care + // of signaling success. Even if we could signal success a little earlier based on the + // success of whatever operation we just did, we'd rather avoid the overhead of acquiring + // w.statusLock every time we do anything. So we'll just do nothing here. + return; + } + statusManager.updateAvailability(false); + } + + private boolean pollAvailabilityAfterOutage() { + if (!core.isStoreAvailable()) { + return false; + } + + if (cacheIndefinitely && allCache != null) { + // If we're in infinite cache mode, then we can assume the cache has a full set of current + // flag data (since presumably the data source has still been running) and we can just + // write the contents of the cache to the underlying data store. + DataKind[] allKinds; + synchronized (cachedDataKinds) { + allKinds = cachedDataKinds.toArray(new DataKind[cachedDataKinds.size()]); + } + ImmutableList.Builder>> builder = ImmutableList.builder(); + for (DataKind kind: allKinds) { + KeyedItems items = allCache.getIfPresent(kind); + if (items != null) { + builder.add(new AbstractMap.SimpleEntry<>(kind, serializeAll(kind, items))); + } + } + RuntimeException e = initCore(new FullDataSet<>(builder.build())); + if (e == null) { + logger.warn("Successfully updated persistent store from cached data"); + } else { + // We failed to write the cached data to the underlying store. In this case, we should not + // return to a recovered state, but just try this all again next time the poll task runs. 
+ logger.error("Tried to write cached data to persistent store after a store outage, but failed: {}", + LogValues.exceptionSummary(e)); + logger.debug(LogValues.exceptionTrace(e)); + return false; + } + } + + return true; + } + + static final class CacheKey { + final DataKind kind; + final String key; + + public static CacheKey forItem(DataKind kind, String key) { + return new CacheKey(kind, key); + } + + private CacheKey(DataKind kind, String key) { + this.kind = kind; + this.key = key; + } + + @Override + public boolean equals(Object other) { + if (other instanceof CacheKey) { + CacheKey o = (CacheKey) other; + return o.kind.getName().equals(this.kind.getName()) && o.key.equals(this.key); + } + return false; + } + + @Override + public int hashCode() { + return kind.getName().hashCode() * 31 + key.hashCode(); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java new file mode 100644 index 0000000..99d63b5 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/PollingProcessor.java @@ -0,0 +1,135 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.annotations.VisibleForTesting; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.internal.http.HttpErrors.HttpErrorException; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import 
java.io.IOException; +import java.time.Duration; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Future; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import static com.launchdarkly.sdk.internal.http.HttpErrors.checkIfErrorIsRecoverableAndLog; +import static com.launchdarkly.sdk.internal.http.HttpErrors.httpErrorDescription; + +final class PollingProcessor implements DataSource { + private static final String ERROR_CONTEXT_MESSAGE = "on polling request"; + private static final String WILL_RETRY_MESSAGE = "will retry at next scheduled poll interval"; + + @VisibleForTesting final FeatureRequestor requestor; + private final DataSourceUpdateSink dataSourceUpdates; + private final ScheduledExecutorService scheduler; + @VisibleForTesting final Duration pollInterval; + private final AtomicBoolean initialized = new AtomicBoolean(false); + private final CompletableFuture initFuture; + private volatile ScheduledFuture task; + private final LDLogger logger; + + PollingProcessor( + FeatureRequestor requestor, + DataSourceUpdateSink dataSourceUpdates, + ScheduledExecutorService sharedExecutor, + Duration pollInterval, + LDLogger logger + ) { + this.requestor = requestor; // note that HTTP configuration is applied to the requestor when it is created + this.dataSourceUpdates = dataSourceUpdates; + this.scheduler = sharedExecutor; + this.pollInterval = pollInterval; + this.initFuture = new CompletableFuture<>(); + this.logger = logger; + } + + @Override + public boolean isInitialized() { + return initialized.get(); + } + + @Override + public void close() throws IOException { + logger.info("Closing LaunchDarkly PollingProcessor"); + requestor.close(); + + // Even though the shared executor will be shut down when the LDClient is closed, it's still good + // behavior to remove our polling task now - especially 
because we might be running in a test + // environment where there isn't actually an LDClient. + synchronized (this) { + if (task != null) { + task.cancel(true); + task = null; + } + } + } + + @Override + public Future start() { + logger.info("Starting LaunchDarkly polling client with interval: {} milliseconds", + pollInterval.toMillis()); + + synchronized (this) { + if (task == null) { + task = scheduler.scheduleAtFixedRate(this::poll, 0L, pollInterval.toMillis(), TimeUnit.MILLISECONDS); + } + } + + return initFuture; + } + + private void poll() { + try { + // If we already obtained data earlier, and the poll request returns a cached response, then we don't + // want to bother parsing the data or reinitializing the data store. But if we never succeeded in + // storing any data, then we would still want to parse and try to store it even if it's cached. + boolean alreadyInited = initialized.get(); + FullDataSet allData = requestor.getAllData(!alreadyInited); + if (allData == null) { + // This means it was cached, and alreadyInited was true + dataSourceUpdates.updateStatus(State.VALID, null); + } else { + if (dataSourceUpdates.init(allData)) { + dataSourceUpdates.updateStatus(State.VALID, null); + if (!initialized.getAndSet(true)) { + logger.info("Initialized LaunchDarkly client."); + initFuture.complete(null); + } + } + } + } catch (HttpErrorException e) { + ErrorInfo errorInfo = ErrorInfo.fromHttpError(e.getStatus()); + boolean recoverable = checkIfErrorIsRecoverableAndLog(logger, httpErrorDescription(e.getStatus()), + ERROR_CONTEXT_MESSAGE, e.getStatus(), WILL_RETRY_MESSAGE); + if (recoverable) { + dataSourceUpdates.updateStatus(State.INTERRUPTED, errorInfo); + } else { + dataSourceUpdates.updateStatus(State.OFF, errorInfo); + initFuture.complete(null); // if client is initializing, make it stop waiting; has no effect if already inited + if (task != null) { + task.cancel(true); + task = null; + } + } + } catch (IOException e) { + 
/**
 * Simple implementation of semantic version parsing and comparison according to the Semantic
 * Versions 2.0.0 standard (http://semver.org).
 */
final class SemanticVersion implements Comparable<SemanticVersion> {

  // Named capturing groups let parse() refer to components by name; the regex was corrupted in
  // transit (group names stripped) and is restored here. The pattern is compiled once and is final.
  private static final Pattern VERSION_REGEX = Pattern.compile(
      "^(?<major>0|[1-9]\\d*)(\\.(?<minor>0|[1-9]\\d*))?(\\.(?<patch>0|[1-9]\\d*))?"
      + "(\\-(?<prerel>[0-9A-Za-z\\-\\.]+))?(\\+(?<build>[0-9A-Za-z\\-\\.]+))?$");

  /** Thrown by {@link #parse} when the input is not a valid semantic version. */
  @SuppressWarnings("serial")
  public static class InvalidVersionException extends Exception {
    public InvalidVersionException(String message) {
      super(message);
    }
  }

  private final int major;
  private final int minor;
  private final int patch;
  private final String prerelease;
  private final String[] prereleaseComponents; // prerelease split on "." for precedence comparison
  private final String build;

  public SemanticVersion(int major, int minor, int patch, String prerelease, String build) {
    this.major = major;
    this.minor = minor;
    this.patch = patch;
    this.prerelease = prerelease;
    this.prereleaseComponents = prerelease == null ? null : prerelease.split("\\.");
    this.build = build;
  }

  public int getMajor() {
    return major;
  }

  public int getMinor() {
    return minor;
  }

  public int getPatch() {
    return patch;
  }

  public String getPrerelease() {
    return prerelease;
  }

  public String getBuild() {
    return build;
  }

  /**
   * Attempts to parse a string as a semantic version according to the Semver 2.0.0 specification.
   * @param input the input string
   * @return a SemanticVersion instance
   * @throws InvalidVersionException if the version could not be parsed
   */
  public static SemanticVersion parse(String input) throws InvalidVersionException {
    return parse(input, false);
  }

  /**
   * Attempts to parse a string as a semantic version according to the Semver 2.0.0 specification,
   * except that the minor and patch versions may optionally be omitted.
   * @param input the input string
   * @param allowMissingMinorAndPatch true if the parser should tolerate the absence of a minor and/or
   *   patch version; if absent, they will be treated as zero
   * @return a SemanticVersion instance
   * @throws InvalidVersionException if the version could not be parsed
   */
  public static SemanticVersion parse(String input, boolean allowMissingMinorAndPatch) throws InvalidVersionException {
    Matcher matcher = VERSION_REGEX.matcher(input);
    if (!matcher.matches()) {
      throw new InvalidVersionException("Invalid semantic version");
    }
    int major, minor, patch;
    try {
      major = Integer.parseInt(matcher.group("major"));
      if (!allowMissingMinorAndPatch) {
        if (matcher.group("minor") == null || matcher.group("patch") == null) {
          throw new InvalidVersionException("Invalid semantic version");
        }
      }
      minor = matcher.group("minor") == null ? 0 : Integer.parseInt(matcher.group("minor"));
      patch = matcher.group("patch") == null ? 0 : Integer.parseInt(matcher.group("patch"));
    } catch (NumberFormatException e) {
      // COVERAGE: This should be impossible, because our regex should only match if these strings are numeric.
      throw new InvalidVersionException("Invalid semantic version");
    }
    String prerelease = matcher.group("prerel");
    String build = matcher.group("build");
    return new SemanticVersion(major, minor, patch, prerelease, build);
  }

  @Override
  public int compareTo(SemanticVersion other) {
    return comparePrecedence(other);
  }

  /**
   * Compares this object with another SemanticVersion according to Semver 2.0.0 precedence rules.
   * @param other another SemanticVersion
   * @return 0 if equal, -1 if the current object has lower precedence, or 1 if the current object
   *   has higher precedence
   */
  public int comparePrecedence(SemanticVersion other) {
    if (other == null) {
      return 1;
    }
    if (major != other.major) {
      return Integer.compare(major, other.major);
    }
    if (minor != other.minor) {
      return Integer.compare(minor, other.minor);
    }
    if (patch != other.patch) {
      return Integer.compare(patch, other.patch);
    }
    if (prerelease == null && other.prerelease == null) {
      return 0;
    }
    // *no* prerelease component always has higher precedence than *any* prerelease component
    if (prerelease == null) {
      return 1;
    }
    if (other.prerelease == null) {
      return -1;
    }
    return compareIdentifiers(prereleaseComponents, other.prereleaseComponents);
  }

  // Compares dot-separated prerelease identifiers per Semver 2.0.0 rule 11.
  private int compareIdentifiers(String[] ids1, String[] ids2) {
    for (int i = 0; ; i++) {
      if (i >= ids1.length)
      {
        // x.y is always less than x.y.z
        return (i >= ids2.length) ? 0 : -1;
      }
      if (i >= ids2.length)
      {
        return 1;
      }
      // each sub-identifier is compared numerically if both are numeric; if both are non-numeric,
      // they're compared as strings; otherwise, the numeric one is the lesser one
      int n1 = 0, n2 = 0, d;
      boolean isNum1, isNum2;
      try {
        n1 = Integer.parseInt(ids1[i]);
        isNum1 = true;
      } catch (NumberFormatException e) {
        isNum1 = false;
      }
      try {
        n2 = Integer.parseInt(ids2[i]);
        isNum2 = true;
      } catch (NumberFormatException e) {
        isNum2 = false;
      }
      if (isNum1 && isNum2)
      {
        d = Integer.compare(n1, n2);
      }
      else
      {
        d = isNum1 ? -1 : (isNum2 ? 1 : ids1[i].compareTo(ids2[i]));
      }
      if (d != 0)
      {
        return d;
      }
    }
  }
}
1 : ids1[i].compareTo(ids2[i])); + } + if (d != 0) + { + return d; + } + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEvents.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEvents.java new file mode 100644 index 0000000..d01b555 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEvents.java @@ -0,0 +1,97 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.internal.events.DiagnosticConfigProperty; +import com.launchdarkly.sdk.internal.events.DiagnosticStore; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.DiagnosticDescription; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; + +abstract class ServerSideDiagnosticEvents { + public static DiagnosticStore.SdkDiagnosticParams getSdkDiagnosticParams( + ClientContext clientContext, + LDConfig config + ) { + return new DiagnosticStore.SdkDiagnosticParams( + clientContext.getSdkKey(), + "java-server-sdk", + Version.SDK_VERSION, + "java", + makePlatformData(), + ImmutableMap.copyOf(clientContext.getHttp().getDefaultHeaders()), + makeConfigProperties(clientContext, config) + ); + } + + private static ImmutableList makeConfigProperties(ClientContext clientContext, LDConfig config) { + ImmutableList.Builder listBuilder = ImmutableList.builder(); + HttpConfiguration httpConfig = clientContext.getHttp(); + + // Add the top-level properties that are not specific to a particular component type. 
+ ObjectBuilder builder = LDValue.buildObject(); + builder.put(DiagnosticConfigProperty.CONNECT_TIMEOUT_MILLIS.name, httpConfig.getConnectTimeout().toMillis()); + builder.put(DiagnosticConfigProperty.SOCKET_TIMEOUT_MILLIS.name, httpConfig.getSocketTimeout().toMillis()); + builder.put(DiagnosticConfigProperty.USING_PROXY.name, httpConfig.getProxy() != null); + builder.put(DiagnosticConfigProperty.USING_PROXY_AUTHENTICATOR.name, httpConfig.getProxyAuthentication() != null); + builder.put(DiagnosticConfigProperty.START_WAIT_MILLIS.name, config.startWait.toMillis()); + listBuilder.add(builder.build()); + + // Allow each pluggable component to describe its own relevant properties. + listBuilder.add(describeComponent(config.dataStore, clientContext, DiagnosticConfigProperty.DATA_STORE_TYPE.name)); + listBuilder.add(describeComponent(config.dataSource, clientContext, null)); + listBuilder.add(describeComponent(config.events, clientContext, null)); + return listBuilder.build(); + } + + // Attempts to add relevant configuration properties, if any, from a customizable component: + // - If the component does not implement DiagnosticDescription, set the defaultPropertyName property to "custom". + // - If it does implement DiagnosticDescription, call its describeConfiguration() method to get a value. + // - If the value is a string, then set the defaultPropertyName property to that value. + // - If the value is an object, then copy all of its properties as long as they are ones we recognize + // and have the expected type. 
+ private static LDValue describeComponent( + Object component, + ClientContext clientContext, + String defaultPropertyName + ) { + if (!(component instanceof DiagnosticDescription)) { + if (defaultPropertyName != null) { + return LDValue.buildObject().put(defaultPropertyName, "custom").build(); + } + return LDValue.ofNull(); + } + LDValue componentDesc = LDValue.normalize(((DiagnosticDescription)component).describeConfiguration(clientContext)); + if (defaultPropertyName == null) { + return componentDesc; + } + return LDValue.buildObject().put(defaultPropertyName, + componentDesc.isString() ? componentDesc.stringValue() : "custom").build(); + } + + private static LDValue makePlatformData() { + // We're getting these properties in the server-side-specific logic because they don't return + // useful values in Android. + return LDValue.buildObject() + .put("osName", normalizeOsName(System.getProperty("os.name"))) + .put("javaVendor", System.getProperty("java.vendor")) + .put("javaVersion", System.getProperty("java.version")) + .build(); + } + + private static String normalizeOsName(String osName) { + // For our diagnostics data, we prefer the standard names "Linux", "MacOS", and "Windows". + // "Linux" is already what the JRE returns in Linux. In Windows, we get "Windows 10" etc. 
+ if (osName != null) { + if (osName.equals("Mac OS X")) { + return "MacOS"; + } + if (osName.startsWith("Windows")) { + return "Windows"; + } + } + return osName; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicator.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicator.java new file mode 100644 index 0000000..37fb78f --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicator.java @@ -0,0 +1,39 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.internal.events.EventContextDeduplicator; + +import java.time.Duration; + +final class ServerSideEventContextDeduplicator implements EventContextDeduplicator { + private final SimpleLRUCache contextKeys; + private final Duration flushInterval; + + public ServerSideEventContextDeduplicator( + int capacity, + Duration flushInterval + ) { + this.contextKeys = new SimpleLRUCache<>(capacity); + this.flushInterval = flushInterval; + } + + @Override + public Long getFlushInterval() { + return flushInterval.toMillis(); + } + + @Override + public boolean processContext(LDContext context) { + String key = context.getFullyQualifiedKey(); + if (key == null || key.isEmpty()) { + return false; + } + String previousValue = contextKeys.put(key, key); + return previousValue == null; + } + + @Override + public void flush() { + contextKeys.clear(); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/SimpleLRUCache.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/SimpleLRUCache.java new file mode 100644 index 0000000..cc79f6c --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/SimpleLRUCache.java @@ -0,0 +1,24 @@ +package com.launchdarkly.sdk.server; + +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * A very basic implementation of a LRU cache with a 
fixed capacity. Note that in this + * implementation, entries only become new again when written, not when read. + * See: http://chriswu.me/blog/a-lru-cache-in-10-lines-of-java/ + */ +@SuppressWarnings("serial") +class SimpleLRUCache extends LinkedHashMap { + private final int capacity; + + SimpleLRUCache(int capacity) { + super(16, 0.75f, true); + this.capacity = capacity; + } + + @Override + protected boolean removeEldestEntry(Map.Entry eldest) { + return size() > capacity; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StandardEndpoints.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StandardEndpoints.java new file mode 100644 index 0000000..867d0e8 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StandardEndpoints.java @@ -0,0 +1,52 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; + +import java.net.URI; + +abstract class StandardEndpoints { + private StandardEndpoints() {} + + static final URI DEFAULT_STREAMING_BASE_URI = URI.create("https://stream.launchdarkly.com"); + static final URI DEFAULT_POLLING_BASE_URI = URI.create("https://app.launchdarkly.com"); + static final URI DEFAULT_EVENTS_BASE_URI = URI.create("https://events.launchdarkly.com"); + + static final String STREAMING_REQUEST_PATH = "/all"; + static final String POLLING_REQUEST_PATH = "/sdk/latest-all"; + + /** + * Internal method to decide which URI a given component should connect to. + *

    + * Always returns some URI, falling back on the default if necessary, but logs a warning if we detect that the application + * set some custom endpoints but not this one. + * + * @param serviceEndpointsValue the value set in ServiceEndpoints (this is either the default URI, a custom URI, or null) + * @param defaultValue the constant default URI value defined in StandardEndpoints + * @param description a human-readable string for the type of endpoint being selected, for logging purposes + * @param logger the logger to which we should print the warning, if needed + * @return the base URI we should connect to + */ + static URI selectBaseUri(URI serviceEndpointsValue, URI defaultValue, String description, LDLogger logger) { + if (serviceEndpointsValue != null) { + return serviceEndpointsValue; + } + logger.warn("You have set custom ServiceEndpoints without specifying the {} base URI; connections may not work properly", description); + return defaultValue; + } + + /** + * Internal method to determine whether a given base URI was set to a custom value or not. + *

    + * This boolean value is only used for our diagnostic events. We only check if the value + * differs from the default; if the base URI was "overridden" in configuration, but + * happens to be equal to the default URI, we don't count that as custom + * for the purposes of this diagnostic. + * + * @param serviceEndpointsValue the value set in ServiceEndpoints + * @param defaultValue the constant default URI value defined in StandardEndpoints + * @return true iff the base URI was customized + */ + static boolean isCustomBaseUri(URI serviceEndpointsValue, URI defaultValue) { + return serviceEndpointsValue != null && !serviceEndpointsValue.equals(defaultValue); + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessor.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessor.java new file mode 100644 index 0000000..773ae7e --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessor.java @@ -0,0 +1,430 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.annotations.VisibleForTesting; +import com.google.gson.JsonParseException; +import com.google.gson.stream.JsonReader; +import com.launchdarkly.eventsource.ConnectStrategy; +import com.launchdarkly.eventsource.ErrorStrategy; +import com.launchdarkly.eventsource.EventSource; +import com.launchdarkly.eventsource.FaultEvent; +import com.launchdarkly.eventsource.HttpConnectStrategy; +import com.launchdarkly.eventsource.MessageEvent; +import com.launchdarkly.eventsource.StreamClosedByCallerException; +import com.launchdarkly.eventsource.StreamClosedByServerException; +import com.launchdarkly.eventsource.StreamClosedWithIncompleteMessageException; +import com.launchdarkly.eventsource.StreamEvent; +import com.launchdarkly.eventsource.StreamException; +import com.launchdarkly.eventsource.StreamHttpErrorException; +import com.launchdarkly.eventsource.StreamIOException; +import 
com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.internal.events.DiagnosticStore; +import com.launchdarkly.sdk.internal.http.HttpConsts; +import com.launchdarkly.sdk.internal.http.HttpHelpers; +import com.launchdarkly.sdk.internal.http.HttpProperties; +import com.launchdarkly.sdk.server.StreamProcessorEvents.DeleteData; +import com.launchdarkly.sdk.server.StreamProcessorEvents.PatchData; +import com.launchdarkly.sdk.server.StreamProcessorEvents.PutData; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import java.io.IOException; +import java.io.Reader; +import java.net.URI; +import java.time.Duration; +import java.time.Instant; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; + +import static com.launchdarkly.sdk.internal.http.HttpErrors.checkIfErrorIsRecoverableAndLog; +import static com.launchdarkly.sdk.internal.http.HttpErrors.httpErrorDescription; + +import okhttp3.Headers; + +/** + * Implementation of the streaming data source, not including the lower-level SSE implementation which is in + * okhttp-eventsource. + * + * Error handling works as follows: + * 1. If any event is malformed, we must assume the stream is broken and we may have missed updates. 
Set the + * data source state to INTERRUPTED, with an error kind of INVALID_DATA, and restart the stream. + * 2. If we try to put updates into the data store and we get an error, we must assume something's wrong with the + * data store. We don't have to log this error because it is logged by DataSourceUpdatesImpl, which will also set + * our state to INTERRUPTED for us. + * 2a. If the data store supports status notifications (which all persistent stores normally do), then we can + * assume it has entered a failed state and will notify us once it is working again. If and when it recovers, then + * it will tell us whether we need to restart the stream (to ensure that we haven't missed any updates), or + * whether it has already persisted all of the stream updates we received during the outage. + * 2b. If the data store doesn't support status notifications (which is normally only true of the in-memory store) + * then we don't know the significance of the error, but we must assume that updates have been lost, so we'll + * restart the stream. + * 3. If we receive an unrecoverable error like HTTP 401, we close the stream and don't retry, and set the state + * to OFF. Any other HTTP error or network error causes a retry with backoff, with a state of INTERRUPTED. + * 4. We set the Future returned by start() to tell the client initialization logic that initialization has either + * succeeded (we got an initial payload and successfully stored it) or permanently failed (we got a 401, etc.). + * Otherwise, the client initialization method may time out but we will still be retrying in the background, and + * if we succeed then the client can detect that we're initialized now by calling our Initialized method. 
+ */ +final class StreamProcessor implements DataSource { + private static final String PUT = "put"; + private static final String PATCH = "patch"; + private static final String DELETE = "delete"; + private static final Duration DEAD_CONNECTION_INTERVAL = Duration.ofSeconds(300); + private static final String ERROR_CONTEXT_MESSAGE = "in stream connection"; + private static final String WILL_RETRY_MESSAGE = "will retry"; + + private final DataSourceUpdateSink dataSourceUpdates; + private final HttpProperties httpProperties; + private final Headers headers; + @VisibleForTesting + final URI streamUri; + @VisibleForTesting + final Duration initialReconnectDelay; + private final DiagnosticStore diagnosticAccumulator; + private final int threadPriority; + private final DataStoreStatusProvider.StatusListener statusListener; + private volatile EventSource es; + private final AtomicBoolean initialized = new AtomicBoolean(false); + private final AtomicBoolean closed = new AtomicBoolean(false); + private volatile long esStarted = 0; + private volatile boolean lastStoreUpdateFailed = false; + private final LDLogger logger; + + StreamProcessor( + HttpProperties httpProperties, + DataSourceUpdateSink dataSourceUpdates, + int threadPriority, + DiagnosticStore diagnosticAccumulator, + URI streamUri, + String payloadFilter, + Duration initialReconnectDelay, + LDLogger logger) { + this.dataSourceUpdates = dataSourceUpdates; + this.httpProperties = httpProperties; + this.diagnosticAccumulator = diagnosticAccumulator; + this.threadPriority = threadPriority; + this.initialReconnectDelay = initialReconnectDelay; + this.logger = logger; + + URI tempUri = HttpHelpers.concatenateUriPath(streamUri, StandardEndpoints.STREAMING_REQUEST_PATH); + if (payloadFilter != null) { + if (!payloadFilter.isEmpty()) { + tempUri = HttpHelpers.addQueryParam(tempUri, HttpConsts.QUERY_PARAM_FILTER, payloadFilter); + } else { + logger.info("Payload filter \"{}\" is not valid, not applying filter.", 
payloadFilter); + } + } + this.streamUri = tempUri; + + + this.headers = httpProperties.toHeadersBuilder() + .add("Accept", "text/event-stream") + .build(); + + if (dataSourceUpdates.getDataStoreStatusProvider() != null && + dataSourceUpdates.getDataStoreStatusProvider().isStatusMonitoringEnabled()) { + this.statusListener = this::onStoreStatusChanged; + dataSourceUpdates.getDataStoreStatusProvider().addStatusListener(statusListener); + } else { + this.statusListener = null; + } + } + + private void onStoreStatusChanged(DataStoreStatusProvider.Status newStatus) { + if (newStatus.isAvailable()) { + if (newStatus.isRefreshNeeded()) { + // The store has just transitioned from unavailable to available, and we can't guarantee that + // all of the latest data got cached, so let's restart the stream to refresh all the data. + EventSource stream = es; + if (stream != null) { + logger.warn("Restarting stream to refresh data after data store outage"); + stream.interrupt(); + } + } + } + } + + @Override + public Future start() { + final CompletableFuture initFuture = new CompletableFuture<>(); + + // Notes about the configuration of the EventSource below: + // + // 1. Setting streamEventData(true) is an optimization to let us read the event's data field directly + // from HTTP response stream, rather than waiting for the whole event to be buffered in memory. See + // the Javadoc for EventSource.Builder.streamEventData for more details. This relies on an assumption + // that the LD streaming endpoints will always send the "event:" field before the "data:" field. + // + // 2. The readTimeout here is not the same read timeout that can be set in LDConfig. We default to a + // smaller one there because we don't expect long delays within any *non*-streaming response that the + // LD client gets. A read timeout on the stream will result in the connection being cycled, so we set + // this to be slightly more than the expected interval between heartbeat signals. 
+ + HttpConnectStrategy eventSourceHttpConfig = ConnectStrategy.http(this.streamUri) + .headers(headers) + .clientBuilderActions(clientBuilder -> { + httpProperties.applyToHttpClientBuilder(clientBuilder); + }) + // Set readTimeout last, to ensure that this hard-coded value overrides any other read + // timeout that might have been set by httpProperties (see comment about readTimeout above). + .readTimeout(DEAD_CONNECTION_INTERVAL.toMillis(), TimeUnit.MILLISECONDS); + EventSource.Builder builder = new EventSource.Builder(eventSourceHttpConfig) + .errorStrategy(ErrorStrategy.alwaysContinue()) + // alwaysContinue means we want EventSource to give us a FaultEvent rather + // than throwing an exception if the stream fails + .logger(logger) + .readBufferSize(5000) + .streamEventData(true) + .expectFields("event") + .retryDelay(initialReconnectDelay.toMillis(), TimeUnit.MILLISECONDS); + es = builder.build(); + + Thread thread = new Thread(() -> { + esStarted = System.currentTimeMillis(); + + // We are deliberately not calling es.start() here, but just iterating over es.anyEvents(). + // EventSource will start the stream connection either way, but if we called start(), it + // would swallow any FaultEvents that happened during the initial conection attempt; we + // want to know about those. + try { + for (StreamEvent event: es.anyEvents()) { + if (!handleEvent(event, initFuture)) { + // handleEvent returns false if we should fall through and end the thread + break; + } + } + } catch (Exception e) { + // Any uncaught runtime exception at this point would be coming from es.anyEvents(). + // That's not expected-- all deliberate EventSource exceptions are checked exceptions. + // So we have to assume something is wrong that we can't recover from at this point, + // and just let the thread terminate. That's better than having the thread be killed + // by an uncaught exception. 
+ if (closed.get()) { + return; // ignore any exception that's just a side effect of stopping the EventSource + } + logger.error("Stream thread has ended due to unexpected exception: {}", LogValues.exceptionSummary(e)); + // deliberately log stacktrace at error level since this is an unusual circumstance + logger.error(LogValues.exceptionTrace(e)); + } + }); + thread.setName("LaunchDarkly-streaming"); + thread.setDaemon(true); + thread.setPriority(threadPriority); + thread.start(); + + return initFuture; + } + + private void recordStreamInit(boolean failed) { + if (diagnosticAccumulator != null && esStarted != 0) { + diagnosticAccumulator.recordStreamInit(esStarted, System.currentTimeMillis() - esStarted, failed); + } + } + + @Override + public void close() throws IOException { + if (closed.getAndSet(true)) { + return; // was already closed + } + logger.info("Closing LaunchDarkly StreamProcessor"); + if (statusListener != null) { + dataSourceUpdates.getDataStoreStatusProvider().removeStatusListener(statusListener); + } + if (es != null) { + es.close(); + } + dataSourceUpdates.updateStatus(State.OFF, null); + } + + @Override + public boolean isInitialized() { + return initialized.get(); + } + + // Handles a single StreamEvent and returns true if we should keep the stream alive, + // or false if we should shut down permanently. 
  private boolean handleEvent(StreamEvent event, CompletableFuture<Void> initFuture) {
    if (closed.get()) {
      return false; // processor was shut down; stop consuming events
    }
    logger.debug("Received StreamEvent: {}", event);
    if (event instanceof MessageEvent) {
      handleMessage((MessageEvent)event, initFuture);
    } else if (event instanceof FaultEvent) {
      return handleError(((FaultEvent)event).getCause(), initFuture);
    }
    return true;
  }

  // Dispatches a single SSE message ("put"/"patch"/"delete") to the appropriate handler and
  // updates the data source status accordingly. See the error-handling comments at the top of
  // this class for how each failure category is treated.
  private void handleMessage(MessageEvent event, CompletableFuture<Void> initFuture) {
    try {
      switch (event.getEventName()) {
        case PUT:
          handlePut(event.getDataReader(), initFuture);
          break;

        case PATCH:
          handlePatch(event.getDataReader());
          break;

        case DELETE:
          handleDelete(event.getDataReader());
          break;

        default:
          logger.warn("Unexpected event found in stream: {}", event.getEventName());
          break;
      }
      lastStoreUpdateFailed = false;
      dataSourceUpdates.updateStatus(State.VALID, null);
    } catch (StreamInputException e) {
      if (exceptionHasCause(e, StreamClosedWithIncompleteMessageException.class)) {
        // JSON parsing failed because the event was cut off prematurely-- because the
        // stream got closed. In this case we should simply throw the event away; the
        // closing of the stream will be handled separately on our next pass through
        // the loop, and is logged separately. There's no point in logging an error
        // about invalid JSON when the real problem is a broken connection; invalid
        // JSON is significant only if we think we have a complete message.
        return;
      }
      logger.error("LaunchDarkly service request failed or received invalid data: {}",
          LogValues.exceptionSummary(e));
      logger.debug(LogValues.exceptionTrace(e));

      ErrorInfo errorInfo = new ErrorInfo(
          e.getCause() instanceof IOException ? ErrorKind.NETWORK_ERROR : ErrorKind.INVALID_DATA,
          0,
          e.getCause() == null ? e.getMessage() : e.getCause().toString(),
          Instant.now()
          );
      dataSourceUpdates.updateStatus(State.INTERRUPTED, errorInfo);

      // Restart the stream: a malformed event means we may have missed updates (item 1 in the
      // error-handling comments at the top of this class).
      es.interrupt();
    } catch (StreamStoreException e) {
      // See item 2 in error handling comments at top of class
      if (statusListener == null) {
        // Item 2b: no store status monitoring, so we must assume updates were lost and restart.
        if (!lastStoreUpdateFailed) {
          logger.warn("Restarting stream to ensure that we have the latest data");
        }
        es.interrupt();
      }
      lastStoreUpdateFailed = true;
    } catch (Exception e) {
      logger.warn("Unexpected error from stream processor: {}", LogValues.exceptionSummary(e));
      logger.debug(LogValues.exceptionTrace(e));
    }
  }

  // Returns true if e, or any exception in its cause chain, is an instance of class c.
  private static boolean exceptionHasCause(Throwable e, Class<?> c) {
    if (c.isAssignableFrom(e.getClass())) {
      return true;
    }
    return e.getCause() != null && exceptionHasCause(e.getCause(), c);
  }

  // Handles a "put" event: replaces the entire data set, records stream initialization time for
  // diagnostics, and completes the init future the first time a put is successfully stored.
  private void handlePut(Reader eventData, CompletableFuture<Void> initFuture)
      throws StreamInputException, StreamStoreException {
    recordStreamInit(false);
    esStarted = 0; // reset so a later reconnect gets a fresh timing measurement
    PutData putData = parseStreamJson(StreamProcessorEvents::parsePutData, eventData);
    if (!dataSourceUpdates.init(putData.data)) {
      throw new StreamStoreException();
    }
    if (!initialized.getAndSet(true)) {
      initFuture.complete(null);
      logger.info("Initialized LaunchDarkly client.");
    }
  }

  // Handles a "patch" event: upserts a single flag or segment. A null kind means the event's
  // path was unrecognized and the event should be ignored (see StreamProcessorEvents).
  private void handlePatch(Reader eventData) throws StreamInputException, StreamStoreException {
    PatchData data = parseStreamJson(StreamProcessorEvents::parsePatchData, eventData);
    if (data.kind == null) {
      return;
    }
    if (!dataSourceUpdates.upsert(data.kind, data.key, data.item)) {
      throw new StreamStoreException();
    }
  }

  // Handles a "delete" event: upserts a versioned deleted-item placeholder so that stale
  // updates for the same key cannot resurrect it. A null kind means ignore the event.
  private void handleDelete(Reader eventData) throws StreamInputException, StreamStoreException {
    DeleteData data = parseStreamJson(StreamProcessorEvents::parseDeleteData, eventData);
    if (data.kind == null) {
      return;
    }
    ItemDescriptor placeholder = new ItemDescriptor(data.version, null);
    if (!dataSourceUpdates.upsert(data.kind, data.key, placeholder)) {
      throw new StreamStoreException();
    }
  }

  // Handles a stream fault. Returns true if EventSource should reconnect (recoverable errors),
  // false if the stream should be shut down permanently (e.g. HTTP 401).
  private boolean handleError(StreamException e, CompletableFuture<Void> initFuture) {
    boolean streamFailed = true;
    if (e instanceof StreamClosedByCallerException) {
      // This indicates that we ourselves deliberately restarted the stream, so we don't
      // treat that as a failure in our analytics.
      streamFailed = false;
    } else {
      logger.warn("Encountered EventSource error: {}", LogValues.exceptionSummary(e));
    }
    recordStreamInit(streamFailed);

    if (e instanceof StreamHttpErrorException) {
      int status = ((StreamHttpErrorException)e).getCode();
      ErrorInfo errorInfo = ErrorInfo.fromHttpError(status);

      boolean recoverable = checkIfErrorIsRecoverableAndLog(logger, httpErrorDescription(status),
          ERROR_CONTEXT_MESSAGE, status, WILL_RETRY_MESSAGE);
      if (recoverable) {
        dataSourceUpdates.updateStatus(State.INTERRUPTED, errorInfo);
        esStarted = System.currentTimeMillis();
        return true; // allow reconnect
      } else {
        dataSourceUpdates.updateStatus(State.OFF, errorInfo);
        initFuture.complete(null); // if client is initializing, make it stop waiting; has no effect if already inited
        return false; // don't reconnect
      }
    }

    boolean isNetworkError = e instanceof StreamIOException || e instanceof StreamClosedByServerException;
    checkIfErrorIsRecoverableAndLog(logger, e.toString(), ERROR_CONTEXT_MESSAGE, 0, WILL_RETRY_MESSAGE);
    ErrorInfo errorInfo = ErrorInfo.fromException(isNetworkError ? ErrorKind.NETWORK_ERROR : ErrorKind.UNKNOWN, e);
    dataSourceUpdates.updateStatus(State.INTERRUPTED, errorInfo);
    return true; // allow reconnect
  }

  // Runs one of the StreamProcessorEvents parsers against the event data, translating every
  // parsing/IO failure into StreamInputException so callers have a single failure type to handle.
  private static <T> T parseStreamJson(Function<JsonReader, T> parser, Reader r) throws StreamInputException {
    try {
      try (JsonReader jr = new JsonReader(r)) {
        return parser.apply(jr);
      }
    } catch (JsonParseException e) {
      throw new StreamInputException(e);
    } catch (SerializationException e) {
      throw new StreamInputException(e);
    } catch (IOException e) {
      throw new StreamInputException(e);
    }
  }

  // StreamInputException is either a JSON parsing error *or* a failure to query another endpoint
  // (for indirect/put or indirect/patch); either way, it implies that we were unable to get valid data from LD services.
  @SuppressWarnings("serial")
  private static final class StreamInputException extends Exception {
    public StreamInputException(Throwable cause) {
      super(cause);
    }
  }

  // This exception class indicates that the data store failed to persist an update.
+ @SuppressWarnings("serial") + private static final class StreamStoreException extends Exception {} +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessorEvents.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessorEvents.java new file mode 100644 index 0000000..06fb6b9 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/StreamProcessorEvents.java @@ -0,0 +1,270 @@ +package com.launchdarkly.sdk.server; + +import com.google.gson.JsonElement; +import com.google.gson.JsonParseException; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonToken; +import com.launchdarkly.sdk.server.DataModel.VersionedData; +import com.launchdarkly.sdk.server.DataModelDependencies.KindAndKey; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import java.io.IOException; + +import static com.launchdarkly.sdk.server.DataModel.ALL_DATA_KINDS; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.DataModelSerialization.deserializeFromJsonReader; +import static com.launchdarkly.sdk.server.DataModelSerialization.deserializeFromParsedJson; +import static com.launchdarkly.sdk.server.DataModelSerialization.parseFullDataSet; +import static com.launchdarkly.sdk.server.JsonHelpers.gsonInstance; + +// Deserialization of stream message data is all encapsulated here, so StreamProcessor can +// deal with just the logical behavior of the stream and we can test this logic separately. +// The parseFullDataSet +// +// All of the parsing methods have the following behavior: +// +// - They take the input data as a JsonReader, which the caller is responsible for constructing. 
+// +// - A SerializationException is thrown for any malformed data. That includes 1. totally invalid +// JSON, 2. well-formed JSON that is missing a necessary property for this message type. The Gson +// parser can throw various kinds of unchecked exceptions for things like wrong data types, but we +// wrap them all in SerializationException. +// +// - For messages that have a "path" property, which might be for instance "/flags/xyz" to refer +// to a feature flag with the key "xyz", an unrecognized path like "/cats/Lucy" is not considered +// an error since it might mean LaunchDarkly now supports some new kind of data the SDK can't yet +// use and should ignore. In this case we simply return null in place of a DataKind. +abstract class StreamProcessorEvents { + // This is the logical representation of the data in the "put" event. In the JSON representation, + // the "data" property is actually a map of maps, but the schema we use internally is a list of + // lists instead. + // + // The "path" property is normally always "/"; the LD streaming service sends this property, but + // some versions of Relay do not, so we do not require it. + // + // Example JSON representation: + // + // { + // "path": "/", + // "data": { + // "flags": { + // "flag1": { "key": "flag1", "version": 1, ...etc. }, + // "flag2": { "key": "flag2", "version": 1, ...etc. }, + // }, + // "segments": { + // "segment1": { "key", "segment1", "version": 1, ...etc. } + // } + // } + // } + static final class PutData { + final String path; + final FullDataSet data; + + PutData(String path, FullDataSet data) { + this.path = path; + this.data = data; + } + } + + // This is the logical representation of the data in the "patch" event. In the JSON representation, + // there is a "path" property in the format "/flags/key" or "/segments/key", which we convert into + // Kind and Key when we parse it. 
The "data" property is the JSON representation of the flag or + // segment, which we deserialize into an ItemDescriptor. + // + // Example JSON representation: + // + // { + // "path": "/flags/flagkey", + // "data": { + // "key": "flagkey", + // "version": 2, ...etc. + // } + // } + static final class PatchData { + final DataKind kind; + final String key; + final ItemDescriptor item; + + PatchData(DataKind kind, String key, ItemDescriptor item) { + this.kind = kind; + this.key = key; + this.item = item; + } + } + + // This is the logical representation of the data in the "delete" event. In the JSON representation, + // there is a "path" property in the format "/flags/key" or "/segments/key", which we convert into + // Kind and Key when we parse it. + // + // Example JSON representation: + // + // { + // "path": "/flags/flagkey", + // "version": 3 + // } + static final class DeleteData { + final DataKind kind; + final String key; + final int version; + + public DeleteData(DataKind kind, String key, int version) { + this.kind = kind; + this.key = key; + this.version = version; + } + } + + static PutData parsePutData(JsonReader jr) { + String path = null; + FullDataSet data = null; + + try { + jr.beginObject(); + while (jr.peek() != JsonToken.END_OBJECT) { + String prop = jr.nextName(); + switch (prop) { + case "path": + path = jr.nextString(); + break; + case "data": + data = parseFullDataSet(jr); + break; + default: + jr.skipValue(); + } + } + jr.endObject(); + + if (data == null) { + throw missingRequiredProperty("put", "data"); + } + + return new PutData(path, data); + } catch (IOException e) { + throw new SerializationException(e); + } catch (RuntimeException e) { + throw new SerializationException(e); + } + } + + static PatchData parsePatchData(JsonReader jr) { + // The logic here is a little convoluted because JSON object property ordering is arbitrary, so + // we don't know for sure that we'll see the "path" property before the "data" property, but we + // 
won't know what kind of object to parse "data" into until we know whether "path" starts with + // "/flags" or "/segments". So, if we see "data" first, we'll have to pull its value into a + // temporary buffer and parse it afterward, which is less efficient than parsing directly from + // the stream. However, in practice, the LD streaming service does send "path" first. + DataKind kind = null; + String key = null; + VersionedData dataItem = null; + JsonElement bufferedParsedData = null; + + try { + jr.beginObject(); + while (jr.peek() != JsonToken.END_OBJECT) { + String prop = jr.nextName(); + switch (prop) { + case "path": + KindAndKey kindAndKey = parsePath(jr.nextString()); + if (kindAndKey == null) { + // An unrecognized path isn't considered an error; we'll just return a null kind, + // indicating that we should ignore this event. + return new PatchData(null, null, null); + } + kind = kindAndKey.kind; + key = kindAndKey.key; + break; + case "data": + if (kind != null) { + dataItem = deserializeFromJsonReader(kind, jr); + } else { + bufferedParsedData = gsonInstance().fromJson(jr, JsonElement.class); + } + break; + default: + jr.skipValue(); + } + } + jr.endObject(); + + if (kind == null) { + throw missingRequiredProperty("patch", "path"); + } + if (dataItem == null) { + if (bufferedParsedData == null) { + throw missingRequiredProperty("patch", "path"); + } + dataItem = deserializeFromParsedJson(kind, bufferedParsedData); + } + return new PatchData(kind, key, new ItemDescriptor(dataItem.getVersion(), dataItem)); + } catch (IOException e) { + throw new SerializationException(e); + } catch (RuntimeException e) { + throw new SerializationException(e); + } + } + + static DeleteData parseDeleteData(JsonReader jr) { + DataKind kind = null; + String key = null; + Integer version = null; + + try { + jr.beginObject(); + while (jr.peek() != JsonToken.END_OBJECT) { + String prop = jr.nextName(); + switch (prop) { + case "path": + KindAndKey kindAndKey = 
parsePath(jr.nextString()); + if (kindAndKey == null) { + // An unrecognized path isn't considered an error; we'll just return a null kind, + // indicating that we should ignore this event. + return new DeleteData(null, null, 0); + } + kind = kindAndKey.kind; + key = kindAndKey.key; + break; + case "version": + version = jr.nextInt(); + break; + default: + jr.skipValue(); + } + } + jr.endObject(); + + if (kind == null) { + throw missingRequiredProperty("delete", "path"); + } + if (version == null) { + throw missingRequiredProperty("delete", "version"); + } + return new DeleteData(kind, key, version); + } catch (IOException e) { + throw new SerializationException(e); + } catch (RuntimeException e) { + throw new SerializationException(e); + } + } + + private static KindAndKey parsePath(String path) { + if (path == null) { + throw new JsonParseException("item path cannot be null"); + } + for (DataKind kind: ALL_DATA_KINDS) { + String prefix = (kind == SEGMENTS) ? "/segments/" : "/flags/"; + if (path.startsWith(prefix)) { + return new KindAndKey(kind, path.substring(prefix.length())); + } + } + return null; // we don't recognize the path - the caller should ignore this event, just as we ignore unknown event types + } + + private static JsonParseException missingRequiredProperty(String eventName, String propName) { + return new JsonParseException(String.format("stream \"{}\" event did not have required property \"{}\"", + eventName, propName)); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Util.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Util.java new file mode 100644 index 0000000..67c4b7d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Util.java @@ -0,0 +1,112 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; +import com.launchdarkly.sdk.server.interfaces.HttpAuthentication; + +import 
java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Pattern; + +import static com.google.common.collect.Iterables.transform; + +import okhttp3.Authenticator; +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.Route; + +abstract class Util { + private Util() {} + + static final Authenticator okhttpAuthenticatorFromHttpAuthStrategy(final HttpAuthentication strategy) { + return new Authenticator() { + public Request authenticate(Route route, Response response) throws IOException { + if (response.request().header("Proxy-Authorization") != null) { + return null; // Give up, we've already failed to authenticate + } + Iterable challenges = transform(response.challenges(), + c -> new HttpAuthentication.Challenge(c.scheme(), c.realm())); + String credential = strategy.provideAuthorization(challenges); + return response.request().newBuilder() + .header("Proxy-Authorization", credential) + .build(); + } + }; + } + + static String describeDuration(Duration d) { + if (d.toMillis() % 1000 == 0) { + if (d.toMillis() % 60000 == 0) { + return d.toMinutes() + (d.toMinutes() == 1 ? " minute" : " minutes"); + } else { + long sec = d.toMillis() / 1000; + return sec + (sec == 1 ? 
" second" : " seconds"); + } + } + return d.toMillis() + " milliseconds"; + } + + static void deleteDirectory(Path path) { + try { + Files.walkFileTree(path, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { + try { + Files.delete(file); + } catch (IOException e) {} + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) { + try { + Files.delete(dir); + } catch (IOException e) {} + return FileVisitResult.CONTINUE; + } + }); + } catch (IOException e) {} + } + + // Tag values must not be empty, and only contain letters, numbers, `.`, `_`, or `-`. + private static Pattern TAG_VALUE_REGEX = Pattern.compile("^[\\w.-]+$"); + + /** + * Builds the "X-LaunchDarkly-Tags" HTTP header out of the configured application info. + * + * @param applicationInfo the application metadata + * @return a space-separated string of tags, e.g. "application-id/authentication-service application-version/1.0.0" + */ + static String applicationTagHeader(ApplicationInfo applicationInfo, LDLogger logger) { + String[][] tags = { + {"applicationId", "application-id", applicationInfo.getApplicationId()}, + {"applicationVersion", "application-version", applicationInfo.getApplicationVersion()}, + }; + List parts = new ArrayList<>(); + for (String[] row : tags) { + String javaKey = row[0]; + String tagKey = row[1]; + String tagVal = row[2]; + if (tagVal == null) { + continue; + } + if (!TAG_VALUE_REGEX.matcher(tagVal).matches()) { + logger.warn("Value of ApplicationInfo.{} contained invalid characters and was discarded", javaKey); + continue; + } + if (tagVal.length() > 64) { + logger.warn("Value of ApplicationInfo.{} was longer than 64 characters and was discarded", javaKey); + continue; + } + parts.add(tagKey + "/" + tagVal); + } + return String.join(" ", parts); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Version.java 
b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Version.java new file mode 100644 index 0000000..82710e7 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/Version.java @@ -0,0 +1,10 @@ +package com.launchdarkly.sdk.server; + +abstract class Version { + private Version() {} + + // This constant is updated automatically by our Gradle script during a release, if the project version has changed + // x-release-please-start-version + static final String SDK_VERSION = "7.4.1"; + // x-release-please-end +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilder.java new file mode 100644 index 0000000..ffb0fde --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilder.java @@ -0,0 +1,84 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; + +/** + * Contains methods for configuring the SDK's application metadata. + *

    + * Application metadata may be used in LaunchDarkly analytics or other product features, but does not affect feature flag evaluations. + *

    + * If you want to set non-default values for any of these fields, create a builder with + * {@link Components#applicationInfo()}, change its properties with the methods of this class, + * and pass it to {@link com.launchdarkly.sdk.server.LDConfig.Builder#applicationInfo(ApplicationInfoBuilder)}: + *

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .applicationInfo(
    + *             Components.applicationInfo()
    + *                 .applicationId("authentication-service")
    + *                 .applicationVersion("1.0.0")
    + *         )
    + *         .build();
    + * 
    + *

    + * + * @since 5.8.0 + */ +public final class ApplicationInfoBuilder { + private String applicationId; + private String applicationVersion; + + /** + * Create an empty ApplicationInfoBuilder. + * + * @see Components#applicationInfo() + */ + public ApplicationInfoBuilder() {} + + /** + * Sets a unique identifier representing the application where the LaunchDarkly SDK is running. + *

    + * This can be specified as any string value as long as it only uses the following characters: ASCII + * letters, ASCII digits, period, hyphen, underscore. A string containing any other characters will be + * ignored. + * + * @param applicationId the application identifier + * @return the builder + */ + public ApplicationInfoBuilder applicationId(String applicationId) { + this.applicationId = applicationId; + return this; + } + + /** + * Sets a unique identifier representing the version of the application where the LaunchDarkly SDK + * is running. + *

    + * This can be specified as any string value as long as it only uses the following characters: ASCII + * letters, ASCII digits, period, hyphen, underscore. A string containing any other characters will be + * ignored. + * + * @param applicationVersion the application version + * @return the builder + */ + public ApplicationInfoBuilder applicationVersion(String applicationVersion) { + this.applicationVersion = applicationVersion; + return this; + } + + /** + * Called internally by the SDK to create the configuration object. + * + * @return the configuration object + */ + public ApplicationInfo createApplicationInfo() { + return new ApplicationInfo(applicationId, applicationVersion); + } + + public static ApplicationInfoBuilder fromApplicationInfo(ApplicationInfo info) { + ApplicationInfoBuilder newBuilder = new ApplicationInfoBuilder(); + newBuilder.applicationId = info.getApplicationId(); + newBuilder.applicationVersion = info.getApplicationVersion(); + return newBuilder; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilder.java new file mode 100644 index 0000000..aac3816 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilder.java @@ -0,0 +1,165 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.BigSegmentsStatus; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig.Builder; +import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; + +import java.time.Duration; + +/** + * 
Contains methods for configuring the SDK's Big Segments behavior. + *

    + * Big Segments are a specific type of user segment. For more information, read the + * <a href="https://docs.launchdarkly.com/home/users/big-segments">LaunchDarkly documentation + * </a>.

    + * If you want non-default values for any of these properties, create a builder with + * {@link Components#bigSegments(ComponentConfigurer)}, change its properties with the methods + * of this class, and pass it to {@link Builder#bigSegments(ComponentConfigurer)}:

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .bigSegments(Components.bigSegments(Redis.dataStore().prefix("app1"))
    + *             .userCacheSize(2000))
    + *         .build();
    + * 
    + * + * @since 5.7.0 + */ +public final class BigSegmentsConfigurationBuilder implements ComponentConfigurer { + /** + * The default value for {@link #userCacheSize(int)}. + */ + public static final int DEFAULT_USER_CACHE_SIZE = 1000; + + /** + * The default value for {@link #userCacheTime(Duration)}. + */ + public static final Duration DEFAULT_USER_CACHE_TIME = Duration.ofSeconds(5); + + /** + * The default value for {@link #statusPollInterval(Duration)}. + */ + public static final Duration DEFAULT_STATUS_POLL_INTERVAL = Duration.ofSeconds(5); + + /** + * The default value for {@link #staleAfter(Duration)}. + */ + public static final Duration DEFAULT_STALE_AFTER = Duration.ofMinutes(2); + + private final ComponentConfigurer storeConfigurer; + private int userCacheSize = DEFAULT_USER_CACHE_SIZE; + private Duration userCacheTime = DEFAULT_USER_CACHE_TIME; + private Duration statusPollInterval = DEFAULT_STATUS_POLL_INTERVAL; + private Duration staleAfter = DEFAULT_STALE_AFTER; + + /** + * Creates a new builder for Big Segments configuration. + * + * @param storeConfigurer the factory implementation for the specific data store type + */ + public BigSegmentsConfigurationBuilder(ComponentConfigurer storeConfigurer) { + this.storeConfigurer = storeConfigurer; + } + + /** + * Sets the maximum number of users whose Big Segment state will be cached by the SDK at any given + * time. + *

    + * To reduce database traffic, the SDK maintains a least-recently-used cache by user key. When a + * feature flag that references a Big Segment is evaluated for some user who is not currently in + * the cache, the SDK queries the database for all Big Segment memberships of that user, and + * stores them together in a single cache entry. If the cache is full, the oldest entry is + * dropped. + *

    + * A higher value for {@code userCacheSize} means that database queries for Big Segments will be + * done less often for recently-referenced users, if the application has many users, at the cost + * of increased memory used by the cache. + *

    + * Cache entries can also expire based on the setting of {@link #userCacheTime(Duration)}. + * + * @param userCacheSize the maximum number of user states to cache + * @return the builder + * @see #DEFAULT_USER_CACHE_SIZE + */ + public BigSegmentsConfigurationBuilder userCacheSize(int userCacheSize) { + this.userCacheSize = Math.max(userCacheSize, 0); + return this; + } + + /** + * Sets the maximum length of time that the Big Segment state for a user will be cached by the + * SDK. + *

    + * See {@link #userCacheSize(int)} for more about this cache. A higher value for + * {@code userCacheTime} means that database queries for the Big Segment state of any given user + * will be done less often, but that changes to segment membership may not be detected as soon. + * + * @param userCacheTime the cache TTL (a value of null, or a negative value will be changed to + * {@link #DEFAULT_USER_CACHE_TIME} + * @return the builder + * @see #DEFAULT_USER_CACHE_TIME + */ + public BigSegmentsConfigurationBuilder userCacheTime(Duration userCacheTime) { + this.userCacheTime = userCacheTime != null && userCacheTime.compareTo(Duration.ZERO) >= 0 + ? userCacheTime : DEFAULT_USER_CACHE_TIME; + return this; + } + + /** + * Sets the interval at which the SDK will poll the Big Segment store to make sure it is available + * and to determine how long ago it was updated. + * + * @param statusPollInterval the status polling interval (a null, zero, or negative value will + * be changed to {@link #DEFAULT_STATUS_POLL_INTERVAL}) + * @return the builder + * @see #DEFAULT_STATUS_POLL_INTERVAL + */ + public BigSegmentsConfigurationBuilder statusPollInterval(Duration statusPollInterval) { + this.statusPollInterval = statusPollInterval != null && statusPollInterval.compareTo(Duration.ZERO) > 0 + ? statusPollInterval : DEFAULT_STATUS_POLL_INTERVAL; + return this; + } + + /** + * Sets the maximum length of time between updates of the Big Segments data before the data is + * considered out of date. + *

    + * Normally, the LaunchDarkly Relay Proxy updates a timestamp in the Big Segments store at + * intervals to confirm that it is still in sync with the LaunchDarkly data, even if there have + * been no changes to the data. If the timestamp falls behind the current time by the amount + * specified by {@code staleAfter}, the SDK assumes that something is not working correctly in + * this process and that the data may not be accurate. + *

    + * While in a stale state, the SDK will still continue using the last known data, but + * {@link com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider.Status} will return + * true in its {@code stale} property, and any {@link EvaluationReason} generated from a feature + * flag that references a Big Segment will have a {@link BigSegmentsStatus} of + * {@link BigSegmentsStatus#STALE}. + * + * @param staleAfter the time limit for marking the data as stale (a null, zero, or negative + * value will be changed to {@link #DEFAULT_STALE_AFTER}) + * @return the builder + * @see #DEFAULT_STALE_AFTER + */ + public BigSegmentsConfigurationBuilder staleAfter(Duration staleAfter) { + this.staleAfter = staleAfter != null && staleAfter.compareTo(Duration.ZERO) > 0 + ? staleAfter : DEFAULT_STALE_AFTER; + return this; + } + + @Override + public BigSegmentsConfiguration build(ClientContext context) { + BigSegmentStore store = storeConfigurer == null ? null : storeConfigurer.build(context); + return new BigSegmentsConfiguration( + store, + userCacheSize, + userCacheTime, + statusPollInterval, + staleAfter); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EvaluationSeriesContext.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EvaluationSeriesContext.java new file mode 100644 index 0000000..7961f28 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EvaluationSeriesContext.java @@ -0,0 +1,47 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; + +import java.util.Map; + +/** + * Represents parameters associated with a feature flag evaluation. An instance of this class is provided to some + * stages of series of a {@link Hook} implementation. 
For example, see {@link Hook#beforeEvaluation(EvaluationSeriesContext, Map)} + */ +public class EvaluationSeriesContext { + + /** + * The variation method that was used to invoke the evaluation. The stability of this string is not + * guaranteed and should not be used in conditional logic. + */ + public final String method; + + /** + * The key of the feature flag being evaluated. + */ + public final String flagKey; + + /** + * The context the evaluation was for. + */ + public final LDContext context; + + /** + * The user-provided default value for the evaluation. + */ + public final LDValue defaultValue; + + /** + * @param method the variation method that was used to invoke the evaluation. + * @param key the key of the feature flag being evaluated. + * @param context the context the evaluation was for. + * @param defaultValue the user-provided default value for the evaluation. + */ + public EvaluationSeriesContext(String method, String key, LDContext context, LDValue defaultValue) { + this.flagKey = key; + this.context = context; + this.defaultValue = defaultValue; + this.method = method; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilder.java new file mode 100644 index 0000000..4c79c4e --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilder.java @@ -0,0 +1,214 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig.Builder; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; +import com.launchdarkly.sdk.server.subsystems.EventSender; + +import java.time.Duration; +import java.util.HashSet; +import java.util.Set; + +/** + * Contains methods 
for configuring delivery of analytics events. + *

    + * The SDK normally buffers analytics events and sends them to LaunchDarkly at intervals. If you want + * to customize this behavior, create a builder with {@link Components#sendEvents()}, change its + * properties with the methods of this class, and pass it to {@link Builder#events(ComponentConfigurer)}: + *

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .events(Components.sendEvents().capacity(5000).flushIntervalSeconds(2))
    + *         .build();
    + * 
    + *

    + * Note that this class is abstract; the actual implementation is created by calling {@link Components#sendEvents()}. + * + * @since 4.12.0 + */ +public abstract class EventProcessorBuilder implements ComponentConfigurer { + /** + * The default value for {@link #capacity(int)}. + */ + public static final int DEFAULT_CAPACITY = 10000; + + /** + * The default value for {@link #diagnosticRecordingInterval(Duration)}: 15 minutes. + */ + public static final Duration DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL = Duration.ofMinutes(15); + + /** + * The default value for {@link #flushInterval(Duration)}: 5 seconds. + */ + public static final Duration DEFAULT_FLUSH_INTERVAL = Duration.ofSeconds(5); + + /** + * The default value for {@link #userKeysCapacity(int)}. + */ + public static final int DEFAULT_USER_KEYS_CAPACITY = 1000; + + /** + * The default value for {@link #userKeysFlushInterval(Duration)}: 5 minutes. + */ + public static final Duration DEFAULT_USER_KEYS_FLUSH_INTERVAL = Duration.ofMinutes(5); + + /** + * The minimum value for {@link #diagnosticRecordingInterval(Duration)}: 60 seconds. + */ + public static final Duration MIN_DIAGNOSTIC_RECORDING_INTERVAL = Duration.ofSeconds(60); + + protected boolean allAttributesPrivate = false; + protected int capacity = DEFAULT_CAPACITY; + protected Duration diagnosticRecordingInterval = DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL; + protected Duration flushInterval = DEFAULT_FLUSH_INTERVAL; + protected Set privateAttributes; + protected int userKeysCapacity = DEFAULT_USER_KEYS_CAPACITY; + protected Duration userKeysFlushInterval = DEFAULT_USER_KEYS_FLUSH_INTERVAL; + protected ComponentConfigurer eventSenderConfigurer = null; + + /** + * Sets whether or not all optional user attributes should be hidden from LaunchDarkly. + *

    + * If this is {@code true}, all user attribute values (other than the key) will be private, not just + * the attributes specified in {@link #privateAttributes(String...)} or on a per-user basis with + * {@link com.launchdarkly.sdk.ContextBuilder} methods. By default, it is {@code false}. + * + * @param allAttributesPrivate true if all user attributes should be private + * @return the builder + * @see #privateAttributes(String...) + * @see com.launchdarkly.sdk.ContextBuilder + */ + public EventProcessorBuilder allAttributesPrivate(boolean allAttributesPrivate) { + this.allAttributesPrivate = allAttributesPrivate; + return this; + } + + /** + * Set the capacity of the events buffer. + *

    + * The client buffers up to this many events in memory before flushing. If the capacity is exceeded before + * the buffer is flushed (see {@link #flushInterval(Duration)}, events will be discarded. Increasing the + * capacity means that events are less likely to be discarded, at the cost of consuming more memory. + *

    + * The default value is {@link #DEFAULT_CAPACITY}. + * + * @param capacity the capacity of the event buffer + * @return the builder + */ + public EventProcessorBuilder capacity(int capacity) { + this.capacity = capacity; + return this; + } + + /** + * Sets the interval at which periodic diagnostic data is sent. + *

    + * The default value is {@link #DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL}; the minimum value is + * {@link #MIN_DIAGNOSTIC_RECORDING_INTERVAL}. This property is ignored if + * {@link Builder#diagnosticOptOut(boolean)} is set to {@code true}. + * + * @see Builder#diagnosticOptOut(boolean) + * + * @param diagnosticRecordingInterval the diagnostics interval; null to use the default + * @return the builder + */ + public EventProcessorBuilder diagnosticRecordingInterval(Duration diagnosticRecordingInterval) { + if (diagnosticRecordingInterval == null) { + this.diagnosticRecordingInterval = DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL; + } else { + this.diagnosticRecordingInterval = diagnosticRecordingInterval.compareTo(MIN_DIAGNOSTIC_RECORDING_INTERVAL) < 0 ? + MIN_DIAGNOSTIC_RECORDING_INTERVAL : diagnosticRecordingInterval; + } + return this; + } + + /** + * Specifies a custom implementation for event delivery. + *

    + * The standard event delivery implementation sends event data via HTTP/HTTPS to the LaunchDarkly events + * service endpoint (or any other endpoint specified with {@link Builder#serviceEndpoints(ServiceEndpointsBuilder)}. + * Providing a custom implementation may be useful in tests, or if the event data needs to be stored and forwarded. + * + * @param eventSenderConfigurer a factory for an {@link EventSender} implementation + * @return the builder + */ + public EventProcessorBuilder eventSender(ComponentConfigurer eventSenderConfigurer) { + this.eventSenderConfigurer = eventSenderConfigurer; + return this; + } + + /** + * Sets the interval between flushes of the event buffer. + *

    + * Decreasing the flush interval means that the event buffer is less likely to reach capacity. + *

    + * The default value is {@link #DEFAULT_FLUSH_INTERVAL}. + * + * @param flushInterval the flush interval; null to use the default + * @return the builder + */ + public EventProcessorBuilder flushInterval(Duration flushInterval) { + this.flushInterval = flushInterval == null ? DEFAULT_FLUSH_INTERVAL : flushInterval; + return this; + } + + /** + * Marks a set of attribute names or subproperties as private. + *

    + * Any contexts sent to LaunchDarkly with this configuration active will have attributes with these + * names removed. This is in addition to any attributes that were marked as private for an + * individual context with {@link com.launchdarkly.sdk.ContextBuilder} methods. + *

    + * If and only if a parameter starts with a slash, it is interpreted as a slash-delimited path that + * can denote a nested property within a JSON object. For instance, "/address/street" means that if + * there is an attribute called "address" that is a JSON object, and one of the object's properties + * is "street", the "street" property will be redacted from the analytics data but other properties + * within "address" will still be sent. This syntax also uses the JSON Pointer convention of escaping + * a literal slash character as "~1" and a tilde as "~0". + *

    + * This method replaces any previous private attributes that were set on the same builder, rather + * than adding to them. + * + * @param attributeNames a set of names or paths that will be removed from context data set to LaunchDarkly + * @return the builder + * @see #allAttributesPrivate(boolean) + * @see com.launchdarkly.sdk.ContextBuilder#privateAttributes(String...) + */ + public EventProcessorBuilder privateAttributes(String... attributeNames) { + privateAttributes = new HashSet<>(); + for (String a: attributeNames) { + privateAttributes.add(AttributeRef.fromPath(a)); + } + return this; + } + + /** + * Sets the number of user keys that the event processor can remember at any one time. + *

    + * To avoid sending duplicate user details in analytics events, the SDK maintains a cache of + * recently seen user keys, expiring at an interval set by {@link #userKeysFlushInterval(Duration)}. + *

    + * The default value is {@link #DEFAULT_USER_KEYS_CAPACITY}. + * + * @param userKeysCapacity the maximum number of user keys to remember + * @return the builder + */ + public EventProcessorBuilder userKeysCapacity(int userKeysCapacity) { + this.userKeysCapacity = userKeysCapacity; + return this; + } + + /** + * Sets the interval at which the event processor will reset its cache of known user keys. + *

    + * The default value is {@link #DEFAULT_USER_KEYS_FLUSH_INTERVAL}. + * + * @param userKeysFlushInterval the flush interval; null to use the default + * @return the builder + */ + public EventProcessorBuilder userKeysFlushInterval(Duration userKeysFlushInterval) { + this.userKeysFlushInterval = userKeysFlushInterval == null ? DEFAULT_USER_KEYS_FLUSH_INTERVAL : userKeysFlushInterval; + return this; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileData.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileData.java new file mode 100644 index 0000000..c75b64c --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileData.java @@ -0,0 +1,142 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.LDConfig.Builder; + +/** + * Integration between the LaunchDarkly SDK and file data. + *

    + * The file data source allows you to use local files as a source of feature flag state. This would + * typically be used in a test environment, to operate using a predetermined feature flag state + * without an actual LaunchDarkly connection. See {@link #dataSource()} for details. + *

    + * This is different from {@link TestData}, which allows you to simulate flag configurations + * programmatically rather than using a file. + * + * @since 4.12.0 + * @see TestData + */ +public abstract class FileData { + /** + * Determines how duplicate feature flag or segment keys are handled. + * + * @see FileDataSourceBuilder#duplicateKeysHandling + * @since 5.3.0 + */ + public enum DuplicateKeysHandling { + /** + * Data loading will fail if keys are duplicated across files. + */ + FAIL, + + /** + * Keys that are duplicated across files will be ignored, and the first occurrence will be used. + */ + IGNORE + } + + /** + * Creates a {@link FileDataSourceBuilder} which you can use to configure the file data source. + * This allows you to use local files (or classpath resources containing file data) as a source of + * feature flag state, instead of using an actual LaunchDarkly connection. + *

    + * This object can be modified with {@link FileDataSourceBuilder} methods for any desired + * custom settings, before including it in the SDK configuration with + * {@link Builder#dataSource(com.launchdarkly.sdk.server.subsystems.ComponentConfigurer)}. + *

    + * At a minimum, you will want to call {@link FileDataSourceBuilder#filePaths(String...)} to specify + * your data file(s); you can also use {@link FileDataSourceBuilder#autoUpdate(boolean)} to + * specify that flags should be reloaded when a file is modified. See {@link FileDataSourceBuilder} + * for all configuration options. + *

    +   *     FileDataSourceFactory f = FileData.dataSource()
    +   *         .filePaths("./testData/flags.json")
    +   *         .autoUpdate(true);
    +   *     LDConfig config = new LDConfig.Builder()
    +   *         .dataSource(f)
    +   *         .build();
    +   * 
    + *

    + * This will cause the client not to connect to LaunchDarkly to get feature flags. The + * client may still make network connections to send analytics events, unless you have disabled + * this with {@link com.launchdarkly.sdk.server.Components#noEvents()}. IMPORTANT: Do not + * set {@link Builder#offline(boolean)} to {@code true}; doing so + * would not just put the SDK "offline" with regard to LaunchDarkly, but will completely turn off + * all flag data sources to the SDK including the file data source. + *

    + * Flag data files can be either JSON or YAML. They contain an object with three possible + * properties: + *

      + *
    • {@code flags}: Feature flag definitions. + *
    • {@code flagVersions}: Simplified feature flags that contain only a value. + *
    • {@code segments}: User segment definitions. + *
    + *

    + * The format of the data in {@code flags} and {@code segments} is defined by the LaunchDarkly application + * and is subject to change. Rather than trying to construct these objects yourself, it is simpler + * to request existing flags directly from the LaunchDarkly server in JSON format, and use this + * output as the starting point for your file. In Linux you would do this: + *

    +   *     curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all
    +   * 
    + *

    + * The output will look something like this (but with many more properties): + *

    +   * {
    +   *     "flags": {
    +   *         "flag-key-1": {
    +   *             "key": "flag-key-1",
    +   *             "on": true,
    +   *             "variations": [ "a", "b" ]
    +   *         },
    +   *         "flag-key-2": {
    +   *             "key": "flag-key-2",
    +   *             "on": true,
    +   *             "variations": [ "c", "d" ]
    +   *         }
    +   *     },
    +   *     "segments": {
    +   *         "segment-key-1": {
    +   *             "key": "segment-key-1",
    +   *             "includes": [ "user-key-1" ]
    +   *         }
    +   *     }
    +   * }
    +   * 
    + *

    + * Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + * by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + * set specific flag keys to specific values. For that, you can use a much simpler format: + *

    +   * {
    +   *     "flagValues": {
    +   *         "my-string-flag-key": "value-1",
    +   *         "my-boolean-flag-key": true,
    +   *         "my-integer-flag-key": 3
    +   *     }
    +   * }
    +   * 
    + *

    + * Or, in YAML: + *

    +   * flagValues:
    +   *   my-string-flag-key: "value-1"
    +   *   my-boolean-flag-key: true
    +   *   my-integer-flag-key: 3
    +   * 
    + *

    + * It is also possible to specify both {@code flags} and {@code flagValues}, if you want some flags + * to have simple values and others to have complex behavior. However, it is an error to use the + * same flag key or segment key more than once, either in a single file or across multiple files. + *

    + * If the data source encounters any error in any file-- malformed content, a missing file, or a + * duplicate key-- it will not load flags from any of the files. + * + * @return a data source configuration object + * @since 4.12.0 + */ + public static FileDataSourceBuilder dataSource() { + return new FileDataSourceBuilder(); + } + + private FileData() {} +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceBuilder.java new file mode 100644 index 0000000..14d607c --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceBuilder.java @@ -0,0 +1,182 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.io.ByteStreams; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.server.LDConfig.Builder; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.InvalidPathException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.List; + +/** + * To use the file data source, obtain a new instance of this class with {@link FileData#dataSource()}; + * call the builder method {@link #filePaths(String...)} to specify file path(s), and/or + * {@link #classpathResources(String...)} to specify classpath data resources; then pass the resulting + * object to {@link Builder#dataSource(ComponentConfigurer)}. + *

    + * For more details, see {@link FileData}. + * + * @since 4.12.0 + */ +public final class FileDataSourceBuilder implements ComponentConfigurer { + final List sources = new ArrayList<>(); // visible for tests + private boolean autoUpdate = false; + private FileData.DuplicateKeysHandling duplicateKeysHandling = FileData.DuplicateKeysHandling.FAIL; + + /** + * Adds any number of source files for loading flag data, specifying each file path as a string. The files will + * not actually be loaded until the LaunchDarkly client starts up. + *

    + * Files will be parsed as JSON if their first non-whitespace character is '{'. Otherwise, they will be parsed as YAML. + * + * @param filePaths path(s) to the source file(s); may be absolute or relative to the current working directory + * @return the same factory object + * + * @throws InvalidPathException if one of the parameters is not a valid file path + */ + public FileDataSourceBuilder filePaths(String... filePaths) throws InvalidPathException { + for (String p: filePaths) { + sources.add(new FilePathSourceInfo(Paths.get(p))); + } + return this; + } + + /** + * Adds any number of source files for loading flag data, specifying each file path as a Path. The files will + * not actually be loaded until the LaunchDarkly client starts up. + *

    + * Files will be parsed as JSON if their first non-whitespace character is '{'. Otherwise, they will be parsed as YAML. + * + * @param filePaths path(s) to the source file(s); may be absolute or relative to the current working directory + * @return the same factory object + */ + public FileDataSourceBuilder filePaths(Path... filePaths) { + for (Path p: filePaths) { + sources.add(new FilePathSourceInfo(p)); + } + return this; + } + + /** + * Adds any number of classpath resources for loading flag data. The resources will not actually be loaded until the + * LaunchDarkly client starts up. + *

    + * Files will be parsed as JSON if their first non-whitespace character is '{'. Otherwise, they will be parsed as YAML. + * + * @param resourceLocations resource location(s) in the format used by {@code ClassLoader.getResource()}; these + * are absolute paths, so for instance a resource called "data.json" in the package "com.mypackage" would have + * the location "/com/mypackage/data.json" + * @return the same factory object + */ + public FileDataSourceBuilder classpathResources(String... resourceLocations) { + for (String location: resourceLocations) { + sources.add(new ClasspathResourceSourceInfo(location)); + } + return this; + } + + /** + * Specifies whether the data source should watch for changes to the source file(s) and reload flags + * whenever there is a change. By default, it will not, so the flags will only be loaded once. This feature + * only works with real files, not with {@link #classpathResources(String...)}. + *

    + * Note that auto-updating will only work if all of the files you specified have valid directory paths at + * startup time; if a directory does not exist, creating it later will not result in files being loaded from it. + *

    + * The performance of this feature depends on what implementation of {@code java.nio.file.WatchService} is + * available in the Java runtime. On Linux and Windows, an implementation based on native filesystem APIs + * should be available. On MacOS, there is a long-standing known issue where due to the lack of such an + * implementation, it must use a file polling approach that can take up to 10 seconds to detect a change. + * + * @param autoUpdate true if flags should be reloaded whenever a source file changes + * @return the same factory object + */ + public FileDataSourceBuilder autoUpdate(boolean autoUpdate) { + this.autoUpdate = autoUpdate; + return this; + } + + /** + * Specifies how to handle keys that are duplicated across files. + *

    + * By default, data loading will fail if keys are duplicated across files ({@link FileData.DuplicateKeysHandling#FAIL}). + * + * @param duplicateKeysHandling specifies how to handle duplicate keys + * @return the same factory object + * @since 5.3.0 + */ + public FileDataSourceBuilder duplicateKeysHandling(FileData.DuplicateKeysHandling duplicateKeysHandling) { + this.duplicateKeysHandling = duplicateKeysHandling; + return this; + } + + @Override + public DataSource build(ClientContext context) { + LDLogger logger = context.getBaseLogger().subLogger("DataSource"); + return new FileDataSourceImpl(context.getDataSourceUpdateSink(), sources, autoUpdate, duplicateKeysHandling, logger); + } + + static abstract class SourceInfo { + abstract byte[] readData() throws IOException; + abstract Path toFilePath(); + } + + static final class FilePathSourceInfo extends SourceInfo { + final Path path; + + FilePathSourceInfo(Path path) { + this.path = path; + } + + @Override + byte[] readData() throws IOException { + return Files.readAllBytes(path); + } + + @Override + Path toFilePath() { + return path; + } + + @Override + public String toString() { + return path.toString(); + } + } + + static final class ClasspathResourceSourceInfo extends SourceInfo { + String location; + + ClasspathResourceSourceInfo(String location) { + this.location = location; + } + + @Override + byte[] readData() throws IOException { + try (InputStream is = getClass().getClassLoader().getResourceAsStream(location)) { + if (is == null) { + throw new IOException("classpath resource not found"); + } + return ByteStreams.toByteArray(is); + } + } + + @Override + Path toFilePath() { + return null; + } + + @Override + public String toString() { + return "classpath:" + location; + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceImpl.java 
b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceImpl.java new file mode 100644 index 0000000..08f39a2 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceImpl.java @@ -0,0 +1,302 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableList; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.integrations.FileDataSourceBuilder.SourceInfo; +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FileDataException; +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FlagFactory; +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FlagFileParser; +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FlagFileRep; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.file.FileSystem; +import java.nio.file.FileSystems; +import java.nio.file.Path; +import java.nio.file.WatchEvent; +import java.nio.file.WatchKey; +import java.nio.file.WatchService; +import java.nio.file.Watchable; +import java.time.Instant; +import java.util.AbstractMap; +import java.util.ArrayList; +import 
java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE; +import static java.nio.file.StandardWatchEventKinds.ENTRY_DELETE; +import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY; + +/** + * Implements taking flag data from files and putting it into the data store, at startup time and + * optionally whenever files change. + */ +final class FileDataSourceImpl implements DataSource { + private final DataSourceUpdateSink dataSourceUpdates; + private final DataLoader dataLoader; + private final FileData.DuplicateKeysHandling duplicateKeysHandling; + private final AtomicBoolean inited = new AtomicBoolean(false); + private final FileWatcher fileWatcher; + private final LDLogger logger; + + FileDataSourceImpl( + DataSourceUpdateSink dataSourceUpdates, + List sources, + boolean autoUpdate, + FileData.DuplicateKeysHandling duplicateKeysHandling, + LDLogger logger + ) { + this.dataSourceUpdates = dataSourceUpdates; + this.dataLoader = new DataLoader(sources); + this.duplicateKeysHandling = duplicateKeysHandling; + this.logger = logger; + + FileWatcher fw = null; + if (autoUpdate) { + try { + fw = FileWatcher.create(dataLoader.getSources(), logger); + } catch (IOException e) { + // COVERAGE: there is no way to simulate this condition in a unit test + logger.error("Unable to watch files for auto-updating: {}", e.toString()); + logger.debug(e.toString(), e); + fw = null; + } + } + fileWatcher = fw; + } + + @Override + public Future start() { + final Future initFuture = CompletableFuture.completedFuture(null); + + reload(); + + // Note 
that if reload() finds any errors, it will not set our status to "initialized". But we + // will still do all the other startup steps, because we still might end up getting valid data + // if we are told to reload by the file watcher. + + if (fileWatcher != null) { + fileWatcher.start(this::reload); + } + + return initFuture; + } + + private boolean reload() { + DataBuilder builder = new DataBuilder(duplicateKeysHandling); + try { + dataLoader.load(builder); + } catch (FileDataException e) { + logger.error(e.getDescription()); + dataSourceUpdates.updateStatus(State.INTERRUPTED, + new ErrorInfo(ErrorKind.INVALID_DATA, 0, e.getDescription(), Instant.now())); + return false; + } + dataSourceUpdates.init(builder.build()); + dataSourceUpdates.updateStatus(State.VALID, null); + inited.set(true); + return true; + } + + @Override + public boolean isInitialized() { + return inited.get(); + } + + @Override + public void close() throws IOException { + if (fileWatcher != null) { + fileWatcher.stop(); + } + } + + /** + * If auto-updating is enabled, this component watches for file changes on a worker thread. + */ + private static final class FileWatcher implements Runnable { + private final WatchService watchService; + private final Set watchedFilePaths; + private Runnable fileModifiedAction; + private final Thread thread; + private final LDLogger logger; + private volatile boolean stopped; + + private static FileWatcher create(Iterable sources, LDLogger logger) throws IOException { + Set directoryPaths = new HashSet<>(); + Set absoluteFilePaths = new HashSet<>(); + FileSystem fs = FileSystems.getDefault(); + WatchService ws = fs.newWatchService(); + + // In Java, you watch for filesystem changes at the directory level, not for individual files. 
+ for (SourceInfo s: sources) { + Path p = s.toFilePath(); + if (p != null) { + absoluteFilePaths.add(p); + directoryPaths.add(p.getParent()); + } + } + for (Path d: directoryPaths) { + d.register(ws, ENTRY_CREATE, ENTRY_MODIFY, ENTRY_DELETE); + } + + return new FileWatcher(ws, absoluteFilePaths, logger); + } + + private FileWatcher(WatchService watchService, Set watchedFilePaths, LDLogger logger) { + this.watchService = watchService; + this.watchedFilePaths = watchedFilePaths; + this.logger = logger; + + thread = new Thread(this, FileDataSourceImpl.class.getName()); + thread.setDaemon(true); + } + + public void run() { + while (!stopped) { + try { + WatchKey key = watchService.take(); // blocks until a change is available or we are interrupted + boolean watchedFileWasChanged = false; + for (WatchEvent event: key.pollEvents()) { + Watchable w = key.watchable(); + Object context = event.context(); + if (w instanceof Path && context instanceof Path) { + Path dirPath = (Path)w; + Path fileNamePath = (Path)context; + Path absolutePath = dirPath.resolve(fileNamePath); + if (watchedFilePaths.contains(absolutePath)) { + watchedFileWasChanged = true; + break; + } + } + } + if (watchedFileWasChanged) { + try { + fileModifiedAction.run(); + } catch (Exception e) { + // COVERAGE: there is no way to simulate this condition in a unit test + logger.warn("Unexpected exception when reloading file data: {}", LogValues.exceptionSummary(e)); + } + } + key.reset(); // if we don't do this, the watch on this key stops working + } catch (InterruptedException e) { + // if we've been stopped we will drop out at the top of the while loop + } + } + } + + public void start(Runnable fileModifiedAction) { + this.fileModifiedAction = fileModifiedAction; + thread.start(); + } + + public void stop() { + stopped = true; + thread.interrupt(); + } + } + + /** + * Implements the loading of flag data from one or more files. 
Will throw an exception if any file can't + * be read or parsed, or if any flag or segment keys are duplicates. + */ + static final class DataLoader { + private final List sources; + private final AtomicInteger lastVersion; + + public DataLoader(List sources) { + this.sources = new ArrayList<>(sources); + this.lastVersion = new AtomicInteger(0); + } + + public Iterable getSources() { + return sources; + } + + public void load(DataBuilder builder) throws FileDataException + { + int version = lastVersion.incrementAndGet(); + for (SourceInfo s: sources) { + try { + byte[] data = s.readData(); + FlagFileParser parser = FlagFileParser.selectForContent(data); + FlagFileRep fileContents = parser.parse(new ByteArrayInputStream(data)); + if (fileContents.flags != null) { + for (Map.Entry e: fileContents.flags.entrySet()) { + builder.add(FEATURES, e.getKey(), FlagFactory.flagFromJson(e.getValue(), version)); + } + } + if (fileContents.flagValues != null) { + for (Map.Entry e: fileContents.flagValues.entrySet()) { + builder.add(FEATURES, e.getKey(), FlagFactory.flagWithValue(e.getKey(), e.getValue(), version)); + } + } + if (fileContents.segments != null) { + for (Map.Entry e: fileContents.segments.entrySet()) { + builder.add(SEGMENTS, e.getKey(), FlagFactory.segmentFromJson(e.getValue(), version)); + } + } + } catch (FileDataException e) { + throw new FileDataException(e.getMessage(), e.getCause(), s); + } catch (IOException e) { + throw new FileDataException(null, e, s); + } + } + } + } + + /** + * Internal data structure that organizes flag/segment data into the format that the feature store + * expects. Will throw an exception if we try to add the same flag or segment key more than once. 
+ */ + static final class DataBuilder { + private final Map> allData = new HashMap<>(); + private final FileData.DuplicateKeysHandling duplicateKeysHandling; + + public DataBuilder(FileData.DuplicateKeysHandling duplicateKeysHandling) { + this.duplicateKeysHandling = duplicateKeysHandling; + } + + public FullDataSet build() { + ImmutableList.Builder>> allBuilder = ImmutableList.builder(); + for (Map.Entry> e0: allData.entrySet()) { + allBuilder.add(new AbstractMap.SimpleEntry<>(e0.getKey(), new KeyedItems<>(e0.getValue().entrySet()))); + } + return new FullDataSet<>(allBuilder.build()); + } + + public void add(DataKind kind, String key, ItemDescriptor item) throws FileDataException { + Map items = allData.get(kind); + if (items == null) { + items = new HashMap(); + allData.put(kind, items); + } + if (items.containsKey(key)) { + if (duplicateKeysHandling == FileData.DuplicateKeysHandling.IGNORE) { + return; + } + throw new FileDataException("in " + kind.getName() + ", key \"" + key + "\" was already defined", null, null); + } + items.put(key, item); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceParsing.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceParsing.java new file mode 100644 index 0000000..9571c44 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/FileDataSourceParsing.java @@ -0,0 +1,217 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonSyntaxException; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.server.integrations.FileDataSourceBuilder.SourceInfo; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; 
+import org.yaml.snakeyaml.error.YAMLException; +import org.yaml.snakeyaml.representer.Representer; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.util.Map; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; + +abstract class FileDataSourceParsing { + private FileDataSourceParsing() {} + + /** + * Indicates that the file processor encountered an error in one of the input files. This exception is + * not surfaced to the host application, it is only logged, and we don't do anything different programmatically + * with different kinds of exceptions, therefore it has no subclasses. + */ + @SuppressWarnings("serial") + static final class FileDataException extends Exception { + private final SourceInfo source; + + public FileDataException(String message, Throwable cause, SourceInfo source) { + super(message, cause); + this.source = source; + } + + public FileDataException(String message, Throwable cause) { + this(message, cause, null); + } + + public String getDescription() { + StringBuilder s = new StringBuilder(); + if (getMessage() != null) { + s.append(getMessage()); + s.append(" "); + } + s.append("[").append(getCause().toString()).append("]"); + if (source != null) { + s.append(": ").append(source.toString()); + } + return s.toString(); + } + } + + /** + * The basic data structure that we expect all source files to contain. Note that we don't try to + * parse the flags or segments at this level; that will be done by {@link FlagFactory}. 
+ */ + static final class FlagFileRep { + Map flags; + Map flagValues; + Map segments; + + FlagFileRep() {} + } + + static abstract class FlagFileParser { + private static final FlagFileParser jsonParser = new JsonFlagFileParser(); + private static final FlagFileParser yamlParser = new YamlFlagFileParser(); + + public abstract FlagFileRep parse(InputStream input) throws FileDataException, IOException; + + public static FlagFileParser selectForContent(byte[] data) { + Reader r = new InputStreamReader(new ByteArrayInputStream(data)); + return detectJson(r) ? jsonParser : yamlParser; + } + + private static boolean detectJson(Reader r) { + // A valid JSON file for our purposes must be an object, i.e. it must start with '{' + while (true) { + try { + int ch = r.read(); + if (ch < 0) { + return false; + } + if (ch == '{') { + return true; + } + if (!Character.isWhitespace(ch)) { + return false; + } + } catch (IOException e) { + // COVERAGE: there is no way to simulate this condition in a unit test + return false; + } + } + } + } + + static final class JsonFlagFileParser extends FlagFileParser { + private static final Gson gson = new Gson(); + + @Override + public FlagFileRep parse(InputStream input) throws FileDataException, IOException { + try { + return parseJson(gson.fromJson(new InputStreamReader(input), JsonElement.class)); + } catch (JsonSyntaxException e) { + throw new FileDataException("cannot parse JSON", e); + } + } + + public FlagFileRep parseJson(JsonElement tree) throws FileDataException, IOException { + try { + return gson.fromJson(tree, FlagFileRep.class); + } catch (JsonSyntaxException e) { + // COVERAGE: there is no way to simulate this condition in a unit test + throw new FileDataException("cannot parse JSON", e); + } + } + } + + /** + * Parses a FlagFileRep from a YAML file. Two notes about this implementation: + *

    + * 1. We already have logic for parsing (and building) flags using Gson, and would rather not repeat + * that logic. So, rather than telling SnakeYAML to parse the file directly into a FlagFileRep object - + * and providing SnakeYAML-specific methods for building flags - we are just parsing the YAML into + * simple Java objects and then feeding that data into the Gson parser. This is admittedly inefficient, + * but it also means that we don't have to worry about any differences between how Gson unmarshals an + * object and how the YAML parser does it. We already know Gson does the right thing for the flag and + * segment classes, because that's what we use in the SDK. + *

    + * 2. Ideally, it should be possible to have just one parser, since any valid JSON document is supposed + * to also be parseable as YAML. However, at present, that doesn't work: + *

      + *
    • SnakeYAML (1.19) rejects many valid JSON documents due to simple things like whitespace. + * Apparently this is due to supporting only YAML 1.1, not YAML 1.2 which has full JSON support. + *
    • snakeyaml-engine (https://bitbucket.org/asomov/snakeyaml-engine) says it can handle any JSON, + * but it's only for Java 8 and above. + *
    • YamlBeans (https://github.com/EsotericSoftware/yamlbeans) only works right if you're parsing + * directly into a Java bean instance (which FeatureFlag is not). If you try the "parse to simple + * Java types (and then feed them into Gson)" approach, it does not correctly parse non-string types + * (i.e. it treats true as "true"). (https://github.com/EsotericSoftware/yamlbeans/issues/7) + *
    + */ + static final class YamlFlagFileParser extends FlagFileParser { + private static final Yaml yaml = new Yaml(); + // Using SafeConstructor disables instantiation of arbitrary classes - https://github.com/launchdarkly/java-server-sdk/issues/288 + private static final Gson gson = new Gson(); + private static final JsonFlagFileParser jsonFileParser = new JsonFlagFileParser(); + + @Override + public FlagFileRep parse(InputStream input) throws FileDataException, IOException { + Object root; + try { + root = yaml.load(input); + } catch (YAMLException e) { + throw new FileDataException("unable to parse YAML", e); + } + JsonElement jsonRoot = root == null ? new JsonObject() : gson.toJsonTree(root); + return jsonFileParser.parseJson(jsonRoot); + } + } + + /** + * Creates flag or segment objects from raw JSON. + * + * Note that the {@code FeatureFlag} and {@code Segment} classes are not public in the Java + * client, so we refer to those class objects indirectly via {@code VersionedDataKind}; and + * if we want to construct a flag from scratch, we can't use the constructor but instead must + * build some JSON and then parse that. + */ + static abstract class FlagFactory { + private FlagFactory() {} + + static ItemDescriptor flagFromJson(LDValue jsonTree, int version) { + return FEATURES.deserialize(replaceVersion(jsonTree, version).toJsonString()); + } + + /** + * Constructs a flag that always returns the same value. This is done by giving it a single + * variation and setting the fallthrough variation to that. 
+ */ + static ItemDescriptor flagWithValue(String key, LDValue jsonValue, int version) { + LDValue o = LDValue.buildObject() + .put("key", key) + .put("version", version) + .put("on", true) + .put("variations", LDValue.buildArray().add(jsonValue).build()) + .put("fallthrough", LDValue.buildObject().put("variation", 0).build()) + .build(); + // Note that LaunchDarkly normally prevents you from creating a flag with just one variation, + // but it's the application that validates that; the SDK doesn't care. + return FEATURES.deserialize(o.toJsonString()); + } + + static ItemDescriptor segmentFromJson(LDValue jsonTree, int version) { + return SEGMENTS.deserialize(replaceVersion(jsonTree, version).toJsonString()); + } + + private static LDValue replaceVersion(LDValue objectValue, int version) { + ObjectBuilder b = LDValue.buildObject(); + for (String key: objectValue.keys()) { + b.put(key, objectValue.get(key)); + } + b.put("version", version); + return b.build(); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/Hook.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/Hook.java new file mode 100644 index 0000000..745d723 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/Hook.java @@ -0,0 +1,94 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.FlagsStateOption; + +import java.util.Map; + +/** + * A Hook is a set of user-defined callbacks that are executed by the SDK at various points of interest. To create + * your own hook with customized logic, implement the {@link Hook} interface. + *

    + * Hook currently defines an "evaluation" series, which is composed of two stages: "beforeEvaluation" and + * "afterEvaluation". These are executed by the SDK before and after the evaluation of a feature flag. + *

    + * Multiple hooks may be configured in the SDK. By default, the SDK will execute each hook's beforeEvaluation + * stage in the order they were configured, and afterEvaluation in reverse order. (i.e. myHook1.beforeEvaluation, + * myHook2.beforeEvaluation, myHook2.afterEvaluation, myHook1.afterEvaluation) + */ +public abstract class Hook { + + private final HookMetadata metadata; + + /** + * @return the hook's metadata + */ + public HookMetadata getMetadata() { + return metadata; + } + + /** + * Creates an instance of {@link Hook} with the given name which will be put into its metadata. + * + * @param name a friendly name for the hook + */ + public Hook(String name) { + metadata = new HookMetadata(name) {}; + } + + /** + * {@link #beforeEvaluation(EvaluationSeriesContext, Map)} is executed by the SDK at the start of the evaluation of + * a feature flag. It will not be executed as part of a call to + * {@link com.launchdarkly.sdk.server.LDClient#allFlagsState(LDContext, FlagsStateOption...)}. + *

    + * To provide custom data to the series which will be given back to your {@link Hook} at the next stage of the + * series, return a map containing the custom data. You should initialize this map from the {@code seriesData}. + * + *

    +   * {@code
    +   * HashMap customData = new HashMap<>(seriesData);
    +   * customData.put("foo", "bar");
    +   * return Collections.unmodifiableMap(customData);
    +   * }
    +   * 
    + * + * @param seriesContext container of parameters associated with this evaluation + * @param seriesData immutable data from the previous stage in evaluation series. {@link #beforeEvaluation(EvaluationSeriesContext, Map)} + * is the first stage in this series, so this will be an immutable empty map. + * @return a map containing custom data that will be carried through to the next stage of the series + */ + public Map beforeEvaluation(EvaluationSeriesContext seriesContext, Map seriesData) { + // default implementation is no-op + return seriesData; + } + + /** + * {@link #afterEvaluation(EvaluationSeriesContext, Map, EvaluationDetail)} is executed by the SDK after the + * evaluation of a feature flag. It will not be executed as part of a call to + * {@link com.launchdarkly.sdk.server.LDClient#allFlagsState(LDContext, FlagsStateOption...)}. + *

    + * This is currently the last stage of the evaluation series in the {@link Hook}, but that may not be the case in + * the future. To ensure forward compatibility, return the {@code seriesData} unmodified. + * + *

    +   * {@code
    +   * String value = (String) seriesData.get("foo");
    +   * doAThing(value);
    +   * return seriesData;
    +   * }
    +   * 
    + * + * @param seriesContext container of parameters associated with this evaluation + * @param seriesData immutable data from the previous stage in evaluation series. {@link #beforeEvaluation(EvaluationSeriesContext, Map)} + * is the first stage in this series, so this will be an immutable empty map. + * @param evaluationDetail the result of the evaluation that took place before this hook was invoked + * @return a map containing custom data that will be carried through to the next stage of the series (if added in the future) + */ + public Map afterEvaluation(EvaluationSeriesContext seriesContext, Map seriesData, + EvaluationDetail evaluationDetail) { + // default implementation is no-op + return seriesData; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HookMetadata.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HookMetadata.java new file mode 100644 index 0000000..bdb6c50 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HookMetadata.java @@ -0,0 +1,20 @@ +package com.launchdarkly.sdk.server.integrations; + +/** + * Metadata about the {@link Hook} implementation. + */ +public abstract class HookMetadata { + + private final String name; + + public HookMetadata(String name) { + this.name = name; + } + + /** + * @return a friendly name for the {@link Hook} this {@link HookMetadata} belongs to. 
+ */ + public String getName() { + return name; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HooksConfigurationBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HooksConfigurationBuilder.java new file mode 100644 index 0000000..45357d8 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HooksConfigurationBuilder.java @@ -0,0 +1,52 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.subsystems.HookConfiguration; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Contains methods for configuring the SDK's 'hooks'. + *

    + * If you want to add hooks, use {@link Components#hooks()}, configure accordingly, and pass it + * to {@link com.launchdarkly.sdk.server.LDConfig.Builder#hooks(HooksConfigurationBuilder)}. + * + *

    
    + *     List hooks = createSomeHooks();
    + *     LDConfig config = new LDConfig.Builder()
    + *         .hooks(
    + *             Components.hooks()
    + *                 .setHooks(hooks)
    + *         )
    + *         .build();
    + * 
    + *

    + * Note that this class is abstract; the actual implementation is created by calling {@link Components#hooks()}. + */ +public abstract class HooksConfigurationBuilder { + + /** + * The current set of hooks the builder has. + */ + protected List hooks = Collections.emptyList(); + + /** + * Adds the provided list of hooks to the configuration. Note that the order of hooks is important and controls + * the order in which they will be executed. See {@link Hook} for more details. + * + * @param hooks to be added to the configuration + * @return the builder + */ + public HooksConfigurationBuilder setHooks(List hooks) { + // copy to avoid list manipulations impacting the SDK + this.hooks = Collections.unmodifiableList(new ArrayList<>(hooks)); + return this; + } + + /** + * @return the hooks configuration + */ + abstract public HookConfiguration build(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilder.java new file mode 100644 index 0000000..6bb569b --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilder.java @@ -0,0 +1,172 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.interfaces.HttpAuthentication; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; + +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; + +import javax.net.SocketFactory; +import javax.net.ssl.SSLSocketFactory; +import javax.net.ssl.X509TrustManager; + +/** + * Contains methods for configuring the SDK's networking behavior. + *

    + * If you want to set non-default values for any of these properties, create a builder with + * {@link Components#httpConfiguration()}, change its properties with the methods of this class, + * and pass it to {@link com.launchdarkly.sdk.server.LDConfig.Builder#http(ComponentConfigurer)}: + *

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .http(
    + *           Components.httpConfiguration()
    + *             .connectTimeoutMillis(3000)
    + *             .proxyHostAndPort("my-proxy", 8080)
    + *          )
    + *         .build();
    + * 
    + *

    + * Note that this class is abstract; the actual implementation is created by calling {@link Components#httpConfiguration()}. + * + * @since 4.13.0 + */ +public abstract class HttpConfigurationBuilder implements ComponentConfigurer { + /** + * The default value for {@link #connectTimeout(Duration)}: two seconds. + */ + public static final Duration DEFAULT_CONNECT_TIMEOUT = Duration.ofSeconds(2); + + /** + * The default value for {@link #socketTimeout(Duration)}: 10 seconds. + */ + public static final Duration DEFAULT_SOCKET_TIMEOUT = Duration.ofSeconds(10); + + protected Duration connectTimeout = DEFAULT_CONNECT_TIMEOUT; + protected HttpAuthentication proxyAuth; + protected String proxyHost; + protected Map customHeaders = new HashMap<>(); + protected int proxyPort; + protected Duration socketTimeout = DEFAULT_SOCKET_TIMEOUT; + protected SocketFactory socketFactory; + protected SSLSocketFactory sslSocketFactory; + protected X509TrustManager trustManager; + protected String wrapperName; + protected String wrapperVersion; + + /** + * Sets the connection timeout. This is the time allowed for the SDK to make a socket connection to + * any of the LaunchDarkly services. + *

    + * The default is {@link #DEFAULT_CONNECT_TIMEOUT}. + * + * @param connectTimeout the connection timeout; null to use the default + * @return the builder + */ + public HttpConfigurationBuilder connectTimeout(Duration connectTimeout) { + this.connectTimeout = connectTimeout == null ? DEFAULT_CONNECT_TIMEOUT : connectTimeout; + return this; + } + + /** + * Sets an HTTP proxy for making connections to LaunchDarkly. + * + * @param host the proxy hostname + * @param port the proxy port + * @return the builder + */ + public HttpConfigurationBuilder proxyHostAndPort(String host, int port) { + this.proxyHost = host; + this.proxyPort = port; + return this; + } + + /** + * Sets an authentication strategy for use with an HTTP proxy. This has no effect unless a proxy + * was specified with {@link #proxyHostAndPort(String, int)}. + * + * @param strategy the authentication strategy + * @return the builder + */ + public HttpConfigurationBuilder proxyAuth(HttpAuthentication strategy) { + this.proxyAuth = strategy; + return this; + } + + /** + * Sets the socket timeout. This is the amount of time without receiving data on a connection that the + * SDK will tolerate before signaling an error. This does not apply to the streaming connection + * used by {@link com.launchdarkly.sdk.server.Components#streamingDataSource()}, which has its own + * non-configurable read timeout based on the expected behavior of the LaunchDarkly streaming service. + *

    + * The default is {@link #DEFAULT_SOCKET_TIMEOUT}. + * + * @param socketTimeout the socket timeout; null to use the default + * @return the builder + */ + public HttpConfigurationBuilder socketTimeout(Duration socketTimeout) { + this.socketTimeout = socketTimeout == null ? DEFAULT_SOCKET_TIMEOUT : socketTimeout; + return this; + } + + /** + * Specifies a custom socket configuration for HTTP connections to LaunchDarkly. + *

    + * This uses the standard Java interfaces for configuring socket connections. + * + * @param socketFactory the socket factory + * @return the builder + */ + public HttpConfigurationBuilder socketFactory(SocketFactory socketFactory) { + this.socketFactory = socketFactory; + return this; + } + + /** + * Specifies a custom security configuration for HTTPS connections to LaunchDarkly. + *

    + * This uses the standard Java interfaces for configuring secure socket connections and certificate + * verification. + * + * @param sslSocketFactory the SSL socket factory + * @param trustManager the trust manager + * @return the builder + */ + public HttpConfigurationBuilder sslSocketFactory(SSLSocketFactory sslSocketFactory, X509TrustManager trustManager) { + this.sslSocketFactory = sslSocketFactory; + this.trustManager = trustManager; + return this; + } + + /** + * Specifies a custom HTTP header that should be added to all SDK requests. + *

    + * This may be helpful if you are using a gateway or proxy server that requires a specific header in requests. You + * may add any number of headers. + * + * @param headerName standard HTTP header + * @param headerValue standard HTTP value + * @return the builder + */ + public HttpConfigurationBuilder addCustomHeader(String headerName, String headerValue) { + this.customHeaders.put(headerName, headerValue); + return this; + } + + /** + * For use by wrapper libraries to set an identifying name for the wrapper being used. This will be included in a + * header during requests to the LaunchDarkly servers to allow recording metrics on the usage of + * these wrapper libraries. + * + * @param wrapperName an identifying name for the wrapper library + * @param wrapperVersion version string for the wrapper library + * @return the builder + */ + public HttpConfigurationBuilder wrapper(String wrapperName, String wrapperVersion) { + this.wrapperName = wrapperName; + this.wrapperVersion = wrapperVersion; + return this; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilder.java new file mode 100644 index 0000000..9eca6eb --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilder.java @@ -0,0 +1,164 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.logging.LDLogAdapter; +import com.launchdarkly.logging.LDLogLevel; +import com.launchdarkly.logging.Logs; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration; + +import java.time.Duration; + +/** + * Contains methods for configuring the SDK's logging behavior. + *

    + * If you want to set non-default values for any of these properties, create a builder with + * {@link Components#logging()}, change its properties with the methods of this class, and pass it + * to {@link com.launchdarkly.sdk.server.LDConfig.Builder#logging(ComponentConfigurer)}: + *

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .logging(
    + *           Components.logging()
    + *             .logDataSourceOutageAsErrorAfter(Duration.ofSeconds(120))
    + *          )
    + *         .build();
    + * 
    + *

    + * Note that this class is abstract; the actual implementation is created by calling {@link Components#logging()}. + * + * @since 5.0.0 + */ +public abstract class LoggingConfigurationBuilder implements ComponentConfigurer { + /** + * The default value for {@link #logDataSourceOutageAsErrorAfter(Duration)}: one minute. + */ + public static final Duration DEFAULT_LOG_DATA_SOURCE_OUTAGE_AS_ERROR_AFTER = Duration.ofMinutes(1); + + protected String baseName = null; + protected Duration logDataSourceOutageAsErrorAfter = DEFAULT_LOG_DATA_SOURCE_OUTAGE_AS_ERROR_AFTER; + protected LDLogAdapter logAdapter = null; + protected LDLogLevel minimumLevel = null; + + /** + * Specifies the implementation of logging to use. + *

    + * The com.launchdarkly.logging + * API defines the {@link LDLogAdapter} interface to specify where log output should be sent. + *

    + * The default logging destination, if no adapter is specified, depends on whether + * SLF4J is present in the classpath. If it is, then the SDK uses + * {@link com.launchdarkly.logging.LDSLF4J#adapter()}, causing output to go to SLF4J; what happens to + * the output then is determined by the SLF4J configuration. If SLF4J is not present in the classpath, + * the SDK uses {@link Logs#toConsole()} instead, causing output to go to the {@code System.err} stream. + *

    + * You may use the {@link com.launchdarkly.logging.Logs} factory methods, or a custom implementation, + * to handle log output differently. For instance, you may specify + * {@link com.launchdarkly.logging.Logs#toJavaUtilLogging()} to use the java.util.logging + * framework. + *

    + * For more about logging adapters, + * see the SDK reference guide + * and the API documentation for + * com.launchdarkly.logging. + *

    + * If you don't need to customize any options other than the adapter, you can call + * {@link Components#logging(LDLogAdapter)} as a shortcut rather than using + * {@link LoggingConfigurationBuilder}. + * + * @param logAdapter an {@link LDLogAdapter} for the desired logging implementation + * @return the builder + * @since 5.10.0 + */ + public LoggingConfigurationBuilder adapter(LDLogAdapter logAdapter) { + this.logAdapter = logAdapter; + return this; + } + + /** + * Specifies a custom base logger name. + *

    + * Logger names are used to give context to the log output, indicating that it is from the + * LaunchDarkly SDK instead of another component, or indicating a more specific area of + * functionality within the SDK. Many logging implementations show the logger name + * in brackets, for instance: + *
    

    
    +   *     [com.launchdarkly.sdk.LDClient] INFO: Reconnected to LaunchDarkly stream
    +   * 
    + *

    + * If you are using an adapter for a third-party logging framework such as SLF4J (see + * {@link #adapter(LDLogAdapter)}), most frameworks have a mechanism for filtering log + * output by the logger name. + *

    + * By default, the SDK uses a base logger name of com.launchdarkly.sdk.LDClient. + * Messages will be logged either under this name, or with a suffix to indicate what + * general area of functionality is involved: + *

      + *
    • .DataSource: problems or status messages regarding how the SDK gets + * feature flag data from LaunchDarkly.
    • + *
    • .DataStore: problems or status messages regarding how the SDK stores its + * feature flag data (for instance, if you are using a database).
    • + *
    • .Evaluation: problems in evaluating a feature flag or flags, which were + * caused by invalid flag data or incorrect usage of the SDK rather than for instance a + * database problem.
    • + *
    • .Events: problems or status messages regarding the SDK's delivery of + * analytics event data to LaunchDarkly.
    
    • + *
    + *

    + * Setting {@link #baseLoggerName(String)} to a non-null value overrides the default. The + * SDK still adds the same suffixes to the name, so for instance if you set it to + * "LD", the example message above would show [LD.DataSource]. + * + * @param name the base logger name + * @return the builder + * @since 5.10.0 + */ + public LoggingConfigurationBuilder baseLoggerName(String name) { + this.baseName = name; + return this; + } + + /** + * Specifies the lowest level of logging to enable. + *

    + * This is only applicable when using an implementation of logging that does not have its own + * external configuration mechanism, such as {@link Logs#toConsole()}. It adds a log level filter + * so that log messages at lower levels are suppressed. For instance, setting the minimum level to + * {@link LDLogLevel#INFO} means that DEBUG-level output is disabled. If not specified, + * the default minimum level is {@link LDLogLevel#INFO}. + *

    + * When using a logging framework like SLF4J or {@code java.util.logging} that has its own + * separate mechanism for log filtering, you must use that framework's configuration options for + * log levels; calling {@link #level(LDLogLevel)} in that case has no effect. + * + * @param minimumLevel the lowest level of logging to enable + * @return the builder + * @since 5.10.0 + */ + public LoggingConfigurationBuilder level(LDLogLevel minimumLevel) { + this.minimumLevel = minimumLevel; + return this; + } + + /** + * Sets the time threshold, if any, after which the SDK will log a data source outage at {@code ERROR} + * level instead of {@code WARN} level. + *

    + * A data source outage means that an error condition, such as a network interruption or an error from + * the LaunchDarkly service, is preventing the SDK from receiving feature flag updates. Many outages are + * brief and the SDK can recover from them quickly; in that case it may be undesirable to log an + * {@code ERROR} line, which might trigger an unwanted automated alert depending on your monitoring + * tools. So, by default, the SDK logs such errors at {@code WARN} level. However, if the amount of time + * specified by this method elapses before the data source starts working again, the SDK will log an + * additional message at {@code ERROR} level to indicate that this is a sustained problem. + *

    + * The default is {@link #DEFAULT_LOG_DATA_SOURCE_OUTAGE_AS_ERROR_AFTER}. Setting it to {@code null} + * will disable this feature, so you will only get {@code WARN} messages. + * + * @param logDataSourceOutageAsErrorAfter the error logging threshold, or null + * @return the builder + */ + public LoggingConfigurationBuilder logDataSourceOutageAsErrorAfter(Duration logDataSourceOutageAsErrorAfter) { + this.logDataSourceOutageAsErrorAfter = logDataSourceOutageAsErrorAfter; + return this; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilder.java new file mode 100644 index 0000000..406be59 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilder.java @@ -0,0 +1,198 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig.Builder; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; + +/** + * A configurable factory for a persistent data store. + *

    + * Several database integrations exist for the LaunchDarkly SDK, each with its own behavior and options + * specific to that database; this is described via some implementation of {@link PersistentDataStore}. + * There is also universal behavior that the SDK provides for all persistent data stores, such as caching; + * the {@link PersistentDataStoreBuilder} adds this. + *

    + * After configuring this object, pass it to {@link Builder#dataStore(ComponentConfigurer)} + * to use it in the SDK configuration. For example, using the Redis integration: + * + *

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .dataStore(
    + *             Components.persistentDataStore(
    + *                 Redis.dataStore().url("redis://my-redis-host")
    + *             ).cacheSeconds(15)
    + *         )
    + *         .build();
    + * 
    + * + * In this example, {@code .url()} is an option specifically for the Redis integration, whereas + * {@code cacheSeconds()} is an option that can be used for any persistent data store. + *

    + * Note that this class is abstract; the actual implementation is created by calling + * {@link Components#persistentDataStore(ComponentConfigurer)}. + * @since 4.12.0 + */ +public abstract class PersistentDataStoreBuilder implements ComponentConfigurer { + /** + * The default value for the cache TTL. + */ + public static final Duration DEFAULT_CACHE_TTL = Duration.ofSeconds(15); + + protected final ComponentConfigurer persistentDataStoreConfigurer; // see Components for why these are not private + protected Duration cacheTime = DEFAULT_CACHE_TTL; + protected StaleValuesPolicy staleValuesPolicy = StaleValuesPolicy.EVICT; + protected boolean recordCacheStats = false; + + /** + * Possible values for {@link #staleValuesPolicy(StaleValuesPolicy)}. + */ + public enum StaleValuesPolicy { + /** + * Indicates that when the cache TTL expires for an item, it is evicted from the cache. The next + * attempt to read that item causes a synchronous read from the underlying data store; if that + * fails, no value is available. This is the default behavior. + * + * @see com.google.common.cache.CacheBuilder#expireAfterWrite(long, TimeUnit) + */ + EVICT, + /** + * Indicates that the cache should refresh stale values instead of evicting them. + *

    + * In this mode, an attempt to read an expired item causes a synchronous read from the underlying + * data store, like {@link #EVICT}--but if an error occurs during this refresh, the cache will + * continue to return the previously cached values (if any). This is useful if you prefer the most + * recently cached feature rule set to be returned for evaluation over the default value when + * updates go wrong. + *

    + * See: CacheBuilder + * for more specific information on cache semantics. This mode is equivalent to {@code expireAfterWrite}. + */ + REFRESH, + /** + * Indicates that the cache should refresh stale values asynchronously instead of evicting them. + *

    + * This is the same as {@link #REFRESH}, except that the attempt to refresh the value is done + * on another thread (using a {@link java.util.concurrent.Executor}). In the meantime, the cache + * will continue to return the previously cached value (if any) in a non-blocking fashion to threads + * requesting the stale key. Any exception encountered during the asynchronous reload will cause + * the previously cached value to be retained. + *

    + * This setting is ideal to enable when you desire high performance reads and can accept returning + * stale values for the period of the async refresh. For example, configuring this feature store + * with a very low cache time and enabling this feature would see great performance benefit by + * decoupling calls from network I/O. + *

    + * See: CacheBuilder for + * more specific information on cache semantics. + */ + REFRESH_ASYNC + }; + + /** + * Creates a new builder. + * + * @param persistentDataStoreConfigurer the factory implementation for the specific data store type + */ + protected PersistentDataStoreBuilder(ComponentConfigurer persistentDataStoreConfigurer) { + this.persistentDataStoreConfigurer = persistentDataStoreConfigurer; + } + + /** + * Specifies that the SDK should not use an in-memory cache for the persistent data store. + * This means that every feature flag evaluation will trigger a data store query. + * + * @return the builder + */ + public PersistentDataStoreBuilder noCaching() { + return cacheTime(Duration.ZERO); + } + + /** + * Specifies the cache TTL. Items will be evicted or refreshed (depending on the StaleValuesPolicy) + * after this amount of time from the time when they were originally cached. + *

    + * If the value is zero, caching is disabled (equivalent to {@link #noCaching()}). + *

    + * If the value is negative, data is cached forever (equivalent to {@link #cacheForever()}). + * + * @param cacheTime the cache TTL; null to use the default + * @return the builder + */ + public PersistentDataStoreBuilder cacheTime(Duration cacheTime) { + this.cacheTime = cacheTime == null ? DEFAULT_CACHE_TTL : cacheTime; + return this; + } + + /** + * Shortcut for calling {@link #cacheTime(Duration)} with a duration in milliseconds. + * + * @param millis the cache TTL in milliseconds + * @return the builder + */ + public PersistentDataStoreBuilder cacheMillis(long millis) { + return cacheTime(Duration.ofMillis(millis)); + } + + /** + * Shortcut for calling {@link #cacheTime(Duration)} with a duration in seconds. + * + * @param seconds the cache TTL in seconds + * @return the builder + */ + public PersistentDataStoreBuilder cacheSeconds(long seconds) { + return cacheTime(Duration.ofSeconds(seconds)); + } + + /** + * Specifies that the in-memory cache should never expire. In this mode, data will be written + * to both the underlying persistent store and the cache, but will only ever be read from + * the persistent store if the SDK is restarted. + *

    + * Use this mode with caution: it means that in a scenario where multiple processes are sharing + * the database, and the current process loses connectivity to LaunchDarkly while other processes + * are still receiving updates and writing them to the database, the current process will have + * stale data. + * + * @return the builder + */ + public PersistentDataStoreBuilder cacheForever() { + return cacheTime(Duration.ofMillis(-1)); + } + + /** + * Specifies how the cache (if any) should deal with old values when the cache TTL expires. The default + * is {@link StaleValuesPolicy#EVICT}. This property has no effect if caching is disabled. + * + * @param staleValuesPolicy a {@link StaleValuesPolicy} constant + * @return the builder + */ + public PersistentDataStoreBuilder staleValuesPolicy(StaleValuesPolicy staleValuesPolicy) { + this.staleValuesPolicy = staleValuesPolicy == null ? StaleValuesPolicy.EVICT : staleValuesPolicy; + return this; + } + + /** + * Enables monitoring of the in-memory cache. + *

    + * If set to true, this makes caching statistics available through the {@link DataStoreStatusProvider} + * that you can obtain from the client instance. This may slightly decrease performance, due to the + * need to record statistics for each cache operation. + *

    + * By default, it is false: statistics will not be recorded and the {@link DataStoreStatusProvider#getCacheStats()} + * method will return null. + * + * @param recordCacheStats true to record caching statiistics + * @return the builder + * @since 5.0.0 + */ + public PersistentDataStoreBuilder recordCacheStats(boolean recordCacheStats) { + this.recordCacheStats = recordCacheStats; + return this; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilder.java new file mode 100644 index 0000000..7c73563 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilder.java @@ -0,0 +1,69 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig.Builder; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; + +import java.time.Duration; + +/** + * Contains methods for configuring the polling data source. + *

    + * Polling is not the default behavior; by default, the SDK uses a streaming connection to receive feature flag + * data from LaunchDarkly. In polling mode, the SDK instead makes a new HTTP request to LaunchDarkly at regular + * intervals. HTTP caching allows it to avoid redundantly downloading data if there have been no changes, but + * polling is still less efficient than streaming and should only be used on the advice of LaunchDarkly support. + *

    + * To use polling mode, create a builder with {@link Components#pollingDataSource()}, + * change its properties with the methods of this class, and pass it to {@link Builder#dataSource(ComponentConfigurer)}: + *

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .dataSource(Components.pollingDataSource().pollInterval(Duration.ofSeconds(45)))
    + *         .build();
    + * 
    + *

    + * Note that this class is abstract; the actual implementation is created by calling {@link Components#pollingDataSource()}. + * + * @since 4.12.0 + */ +public abstract class PollingDataSourceBuilder implements ComponentConfigurer { + /** + * The default and minimum value for {@link #pollInterval(Duration)}: 30 seconds. + */ + public static final Duration DEFAULT_POLL_INTERVAL = Duration.ofSeconds(30); + + protected Duration pollInterval = DEFAULT_POLL_INTERVAL; + + protected String payloadFilter; + + /** + * Sets the interval at which the SDK will poll for feature flag updates. + *

    + * The default and minimum value is {@link #DEFAULT_POLL_INTERVAL}. Values less than this will be + * set to the default. + * + * @param pollInterval the polling interval; null to use the default + * @return the builder + */ + public PollingDataSourceBuilder pollInterval(Duration pollInterval) { + if (pollInterval == null) { + this.pollInterval = DEFAULT_POLL_INTERVAL; + } else { + this.pollInterval = pollInterval.compareTo(DEFAULT_POLL_INTERVAL) < 0 ? DEFAULT_POLL_INTERVAL : pollInterval; + } + return this; + } + + /** + * Sets the Payload Filter that will be used to filter the objects (flags, segments, etc.) + * from this data source. + * + * @param payloadFilter the filter to be used + * @return the builder + */ + public PollingDataSourceBuilder payloadFilter(String payloadFilter) { + this.payloadFilter = payloadFilter; + return this; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilder.java new file mode 100644 index 0000000..e0c0458 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilder.java @@ -0,0 +1,203 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.interfaces.ServiceEndpoints; +import java.net.URI; + +/** + * Contains methods for configuring the SDK's service URIs. + *

    + * If you want to set non-default values for any of these properties, create a builder with {@link Components#serviceEndpoints()}, + * change its properties with the methods of this class, and pass it to {@link LDConfig.Builder#serviceEndpoints(ServiceEndpointsBuilder)}. + *

    + * The default behavior, if you do not change any of these properties, is that the SDK will connect to the standard endpoints + * in the LaunchDarkly production service. There are several use cases for changing these properties: + *

      + *
    • You are using the LaunchDarkly Relay Proxy. + * In this case, set {@link #relayProxy(URI)}. + *
    • You are connecting to a private instance of LaunchDarkly, rather than the standard production services. + * In this case, there will be custom base URIs for each service, so you must set {@link #streaming(URI)}, + * {@link #polling(URI)}, and {@link #events(URI)}. + *
    • You are connecting to a test fixture that simulates the service endpoints. In this case, you may set the + * base URIs to whatever you want, although the SDK will still set the URI paths to the expected paths for + * LaunchDarkly services. + *
    + *

    + * Each of the setter methods can be called with either a {@link URI} or an equivalent string. + * Passing a string that is not a valid URI will cause an immediate {@link IllegalArgumentException}. + *

    + * If you are using a private instance and you set some of the base URIs, but not all of them, the SDK + * will log an error and may not work properly. The only exception is if you have explicitly disabled + * the SDK's use of one of the services: for instance, if you have disabled analytics events with + * {@link Components#noEvents()}, you do not have to set {@link #events(URI)}. + * + *

    
    + *     // Example of specifying a Relay Proxy instance
    + *     LDConfig config = new LDConfig.Builder()
    + *         .serviceEndpoints(
    + *             Components.serviceEndpoints()
    + *                 .relayProxy("http://my-relay-hostname:80")
    + *         )
    + *         .build();
    + * 
    + *     // Example of specifying a private LaunchDarkly instance
    + *     LDConfig config = new LDConfig.Builder()
    + *         .serviceEndpoints(
    + *             Components.serviceEndpoints()
    + *                 .streaming("https://stream.mycompany.launchdarkly.com")
    + *                 .polling("https://app.mycompany.launchdarkly.com")
    + *                 .events("https://events.mycompany.launchdarkly.com")
    
    + *         )
    + *         .build();
    + * 
    + * + * @since 5.9.0 + */ +public abstract class ServiceEndpointsBuilder { + protected URI streamingBaseUri; + protected URI pollingBaseUri; + protected URI eventsBaseUri; + + /** + * Sets a custom base URI for the events service. + *

    + * You should only call this method if you are using a private instance or test fixture + * (see {@link ServiceEndpointsBuilder}). If you are using the LaunchDarkly Relay Proxy, + * call {@link #relayProxy(URI)} instead. + *

    
    +   *     LDConfig config = new LDConfig.Builder()
    +   *       .serviceEndpoints(
    +   *           Components.serviceEndpoints()
    +   *               .streaming("https://stream.mycompany.launchdarkly.com")
    +   *               .polling("https://app.mycompany.launchdarkly.com")
    +   *               .events("https://events.mycompany.launchdarkly.com")
    +   *       )
    +   *       .build();
    +   * 
    + * + * @param eventsBaseUri the base URI of the events service; null to use the default + * @return the builder + */ + public ServiceEndpointsBuilder events(URI eventsBaseUri) { + this.eventsBaseUri = eventsBaseUri; + return this; + } + + /** + * Equivalent to {@link #events(URI)}, specifying the URI as a string. + * @param eventsBaseUri the base URI of the events service; null to use the default + * @return the builder + */ + public ServiceEndpointsBuilder events(String eventsBaseUri) { + return events(eventsBaseUri == null ? null : URI.create(eventsBaseUri)); + } + + /** + * Sets a custom base URI for the polling service. + *

    + * You should only call this method if you are using a private instance or test fixture + * (see {@link ServiceEndpointsBuilder}). If you are using the LaunchDarkly Relay Proxy, + * call {@link #relayProxy(URI)} instead. + *

    
    +   *     LDConfig config = new LDConfig.Builder()
    +   *       .serviceEndpoints(
    +   *           Components.serviceEndpoints()
    +   *               .streaming("https://stream.mycompany.launchdarkly.com")
    +   *               .polling("https://app.mycompany.launchdarkly.com")
    +   *               .events("https://events.mycompany.launchdarkly.com")
    +   *       )
    +   *       .build();
    +   * 
    + * + * @param pollingBaseUri the base URI of the polling service; null to use the default + * @return the builder + */ + public ServiceEndpointsBuilder polling(URI pollingBaseUri) { + this.pollingBaseUri = pollingBaseUri; + return this; + } + + /** + * Equivalent to {@link #polling(URI)}, specifying the URI as a string. + * @param pollingBaseUri the base URI of the events service; null to use the default + * @return the builder + */ + public ServiceEndpointsBuilder polling(String pollingBaseUri) { + return polling(pollingBaseUri == null ? null : URI.create(pollingBaseUri)); + } + + /** + * Specifies a single base URI for a Relay Proxy instance. + *

    + * When using the LaunchDarkly Relay Proxy, the SDK only needs to know the single base URI + * of the Relay Proxy, which will provide all the proxied service endpoints. + *

    
    +   *     LDConfig config = new LDConfig.Builder()
    +   *       .serviceEndpoints(
    +   *           Components.serviceEndpoints()
    +   *               .relayProxy("http://my-relay-hostname:8080")
    +   *       )
    +   *       .build();
    +   * 
    + * + * @param relayProxyBaseUri the Relay Proxy base URI, or null to reset to default endpoints + * @return the builder + */ + public ServiceEndpointsBuilder relayProxy(URI relayProxyBaseUri) { + this.eventsBaseUri = relayProxyBaseUri; + this.pollingBaseUri = relayProxyBaseUri; + this.streamingBaseUri = relayProxyBaseUri; + return this; + } + + /** + * Equivalent to {@link #relayProxy(URI)}, specifying the URI as a string. + * @param relayProxyBaseUri the Relay Proxy base URI, or null to reset to default endpoints + * @return the builder + */ + public ServiceEndpointsBuilder relayProxy(String relayProxyBaseUri) { + return relayProxy(relayProxyBaseUri == null ? null : URI.create(relayProxyBaseUri)); + } + + /** + * Sets a custom base URI for the streaming service. + *

    + * You should only call this method if you are using a private instance or test fixture + * (see {@link ServiceEndpointsBuilder}). If you are using the LaunchDarkly Relay Proxy, + * call {@link #relayProxy(URI)} instead. + *

    
    +   *     LDConfig config = new LDConfig.Builder()
    +   *       .serviceEndpoints(
    +   *           Components.serviceEndpoints()
    +   *               .streaming("https://stream.mycompany.launchdarkly.com")
    +   *               .polling("https://app.mycompany.launchdarkly.com")
    +   *               .events("https://events.mycompany.launchdarkly.com")
    +   *       )
    +   *       .build();
    +   * 
    + * + * @param streamingBaseUri the base URI of the streaming service; null to use the default + * @return the builder + */ + public ServiceEndpointsBuilder streaming(URI streamingBaseUri) { + this.streamingBaseUri = streamingBaseUri; + return this; + } + + /** + * Equivalent to {@link #streaming(URI)}, specifying the URI as a string. + * @param streamingBaseUri the base URI of the events service; null to use the default + * @return the builder + */ + public ServiceEndpointsBuilder streaming(String streamingBaseUri) { + return streaming(streamingBaseUri == null ? null : URI.create(streamingBaseUri)); + } + + /** + * Called internally by the SDK to create a configuration instance. Applications do not need + * to call this method. + * @return the configuration object + */ + abstract public ServiceEndpoints createServiceEndpoints(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilder.java new file mode 100644 index 0000000..2eb7f63 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilder.java @@ -0,0 +1,65 @@ +package com.launchdarkly.sdk.server.integrations; + +import java.time.Duration; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig.Builder; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; + +/** + * Contains methods for configuring the streaming data source. + *

    + * By default, the SDK uses a streaming connection to receive feature flag data from LaunchDarkly. If you want + * to customize the behavior of the connection, create a builder with {@link Components#streamingDataSource()}, + * change its properties with the methods of this class, and pass it to {@link Builder#dataSource(ComponentConfigurer)}: + *

    
    + *     LDConfig config = new LDConfig.Builder()
    + *         .dataSource(Components.streamingDataSource().initialReconnectDelay(Duration.ofMillis(500)))
    + *         .build();
    + * 
    + *

    + * Note that this class is abstract; the actual implementation is created by calling {@link Components#streamingDataSource()}. + * + * @since 4.12.0 + */ +public abstract class StreamingDataSourceBuilder implements ComponentConfigurer { + /** + * The default value for {@link #initialReconnectDelay(Duration)}: 1000 milliseconds. + */ + public static final Duration DEFAULT_INITIAL_RECONNECT_DELAY = Duration.ofMillis(1000); + + protected Duration initialReconnectDelay = DEFAULT_INITIAL_RECONNECT_DELAY; + + protected String payloadFilter; + + /** + * Sets the initial reconnect delay for the streaming connection. + *

    + * The streaming service uses a backoff algorithm (with jitter) every time the connection needs + * to be reestablished. The delay for the first reconnection will start near this value, and then + * increase exponentially for any subsequent connection failures. + *

    + * The default value is {@link #DEFAULT_INITIAL_RECONNECT_DELAY}. + * + * @param initialReconnectDelay the reconnect time base value; null to use the default + * @return the builder + */ + + public StreamingDataSourceBuilder initialReconnectDelay(Duration initialReconnectDelay) { + this.initialReconnectDelay = initialReconnectDelay == null ? DEFAULT_INITIAL_RECONNECT_DELAY : initialReconnectDelay; + return this; + } + + /** + * Sets the Payload Filter that will be used to filter the objects (flags, segments, etc.) + * from this data source. + * + * @param payloadFilter the filter to be used + * @return the builder + */ + public StreamingDataSourceBuilder payloadFilter(String payloadFilter) { + this.payloadFilter = payloadFilter; + return this; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/TestData.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/TestData.java new file mode 100644 index 0000000..76e4bd2 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/TestData.java @@ -0,0 +1,931 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.ImmutableSortedSet; +import com.google.common.collect.Iterables; +import com.launchdarkly.sdk.ArrayBuilder; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.server.DataModel; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import 
com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Future; + +import static java.util.concurrent.CompletableFuture.completedFuture; + +/** + * A mechanism for providing dynamically updatable feature flag state in a simplified form to an SDK + * client in test scenarios. + *

    + * Unlike {@link FileData}, this mechanism does not use any external resources. It provides only + * the data that the application has put into it using the {@link #update(FlagBuilder)} method. + * + *

    
    + *     TestData td = TestData.dataSource();
    + *     td.update(td.flag("flag-key-1").booleanFlag().variationForAll(true));
    + *     
    + *     LDConfig config = new LDConfig.Builder()
    + *         .dataSource(td)
    + *         .build();
    + *     LDClient client = new LDClient(sdkKey, config);
    + *     
    + *     // flags can be updated at any time:
    + *     td.update(td.flag("flag-key-2")
    + *         .variationForUser("some-user-key", true)
    + *         .fallthroughVariation(false));
    + * 
    + * + * The above example uses a simple boolean flag, but more complex configurations are possible using + * the methods of the {@link FlagBuilder} that is returned by {@link #flag(String)}. {@link FlagBuilder} + * supports many of the ways a flag can be configured on the LaunchDarkly dashboard, but does not + * currently support 1. rule operators other than "in" and "not in", or 2. percentage rollouts. + *

    + * If the same {@code TestData} instance is used to configure multiple {@code LDClient} instances, + * any changes made to the data will propagate to all of the {@code LDClient}s. + * + * @since 5.1.0 + * @see FileData + */ +public final class TestData implements ComponentConfigurer { + private final Object lock = new Object(); + private final Map currentFlags = new HashMap<>(); + private final Map currentBuilders = new HashMap<>(); + private final List instances = new CopyOnWriteArrayList<>(); + + /** + * Creates a new instance of the test data source. + *

    + * See {@link TestData} for details. + * + * @return a new configurable test data source + */ + public static TestData dataSource() { + return new TestData(); + } + + private TestData() {} + + /** + * Creates or copies a {@link FlagBuilder} for building a test flag configuration. + *

    + * If this flag key has already been defined in this {@code TestData} instance, then the builder + * starts with the same configuration that was last provided for this flag. + *

    + * Otherwise, it starts with a new default configuration in which the flag has {@code true} and + * {@code false} variations, is {@code true} for all users when targeting is turned on and + * {@code false} otherwise, and currently has targeting turned on. You can change any of those + * properties, and provide more complex behavior, using the {@link FlagBuilder} methods. + *

    + * Once you have set the desired configuration, pass the builder to {@link #update(FlagBuilder)}. + * + * @param key the flag key + * @return a flag configuration builder + * @see #update(FlagBuilder) + */ + public FlagBuilder flag(String key) { + FlagBuilder existingBuilder; + synchronized (lock) { + existingBuilder = currentBuilders.get(key); + } + if (existingBuilder != null) { + return new FlagBuilder(existingBuilder); + } + return new FlagBuilder(key).booleanFlag(); + } + + /** + * Updates the test data with the specified flag configuration. + *

    + * This has the same effect as if a flag were added or modified on the LaunchDarkly dashboard. + * It immediately propagates the flag change to any {@code LDClient} instance(s) that you have + * already configured to use this {@code TestData}. If no {@code LDClient} has been started yet, + * it simply adds this flag to the test data which will be provided to any {@code LDClient} that + * you subsequently configure. + *

    + * Any subsequent changes to this {@link FlagBuilder} instance do not affect the test data, + * unless you call {@link #update(FlagBuilder)} again. + * + * @param flagBuilder a flag configuration builder + * @return the same {@code TestData} instance + * @see #flag(String) + */ + public TestData update(FlagBuilder flagBuilder) { + String key = flagBuilder.key; + FlagBuilder clonedBuilder = new FlagBuilder(flagBuilder); + ItemDescriptor newItem = null; + + synchronized (lock) { + ItemDescriptor oldItem = currentFlags.get(key); + int oldVersion = oldItem == null ? 0 : oldItem.getVersion(); + newItem = flagBuilder.createFlag(oldVersion + 1); + currentFlags.put(key, newItem); + currentBuilders.put(key, clonedBuilder); + } + + for (DataSourceImpl instance: instances) { + instance.updates.upsert(DataModel.FEATURES, key, newItem); + } + + return this; + } + + /** + * Simulates a change in the data source status. + *

    + * Use this if you want to test the behavior of application code that uses + * {@link com.launchdarkly.sdk.server.LDClient#getDataSourceStatusProvider()} to track whether the data + * source is having problems (for example, a network failure interrupting the streaming connection). It + * does not actually stop the {@code TestData} data source from working, so even if you have simulated + * an outage, calling {@link #update(FlagBuilder)} will still send updates. + * + * @param newState one of the constants defined by {@link com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State} + * @param newError an {@link com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo} instance, + * or null + * @return the same {@code TestData} instance + */ + public TestData updateStatus(DataSourceStatusProvider.State newState, DataSourceStatusProvider.ErrorInfo newError) { + for (DataSourceImpl instance: instances) { + instance.updates.updateStatus(newState, newError); + } + return this; + } + + @Override + public DataSource build(ClientContext context) { + DataSourceImpl instance = new DataSourceImpl(context.getDataSourceUpdateSink()); + synchronized (lock) { + instances.add(instance); + } + return instance; + } + + private FullDataSet makeInitData() { + ImmutableMap copiedData; + synchronized (lock) { + copiedData = ImmutableMap.copyOf(currentFlags); + } + return new FullDataSet<>(ImmutableMap.of(DataModel.FEATURES, new KeyedItems<>(copiedData.entrySet())).entrySet()); + } + + private void closedInstance(DataSourceImpl instance) { + synchronized (lock) { + instances.remove(instance); + } + } + + /** + * A builder for feature flag configurations to be used with {@link TestData}. 
+ * + * @see TestData#flag(String) + * @see TestData#update(FlagBuilder) + */ + public static final class FlagBuilder { + private static final int TRUE_VARIATION_FOR_BOOLEAN = 0; + private static final int FALSE_VARIATION_FOR_BOOLEAN = 1; + + final String key; + int offVariation; + boolean on; + int fallthroughVariation; + CopyOnWriteArrayList variations; + private Long samplingRatio; + private Long migrationCheckRatio; + + final Map>> targets = new TreeMap<>(); // TreeMap enforces ordering for test determinacy + final List rules = new ArrayList<>(); + + private FlagBuilder(String key) { + this.key = key; + this.on = true; + this.variations = new CopyOnWriteArrayList<>(); + } + + private FlagBuilder(FlagBuilder from) { + this.key = from.key; + this.offVariation = from.offVariation; + this.on = from.on; + this.fallthroughVariation = from.fallthroughVariation; + this.variations = new CopyOnWriteArrayList<>(from.variations); + for (ContextKind contextKind: from.targets.keySet()) { + this.targets.put(contextKind, new TreeMap<>(from.targets.get(contextKind))); + } + this.rules.addAll(from.rules); + } + + private boolean isBooleanFlag() { + return variations.size() == 2 && + variations.get(TRUE_VARIATION_FOR_BOOLEAN).equals(LDValue.of(true)) && + variations.get(FALSE_VARIATION_FOR_BOOLEAN).equals(LDValue.of(false)); + } + + /** + * A shortcut for setting the flag to use the standard boolean configuration. + *

    + * This is the default for all new flags created with {@link TestData#flag(String)}. The flag + * will have two variations, {@code true} and {@code false} (in that order); it will return + * {@code false} whenever targeting is off, and {@code true} when targeting is on if no other + * settings specify otherwise. + * + * @return the builder + */ + public FlagBuilder booleanFlag() { + if (isBooleanFlag()) { + return this; + } + return variations(LDValue.of(true), LDValue.of(false)) + .fallthroughVariation(TRUE_VARIATION_FOR_BOOLEAN) + .offVariation(FALSE_VARIATION_FOR_BOOLEAN); + } + + /** + * Sets targeting to be on or off for this flag. + *

    + * The effect of this depends on the rest of the flag configuration, just as it does on the + * real LaunchDarkly dashboard. In the default configuration that you get from calling + * {@link TestData#flag(String)} with a new flag key, the flag will return {@code false} + * whenever targeting is off, and {@code true} when targeting is on. + * + * @param on true if targeting should be on + * @return the builder + */ + public FlagBuilder on(boolean on) { + this.on = on; + return this; + } + + /** + * Specifies the fallthrough variation for a boolean flag. The fallthrough is the value + * that is returned if targeting is on and the context was not matched by a more specific + * target or rule. + *

    + * If the flag was previously configured with other variations, this also changes it to a + * boolean flag. + * + * @param value true if the flag should return true by default when targeting is on + * @return the builder + */ + public FlagBuilder fallthroughVariation(boolean value) { + return this.booleanFlag().fallthroughVariation(variationForBoolean(value)); + } + + /** + * Specifies the index of the fallthrough variation. The fallthrough is the variation + * that is returned if targeting is on and the context was not matched by a more specific + * target or rule. + * + * @param variationIndex the desired fallthrough variation: 0 for the first, 1 for the second, etc. + * @return the builder + */ + public FlagBuilder fallthroughVariation(int variationIndex) { + this.fallthroughVariation = variationIndex; + return this; + } + + /** + * Specifies the off variation for a boolean flag. This is the variation that is returned + * whenever targeting is off. + * + * @param value true if the flag should return true when targeting is off + * @return the builder + */ + public FlagBuilder offVariation(boolean value) { + return this.booleanFlag().offVariation(variationForBoolean(value)); + } + + /** + * Specifies the index of the off variation. This is the variation that is returned + * whenever targeting is off. + * + * @param variationIndex the desired off variation: 0 for the first, 1 for the second, etc. + * @return the builder + */ + public FlagBuilder offVariation(int variationIndex) { + this.offVariation = variationIndex; + return this; + } + + /** + * Sets the flag to always return the specified boolean variation for all contexts. + *

    + * Targeting is switched on, any existing targets or rules are removed, and the flag's variations are + * set to true and false. The fallthrough variation is set to the specified value. The off variation is + * left unchanged. + * + * @param variation the desired true/false variation to be returned for all contexts + * @return the builder + * @see #variationForAll(int) + * @see #valueForAll(LDValue) + * @since 5.10.0 + */ + public FlagBuilder variationForAll(boolean variation) { + return booleanFlag().variationForAll(variationForBoolean(variation)); + } + + /** + * Sets the flag to always return the specified variation for all contexts. + *

    + * The variation is specified by number, out of whatever variation values have already been + * defined. Targeting is switched on, and any existing targets or rules are removed. The fallthrough + * variation is set to the specified value. The off variation is left unchanged. + * + * @param variationIndex the desired variation: 0 for the first, 1 for the second, etc. + * @return the builder + * @see #variationForAll(boolean) + * @see #valueForAll(LDValue) + */ + public FlagBuilder variationForAll(int variationIndex) { + return on(true).clearRules().clearTargets().fallthroughVariation(variationIndex); + } + + /** + * Sets the flag to always return the specified variation value for all users. + *

    + * The value may be of any JSON type, as defined by {@link LDValue}. This method changes the + * flag to have only a single variation, which is this value, and to return the same + * variation regardless of whether targeting is on or off. Any existing targets or rules + * are removed. + * + * @param value the desired value to be returned for all users + * @return the builder + * @see #variationForAll(boolean) + * @see #variationForAll(int) + */ + public FlagBuilder valueForAll(LDValue value) { + variations.clear(); + variations.add(value); + return variationForAll(0); + } + + /** + * Sets the flag to return the specified boolean variation for a specific user key (that is, + * for a context with that key whose context kind is "user") when targeting is on. + *

    + * This has no effect when targeting is turned off for the flag. + *

    + * If the flag was not already a boolean flag, this also changes it to a boolean flag. + * + * @param userKey a user key + * @param variation the desired true/false variation to be returned for this user when + * targeting is on + * @return the builder + * @see #variationForUser(String, int) + * @see #variationForKey(ContextKind, String, boolean) + */ + public FlagBuilder variationForUser(String userKey, boolean variation) { + return variationForKey(ContextKind.DEFAULT, userKey, variation); + } + + /** + * Sets the flag to return the specified boolean variation for a specific context, identified + * by context kind and key, when targeting is on. + *

    + * This has no effect when targeting is turned off for the flag. + *

    + * If the flag was not already a boolean flag, this also changes it to a boolean flag. + * + * @param contextKind the context kind + * @param key the context key + * @param variation the desired true/false variation to be returned for this context when + * targeting is on + * @return the builder + * @see #variationForKey(ContextKind, String, int) + * @see #variationForUser(String, boolean) + * @since 6.0.0 + */ + public FlagBuilder variationForKey(ContextKind contextKind, String key, boolean variation) { + return booleanFlag().variationForKey(contextKind, key, variationForBoolean(variation)); + } + + /** + * Sets the flag to return the specified variation for a specific user key (that is, + * for a context with that key whose context kind is "user") when targeting is on. + *

    + * This has no effect when targeting is turned off for the flag. + *

    + * The variation is specified by number, out of whatever variation values have already been + * defined. + * + * @param userKey a user key + * @param variationIndex the desired variation to be returned for this user when targeting is on: + * 0 for the first, 1 for the second, etc. + * @return the builder + * @see #variationForKey(ContextKind, String, int) + * @see #variationForUser(String, boolean) + */ + public FlagBuilder variationForUser(String userKey, int variationIndex) { + return variationForKey(ContextKind.DEFAULT, userKey, variationIndex); + } + + /** + * Sets the flag to return the specified boolean variation for a specific context, identified + * by context kind and key, when targeting is on. + *

    + * This has no effect when targeting is turned off for the flag. + *

    + * If the flag was not already a boolean flag, this also changes it to a boolean flag. + * + * @param contextKind the context kind + * @param key the context key + * @param variationIndex the desired variation to be returned for this context when targeting is on: + * 0 for the first, 1 for the second, etc. + * @return the builder + * @see #variationForKey(ContextKind, String, boolean) + * @see #variationForUser(String, int) + * @since 6.0.0 + */ + public FlagBuilder variationForKey(ContextKind contextKind, String key, int variationIndex) { + if (contextKind == null) { + contextKind = ContextKind.DEFAULT; + } + Map> keysByVariation = targets.get(contextKind); + if (keysByVariation == null) { + keysByVariation = new TreeMap<>(); // TreeMap keeps variations in order for test determinacy + targets.put(contextKind, keysByVariation); + } + for (int i = 0; i < variations.size(); i++) { + ImmutableSet keys = keysByVariation.get(i); + if (i == variationIndex) { + if (keys == null) { + keysByVariation.put(i, ImmutableSortedSet.of(key)); + } else if (!keys.contains(key)) { + keysByVariation.put(i, ImmutableSortedSet.naturalOrder().addAll(keys).add(key).build()); + } + } else { + if (keys != null && keys.contains(key)) { + keysByVariation.put(i, ImmutableSortedSet.copyOf(Iterables.filter(keys, k -> !k.equals(key)))); + } + } + } + // Note, we use ImmutableSortedSet just to make the output determinate for our own testing + return this; + } + + /** + * Changes the allowable variation values for the flag. + *

    + * The value may be of any JSON type, as defined by {@link LDValue}. For instance, a boolean flag + * normally has {@code LDValue.of(true), LDValue.of(false)}; a string-valued flag might have + * {@code LDValue.of("red"), LDValue.of("green")}; etc. + * + * @param values the desired variations + * @return the builder + */ + public FlagBuilder variations(LDValue... values) { + variations.clear(); + for (LDValue v: values) { + variations.add(v); + } + return this; + } + + /** + * Starts defining a flag rule, using the "is one of" operator. This matching expression only + * applies to contexts of a specific kind. + *

    + * For example, this creates a rule that returns {@code true} if the name attribute for the + * "company" context is "Ella" or "Monsoon": + * + *

    
    +     *     testData.flag("flag")
    +     *         .ifMatch(ContextKind.of("company"), "name",
    +     *             LDValue.of("Ella"), LDValue.of("Monsoon"))
    +     *         .thenReturn(true);
    +     * 
    + * + * @param contextKind the context kind to match + * @param attribute the attribute to match against + * @param values values to compare to + * @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#thenReturn(boolean)} or + * {@link FlagRuleBuilder#thenReturn(int)} to finish the rule, or add more tests with another + * method like {@link FlagRuleBuilder#andMatch(ContextKind, String, LDValue...)} + * @see #ifMatch(String, LDValue...) + * @see #ifNotMatch(ContextKind, String, LDValue...) + * @since 6.0.0 + */ + public FlagRuleBuilder ifMatch(ContextKind contextKind, String attribute, LDValue... values) { + return new FlagRuleBuilder().andMatch(contextKind, attribute, values); + } + + /** + * Starts defining a flag rule, using the "is one of" operator. This is a shortcut for calling + * {@link #ifMatch(ContextKind, String, LDValue...)} with {@link ContextKind#DEFAULT} as the + * context kind. + *

    + * For example, this creates a rule that returns {@code true} if the name is "Patsy" or "Edina": + * + *

    
    +     *     testData.flag("flag")
    +     *         .ifMatch("name", LDValue.of("Patsy"), LDValue.of("Edina"))
    +     *         .thenReturn(true);
    +     * 
    + * + * @param attribute the user attribute to match against + * @param values values to compare to + * @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#thenReturn(boolean)} or + * {@link FlagRuleBuilder#thenReturn(int)} to finish the rule, or add more tests with another + * method like {@link FlagRuleBuilder#andMatch(String, LDValue...)} + * @see #ifMatch(ContextKind, String, LDValue...) + * @see #ifNotMatch(String, LDValue...) + */ + public FlagRuleBuilder ifMatch(String attribute, LDValue... values) { + return ifMatch(ContextKind.DEFAULT, attribute, values); + } + + /** + * Starts defining a flag rule, using the "is not one of" operator. This matching expression only + * applies to contexts of a specific kind. + *

    + * For example, this creates a rule that returns {@code true} if the name attribute for the + * "company" context is neither "Pendant" nor "Sterling Cooper": + * + *

    
    +     *     testData.flag("flag")
    +     *         .ifNotMatch(ContextKind.of("company"), "name",
    +     *             LDValue.of("Pendant"), LDValue.of("Sterling Cooper"))
    +     *         .thenReturn(true);
    +     * 
    + * + * @param contextKind the context kind to match + * @param attribute the attribute to match against + * @param values values to compare to + * @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#thenReturn(boolean)} or + * {@link FlagRuleBuilder#thenReturn(int)} to finish the rule, or add more tests with another + * method like {@link FlagRuleBuilder#andMatch(ContextKind, String, LDValue...)} + * @see #ifMatch(ContextKind, String, LDValue...) + * @see #ifNotMatch(String, LDValue...) + * @since 6.0.0 + */ + public FlagRuleBuilder ifNotMatch(ContextKind contextKind, String attribute, LDValue... values) { + return new FlagRuleBuilder().andNotMatch(contextKind, attribute, values); + } + + /** + * Starts defining a flag rule, using the "is not one of" operator. This is a shortcut for calling + * {@link #ifNotMatch(ContextKind, String, LDValue...)} with {@link ContextKind#DEFAULT} as the + * context kind. + *

    + * For example, this creates a rule that returns {@code true} if the name is neither "Saffron" nor "Bubble": + * + *

    
    +     *     testData.flag("flag")
    +     *         .ifNotMatch("name", LDValue.of("Saffron"), LDValue.of("Bubble"))
    +     *         .thenReturn(true);
    +     * 
    + + * @param attribute the user attribute to match against + * @param values values to compare to + * @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#thenReturn(boolean)} or + * {@link FlagRuleBuilder#thenReturn(int)} to finish the rule, or add more tests with another + * method like {@link FlagRuleBuilder#andMatch(String, LDValue...)} + * @see #ifNotMatch(ContextKind, String, LDValue...) + * @see #ifMatch(String, LDValue...) + */ + public FlagRuleBuilder ifNotMatch(String attribute, LDValue... values) { + return ifNotMatch(ContextKind.DEFAULT, attribute, values); + } + + /** + * Removes any existing rules from the flag. This undoes the effect of methods like + * {@link #ifMatch(String, LDValue...)}. + * + * @return the same builder + */ + public FlagBuilder clearRules() { + rules.clear(); + return this; + } + + /** + * Removes any existing user/context targets from the flag. This undoes the effect of methods like + * {@link #variationForUser(String, boolean)}. + * + * @return the same builder + */ + public FlagBuilder clearTargets() { + targets.clear(); + return this; + } + + ItemDescriptor createFlag(int version) { + ObjectBuilder builder = LDValue.buildObject() + .put("key", key) + .put("version", version) + .put("on", on) + .put("offVariation", offVariation) + .put("fallthrough", LDValue.buildObject().put("variation", fallthroughVariation).build()); + + if(samplingRatio != null) { + builder.put("samplingRatio", samplingRatio); + } + + if(migrationCheckRatio != null) { + builder.put("migration", LDValue.buildObject() + .put("checkRatio", migrationCheckRatio) + .build()); + } + + // The following properties shouldn't actually be used in evaluations of this flag, but + // adding them makes the JSON output more predictable for tests + builder.put("prerequisites", LDValue.arrayOf()) + .put("salt", ""); + + ArrayBuilder jsonVariations = LDValue.buildArray(); + for (LDValue v: variations) { + jsonVariations.add(v); + } + builder.put("variations", 
jsonVariations.build()); + + ArrayBuilder jsonTargets = LDValue.buildArray(); + ArrayBuilder jsonContextTargets = LDValue.buildArray(); + if (!targets.isEmpty()) { + if (targets.get(ContextKind.DEFAULT) != null) { + for (Map.Entry> e: targets.get(ContextKind.DEFAULT).entrySet()) { + if (!e.getValue().isEmpty()) { + jsonTargets.add(LDValue.buildObject() + .put("variation", e.getKey().intValue()) + .put("values", LDValue.Convert.String.arrayFrom(e.getValue())) + .build()); + } + } + } + for (ContextKind contextKind: targets.keySet()) { + for (Map.Entry> e: targets.get(contextKind).entrySet()) { + if (!e.getValue().isEmpty()) { + jsonContextTargets.add(LDValue.buildObject() + .put("contextKind", contextKind.toString()) + .put("variation", e.getKey().intValue()) + .put("values", contextKind.isDefault() ? LDValue.arrayOf() : + LDValue.Convert.String.arrayFrom(e.getValue())) + .build()); + } + } + } + } + builder.put("targets", jsonTargets.build()); + builder.put("contextTargets", jsonContextTargets.build()); + + ArrayBuilder jsonRules = LDValue.buildArray(); + if (!rules.isEmpty()) { + int ri = 0; + for (FlagRuleBuilder r: rules) { + ArrayBuilder jsonClauses = LDValue.buildArray(); + for (Clause c: r.clauses) { + ArrayBuilder jsonValues = LDValue.buildArray(); + for (LDValue v: c.values) { + jsonValues.add(v); + } + jsonClauses.add(LDValue.buildObject() + .put("contextKind", c.contextKind == null ? null : c.contextKind.toString()) + .put("attribute", c.attribute.toString()) + .put("op", c.operator) + .put("values", jsonValues.build()) + .put("negate", c.negate) + .build()); + } + jsonRules.add(LDValue.buildObject() + .put("id", "rule" + ri) + .put("variation", r.variation) + .put("clauses", jsonClauses.build()) + .build()); + ri++; + } + } + builder.put("rules", jsonRules.build()); + + String json = builder.build().toJsonString(); + return DataModel.FEATURES.deserialize(json); + } + + /** + * Set the samplingRatio, used for event generation, for this flag. 
+ * + * @param samplingRatio the event sampling ratio + * @return a reference to this builder + */ + public FlagBuilder samplingRatio(long samplingRatio) { + this.samplingRatio = samplingRatio; + return this; + } + + /** + * Turn this flag into a migration flag and set the check ratio. + * + * @param checkRatio the check ratio + * @return a reference to this builder + */ + public FlagBuilder migrationCheckRatio(long checkRatio) { + migrationCheckRatio = checkRatio; + return this; + } + + private static int variationForBoolean(boolean value) { + return value ? TRUE_VARIATION_FOR_BOOLEAN : FALSE_VARIATION_FOR_BOOLEAN; + } + + /** + * A builder for feature flag rules to be used with {@link FlagBuilder}. + *

    + * In the LaunchDarkly model, a flag can have any number of rules, and a rule can have any number of + * clauses. A clause is an individual test such as "name is 'X'". A rule matches a user if all of the + * rule's clauses match the user. + *

    + * To start defining a rule, use one of the flag builder's matching methods such as + * {@link FlagBuilder#ifMatch(String, LDValue...)}. This defines the first clause for the rule. + * Optionally, you may add more clauses with the rule builder's methods such as + * {@link #andMatch(String, LDValue...)}. Finally, call {@link #thenReturn(boolean)} or + * {@link #thenReturn(int)} to finish defining the rule. + */ + public final class FlagRuleBuilder { + final List clauses = new ArrayList<>(); + int variation; + + /** + * Adds another clause, using the "is one of" operator. This matching expression only + * applies to contexts of a specific kind. + *

    + * For example, this creates a rule that returns {@code true} if the name attribute for the + * "company" context is "Ella" and the country is "gb": + * + *

    
    +       *     testData.flag("flag")
    +       *         .ifMatch(ContextKind.of("company"), "name", LDValue.of("Ella"))
    +       *         .andMatch(ContextKind.of("company"), "country", LDValue.of("gb"))
    +       *         .thenReturn(true);
    +       * 
    + * + * @param contextKind the context kind to match + * @param attribute the attribute to match against + * @param values values to compare to + * @return the rule builder + * @see #andNotMatch(ContextKind, String, LDValue...) + * @see #andMatch(String, LDValue...) + * @since 6.0.0 + */ + public FlagRuleBuilder andMatch(ContextKind contextKind, String attribute, LDValue... values) { + if (attribute != null) { + clauses.add(new Clause(contextKind, AttributeRef.fromPath(attribute), "in", values, false)); + } + return this; + } + + /** + * Adds another clause, using the "is one of" operator. This is a shortcut for calling + * {@link #andMatch(ContextKind, String, LDValue...)} with {@link ContextKind#DEFAULT} as the context kind. + *

    + * For example, this creates a rule that returns {@code true} if the name is "Patsy" and the + * country is "gb": + * + *

    
    +       *     testData.flag("flag")
    +       *         .ifMatch("name", LDValue.of("Patsy"))
    +       *         .andMatch("country", LDValue.of("gb"))
+       *         .thenReturn(true);
    +       * 
    + * + * @param attribute the user attribute to match against + * @param values values to compare to + * @return the rule builder + * @see #andNotMatch(String, LDValue...) + * @see #andMatch(ContextKind, String, LDValue...) + */ + public FlagRuleBuilder andMatch(String attribute, LDValue... values) { + return andMatch(ContextKind.DEFAULT, attribute, values); + } + + /** + * Adds another clause, using the "is not one of" operator. This matching expression only + * applies to contexts of a specific kind. + *

    + * For example, this creates a rule that returns {@code true} if the name attribute for the + * "company" context is "Ella" and the country is not "gb": + * + *

    
    +       *     testData.flag("flag")
    +       *         .ifMatch(ContextKind.of("company"), "name", LDValue.of("Ella"))
    +       *         .andNotMatch(ContextKind.of("company"), "country", LDValue.of("gb"))
+       *         .thenReturn(true);
    +       * 
    + * + * @param contextKind the context kind to match + * @param attribute the user attribute to match against + * @param values values to compare to + * @return the rule builder + * @see #andMatch(ContextKind, String, LDValue...) + * @see #andNotMatch(String, LDValue...) + * @since 6.0.0 + */ + public FlagRuleBuilder andNotMatch(ContextKind contextKind, String attribute, LDValue... values) { + if (attribute != null) { + clauses.add(new Clause(contextKind, AttributeRef.fromPath(attribute), "in", values, true)); + } + return this; + } + + /** + * Adds another clause, using the "is not one of" operator. + *

    + * For example, this creates a rule that returns {@code true} if the name is "Patsy" and the + * country is not "gb": + * + *

    
    +       *     testData.flag("flag")
    +       *         .ifMatch("name", LDValue.of("Patsy"))
    +       *         .andNotMatch("country", LDValue.of("gb"))
+       *         .thenReturn(true);
    +       * 
    + * + * @param attribute the user attribute to match against + * @param values values to compare to + * @return the rule builder + * @see #andMatch(String, LDValue...) + * @see #andNotMatch(ContextKind, String, LDValue...) + */ + public FlagRuleBuilder andNotMatch(String attribute, LDValue... values) { + return andNotMatch(ContextKind.DEFAULT, attribute, values); + } + + /** + * Finishes defining the rule, specifying the result value as a boolean. + * + * @param variation the value to return if the rule matches the user + * @return the flag builder + */ + public FlagBuilder thenReturn(boolean variation) { + FlagBuilder.this.booleanFlag(); + return thenReturn(variationForBoolean(variation)); + } + + /** + * Finishes defining the rule, specifying the result as a variation index. + * + * @param variationIndex the variation to return if the rule matches the user: 0 for the first, 1 + * for the second, etc. + * @return the flag builder + */ + public FlagBuilder thenReturn(int variationIndex) { + this.variation = variationIndex; + FlagBuilder.this.rules.add(this); + return FlagBuilder.this; + } + } + + private static final class Clause { + final ContextKind contextKind; + final AttributeRef attribute; + final String operator; + final LDValue[] values; + final boolean negate; + + Clause(ContextKind contextKind, AttributeRef attribute, String operator, LDValue[] values, boolean negate) { + this.contextKind = contextKind; + this.attribute = attribute; + this.operator = operator; + this.values = values; + this.negate = negate; + } + } + } + + private final class DataSourceImpl implements DataSource { + final DataSourceUpdateSink updates; + + DataSourceImpl(DataSourceUpdateSink updates) { + this.updates = updates; + } + + @Override + public Future start() { + updates.init(makeInitData()); + updates.updateStatus(State.VALID, null); + return completedFuture(null); + } + + @Override + public boolean isInitialized() { + return true; + } + + @Override + public void close() 
throws IOException { + closedInstance(this); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilder.java new file mode 100644 index 0000000..38d0678 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilder.java @@ -0,0 +1,42 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.interfaces.WrapperInfo; + +/** + * Contains methods for configuring wrapper information. + *

    + * This builder is primarily intended for use by LaunchDarkly in developing wrapper SDKs. + *

    + * If the WrapperBuilder is used, then it will replace the wrapper information from the HttpPropertiesBuilder. + * Additionally, any wrapper SDK may overwrite any application developer provided wrapper information. + */ + public abstract class WrapperInfoBuilder { + protected String wrapperName; + protected String wrapperVersion; + + /** + * Set the name of the wrapper. + * + * @param wrapperName the name of the wrapper + * @return the builder + */ + public WrapperInfoBuilder wrapperName(String wrapperName) { + this.wrapperName = wrapperName; + return this; + } + + /** + * Set the version of the wrapper. + *

    + * This information will not be used unless the wrapperName is also set. + * + * @param wrapperVersion the version of the wrapper + * @return the builder + */ + public WrapperInfoBuilder wrapperVersion(String wrapperVersion) { + this.wrapperVersion = wrapperVersion; + return this; + } + + public abstract WrapperInfo build(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/package-info.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/package-info.java new file mode 100644 index 0000000..7c5d27c --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/package-info.java @@ -0,0 +1,11 @@ +/** + * This package contains integration tools for connecting the SDK to other software components, or + * configuring how it connects to LaunchDarkly. + *

    + * In the current main LaunchDarkly Java SDK library, this package contains the configuration builders + * for the standard SDK components such as {@link com.launchdarkly.sdk.server.integrations.StreamingDataSourceBuilder}, + * the {@link com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder} builder for use with + * database integrations (the specific database integrations themselves are provided by add-on libraries), + * and {@link com.launchdarkly.sdk.server.integrations.FileData} (for reading flags from a file in testing). + */ +package com.launchdarkly.sdk.server.integrations; diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClient.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClient.java new file mode 100644 index 0000000..5ba41a7 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClient.java @@ -0,0 +1,187 @@ +package com.launchdarkly.sdk.server.integrations.reactor; + +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.FeatureFlagsState; +import com.launchdarkly.sdk.server.FlagsStateOption; +import com.launchdarkly.sdk.server.LDClient; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.FlagTracker; + +import java.util.concurrent.Callable; + +import reactor.core.publisher.Mono; +import reactor.core.scheduler.Scheduler; + +/** + * A thin wrapper of the {@link LDClient} that aims to adapt it to reactive stream programming. 
+ * + * Methods that are potentially long running or that use IO have been wrapped to return {@link Mono}s and will be + * executed on the scheduler provided. Methods that do not have a risk of blocking have not been wrapped and are + * pass through. + */ +public final class LDReactorClient implements LDReactorClientInterface { + + private final LDClient wrappedClient; + private final Scheduler scheduler; + + /** + * Creates a client that uses the provided scheduler to execute functionality in a non-blocking manner. + * + * @param sdkKey the SDK key for your LaunchDarkly environment + * @param scheduler that will execute wrapped client methods + */ + public LDReactorClient(String sdkKey, Scheduler scheduler) { + this.wrappedClient = new LDClient(sdkKey); + this.scheduler = scheduler; + } + + /** + * Creates a client that uses the provided scheduler to execute functionality in a non-blocking manner. + * + * @param sdkKey the SDK key for your LaunchDarkly environment + * @param config a client configuration object + * @param scheduler that will execute wrapped client methods + */ + public LDReactorClient(String sdkKey, LDConfig config, Scheduler scheduler) { + this.wrappedClient = new LDClient(sdkKey, config); + this.scheduler = scheduler; + } + + @Override + public boolean isInitialized() { + return wrappedClient.isInitialized(); + } + + @Override + public void track(String eventName, LDContext context) { + wrappedClient.track(eventName, context); + } + + @Override + public void trackData(String eventName, LDContext context, LDValue data) { + wrappedClient.trackData(eventName, context, data); + } + + @Override + public void trackMetric(String eventName, LDContext context, LDValue data, double metricValue) { + wrappedClient.trackMetric(eventName, context, data, metricValue); + } + + @Override + public void identify(LDContext context) { + wrappedClient.identify(context); + } + + @Override + public Mono allFlagsState(LDContext context, FlagsStateOption... 
options) { + return Mono.fromCallable(() -> wrappedClient.allFlagsState(context, options)).subscribeOn(this.scheduler); + } + + @Override + public Mono boolVariation(String featureKey, LDContext context, boolean defaultValue) { + return Mono.fromCallable(() -> wrappedClient.boolVariation(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono intVariation(String featureKey, LDContext context, int defaultValue) { + return Mono.fromCallable(() -> wrappedClient.intVariation(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono doubleVariation(String featureKey, LDContext context, double defaultValue) { + return Mono.fromCallable(() -> wrappedClient.doubleVariation(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono stringVariation(String featureKey, LDContext context, String defaultValue) { + return Mono.fromCallable(() -> wrappedClient.stringVariation(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono jsonValueVariation(String featureKey, LDContext context, LDValue defaultValue) { + return Mono.fromCallable(() -> wrappedClient.jsonValueVariation(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono> boolVariationDetail(String featureKey, LDContext context, boolean defaultValue) { + return Mono.fromCallable(() -> wrappedClient.boolVariationDetail(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono> intVariationDetail(String featureKey, LDContext context, int defaultValue) { + return Mono.fromCallable(() -> wrappedClient.intVariationDetail(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono> doubleVariationDetail(String featureKey, LDContext context, double defaultValue) { + return Mono.fromCallable(() -> wrappedClient.doubleVariationDetail(featureKey, context, 
defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono> stringVariationDetail(String featureKey, LDContext context, String defaultValue) { + return Mono.fromCallable(() -> wrappedClient.stringVariationDetail(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public Mono> jsonValueVariationDetail(String featureKey, LDContext context, LDValue defaultValue) { + return Mono.fromCallable(() -> wrappedClient.jsonValueVariationDetail(featureKey, context, defaultValue)).subscribeOn(this.scheduler); + } + + @Override + public boolean isFlagKnown(String featureKey) { + return wrappedClient.isFlagKnown(featureKey); + } + + @Override + public FlagTracker getFlagTracker() { + return wrappedClient.getFlagTracker(); + } + + @Override + public BigSegmentStoreStatusProvider getBigSegmentStoreStatusProvider() { + return wrappedClient.getBigSegmentStoreStatusProvider(); + } + + @Override + public DataStoreStatusProvider getDataStoreStatusProvider() { + return wrappedClient.getDataStoreStatusProvider(); + } + + @Override + public DataSourceStatusProvider getDataSourceStatusProvider() { + return wrappedClient.getDataSourceStatusProvider(); + } + + @Override + public Mono close() { + return Mono.fromCallable((Callable) () -> { + wrappedClient.close(); + return null; + }).subscribeOn(this.scheduler); + } + + @Override + public void flush() { + wrappedClient.flush(); + } + + @Override + public boolean isOffline() { + return wrappedClient.isOffline(); + } + + @Override + public String secureModeHash(LDContext context) { + return wrappedClient.secureModeHash(context); + } + + @Override + public String version() { + return wrappedClient.version(); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClientInterface.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClientInterface.java new file mode 100644 index 0000000..7e64a7f --- /dev/null 
+++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/LDReactorClientInterface.java @@ -0,0 +1,243 @@ +package com.launchdarkly.sdk.server.integrations.reactor; + +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.FeatureFlagsState; +import com.launchdarkly.sdk.server.FlagsStateOption; +import com.launchdarkly.sdk.server.LDClient; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.FlagTracker; + +import reactor.core.publisher.Mono; + +/** + * A version of {@link LDClient} that is adapted to support reactive stream programming. + */ +public interface LDReactorClientInterface { + + /** + * See {@link LDClient#isInitialized()}. + * + * @return see linked reference. + */ + boolean isInitialized(); + + /** + * See {@link LDClient#track(String, LDContext)}. + * + * @param eventName see linked reference. + * @param context see linked reference. + */ + void track(String eventName, LDContext context); + + /** + * See {@link LDClient#trackData(String, LDContext, LDValue)}. + * + * @param eventName see linked reference. + * @param context see linked reference. + * @param data see linked reference. + */ + void trackData(String eventName, LDContext context, LDValue data); + + /** + * See {@link LDClient#trackMetric(String, LDContext, LDValue, double)}. + * + * @param eventName see linked reference. + * @param context see linked reference. + * @param data see linked reference. + * @param metricValue see linked reference. + */ + void trackMetric(String eventName, LDContext context, LDValue data, double metricValue); + + /** + * See {@link LDClient#identify(LDContext)}. + * + * @param context see linked reference. 
+ */ + void identify(LDContext context); + + /** + * See {@link LDClient#allFlagsState(LDContext, FlagsStateOption...)}. + * + * @param context see linked reference. + * @param options see linked reference. + * @return a {@link Mono} that will emit the {@link FeatureFlagsState}. + */ + Mono allFlagsState(LDContext context, FlagsStateOption... options); + + /** + * See {@link LDClient#boolVariation(String, LDContext, boolean)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono boolVariation(String featureKey, LDContext context, boolean defaultValue); + + /** + * See {@link LDClient#intVariation(String, LDContext, int)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono intVariation(String featureKey, LDContext context, int defaultValue); + + /** + * See {@link LDClient#doubleVariation(String, LDContext, double)} + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono doubleVariation(String featureKey, LDContext context, double defaultValue); + + /** + * See {@link LDClient#stringVariation(String, LDContext, String)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono stringVariation(String featureKey, LDContext context, String defaultValue); + + /** + * See {@link LDClient#jsonValueVariation(String, LDContext, LDValue)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. 
+ * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono jsonValueVariation(String featureKey, LDContext context, LDValue defaultValue); + + /** + * See {@link LDClient#boolVariationDetail(String, LDContext, boolean)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono> boolVariationDetail(String featureKey, LDContext context, boolean defaultValue); + + /** + * See {@link LDClient#intVariationDetail(String, LDContext, int)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono> intVariationDetail(String featureKey, LDContext context, int defaultValue); + + /** + * See {@link LDClient#doubleVariationDetail(String, LDContext, double)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono> doubleVariationDetail(String featureKey, LDContext context, double defaultValue); + + /** + * See {@link LDClient#stringVariationDetail(String, LDContext, String)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. + */ + Mono> stringVariationDetail(String featureKey, LDContext context, String defaultValue); + + /** + * See {@link LDClient#jsonValueVariationDetail(String, LDContext, LDValue)}. + * + * @param featureKey see linked reference. + * @param context see linked reference. + * @param defaultValue see linked reference. + * @return a {@link Mono} that will emit the evaluation result. 
+ */ + Mono> jsonValueVariationDetail(String featureKey, LDContext context, LDValue defaultValue); + + /** + * See {@link LDClient#isFlagKnown(String)}. + * + * @param featureKey see linked reference. + * @return see linked reference. + */ + boolean isFlagKnown(String featureKey); + + /** + * See {@link LDClient#getFlagTracker()}. + * + * @return see linked reference. + */ + FlagTracker getFlagTracker(); + + /** + * See {@link LDClient#getBigSegmentStoreStatusProvider()}. Getting the {@link BigSegmentStoreStatusProvider} is + * not a blocking operation, but function calls on the {@link BigSegmentStoreStatusProvider} may be. + * + * @return see linked reference. + */ + BigSegmentStoreStatusProvider getBigSegmentStoreStatusProvider(); + + /** + * See {@link LDClient#getDataStoreStatusProvider()}. Getting the {@link DataStoreStatusProvider} is not a blocking + * operation, but function calls on the {@link DataStoreStatusProvider} may be. + * + * @return see linked reference. + */ + DataStoreStatusProvider getDataStoreStatusProvider(); + + /** + * See {@link LDClient#getDataSourceStatusProvider()}. Getting the {@link DataSourceStatusProvider} is not a + * blocking operation, but function calls on the {@link DataSourceStatusProvider} may be. + * + * @return see linked reference. + */ + DataSourceStatusProvider getDataSourceStatusProvider(); + + /** + * See {@link LDClient#close()}. + * + * @return a Mono that completes when {@link LDClient#close()} completes. + */ + Mono close(); + + /** + * See {@link LDClient#flush()}. + */ + void flush(); + + /** + * See {@link LDClient#isOffline()}. + * + * @return see linked reference. + */ + boolean isOffline(); + + /** + * See {@link LDClient#secureModeHash(LDContext)}. + * + * @param context see linked reference. + * @return see linked reference. + */ + String secureModeHash(LDContext context); + + /** + * See {@link LDClient#version()}. + * + * @return see linked reference. 
+ */ + String version(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/package-info.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/package-info.java new file mode 100644 index 0000000..2b1ff0d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/integrations/reactor/package-info.java @@ -0,0 +1,4 @@ +/** + * This package contains components for using the SDK in reactive stream programming. + */ +package com.launchdarkly.sdk.server.integrations.reactor; diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ApplicationInfo.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ApplicationInfo.java new file mode 100644 index 0000000..e64ed6c --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ApplicationInfo.java @@ -0,0 +1,46 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder; + +/** + * Encapsulates the SDK's application metadata. + *

    + * See {@link ApplicationInfoBuilder} for more details on these properties. + * + * @since 5.8.0 + */ +public final class ApplicationInfo { + private String applicationId; + private String applicationVersion; + + /** + * Used internally by the SDK to store application metadata. + * + * @param applicationId the application ID + * @param applicationVersion the application version + * @see ApplicationInfoBuilder + */ + public ApplicationInfo(String applicationId, String applicationVersion) { + this.applicationId = applicationId; + this.applicationVersion = applicationVersion; + } + + /** + * A unique identifier representing the application where the LaunchDarkly SDK is running. + * + * @return the application identifier, or null + */ + public String getApplicationId() { + return applicationId; + } + + /** + * A unique identifier representing the version of the application where the + * LaunchDarkly SDK is running. + * + * @return the application version, or null + */ + public String getApplicationVersion() { + return applicationVersion; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentStoreStatusProvider.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentStoreStatusProvider.java new file mode 100644 index 0000000..3c43861 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentStoreStatusProvider.java @@ -0,0 +1,135 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.server.LDClient; +import com.launchdarkly.sdk.server.integrations.BigSegmentsConfigurationBuilder; + +import java.time.Duration; +import java.util.Objects; + +/** + * An interface for querying the status of a Big Segment store. + *

    + * The Big Segment store is the component that receives information about Big Segments, normally + * from a database populated by the LaunchDarkly Relay Proxy. + *

    + * Big Segments are a specific type of user segments. For more information, read the + * LaunchDarkly documentation + * . + *

    + * An implementation of this interface is returned by + * {@link LDClient#getBigSegmentStoreStatusProvider()}. Application code never needs to implement + * this interface. + * + * @since 5.7.0 + */ +public interface BigSegmentStoreStatusProvider { + /** + * Returns the current status of the store. + * + * @return the latest status; will never be null + */ + Status getStatus(); + + /** + * Subscribes for notifications of status changes. + * + * @param listener the listener to add + */ + void addStatusListener(StatusListener listener); + + /** + * Unsubscribes from notifications of status changes. + * + * @param listener the listener to remove; if no such listener was added, this does nothing + */ + void removeStatusListener(StatusListener listener); + + /** + * Information about the status of a Big Segment store, provided by + * {@link BigSegmentStoreStatusProvider} + *

    + * Big Segments are a specific type of user segments. For more information, read the + * LaunchDarkly documentation + * . + */ + public static final class Status { + private final boolean available; + private final boolean stale; + + /** + * Constructor for a Big Segment status. + * + * @param available whether the Big Segment store is available + * @param stale whether the Big Segment store has not been recently updated + */ + public Status(boolean available, boolean stale) { + this.available = available; + this.stale = stale; + } + + /** + * True if the Big Segment store is able to respond to queries, so that the SDK can evaluate + * whether a user is in a segment or not. + *

    + * If this property is false, the store is not able to make queries (for instance, it may not + * have a valid database connection). In this case, the SDK will treat any reference to a Big + * Segment as if no users are included in that segment. Also, the {@link EvaluationReason} + * associated with any flag evaluation that references a Big Segment when the store is not + * available will have a {@link EvaluationReason.BigSegmentsStatus} of + * {@link EvaluationReason.BigSegmentsStatus#STORE_ERROR}. + * + * @return whether the Big Segment store is able to respond to queries + */ + public boolean isAvailable() { + return available; + } + + /** + * True if the Big Segment store is available, but has not been updated within the amount of + * time specified by + * {@link BigSegmentsConfigurationBuilder#staleAfter(Duration)}. + *

    + * This may indicate that the LaunchDarkly Relay Proxy, which populates the store, has stopped + * running or has become unable to receive fresh data from LaunchDarkly. Any feature flag + * evaluations that reference a Big Segment will be using the last known data, which may be out + * of date. + * + * @return whether the data in the Big Segment store is considered to be stale + */ + public boolean isStale() { + return stale; + } + + @Override + public boolean equals(Object other) { + if (other instanceof Status) { + Status o = (Status)other; + return available == o.available && stale == o.stale; + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(available, stale); + } + + @Override + public String toString() { + return "Status(Available=" + available + ",Stale=" + stale + ")"; + } + } + + /** + * Interface for receiving Big Segment status change notifications. + */ + public static interface StatusListener { + /** + * Called when any property of the Big Segment store status has changed. + * + * @param newStatus the new status of the Big Segment store + */ + void bigSegmentStoreStatusChanged(Status newStatus); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentsConfiguration.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentsConfiguration.java new file mode 100644 index 0000000..965737d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/BigSegmentsConfiguration.java @@ -0,0 +1,94 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.server.integrations.BigSegmentsConfigurationBuilder; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; + +import java.time.Duration; + +/** + * Encapsulates the SDK's configuration with regard to Big Segments. + *

    + * Big Segments are a specific type of user segments. For more information, read the + * LaunchDarkly documentation + * . + *

    + * See {@link BigSegmentsConfigurationBuilder} for more details on these properties. + * + * @see BigSegmentsConfigurationBuilder + * @since 5.7.0 + */ +public final class BigSegmentsConfiguration { + private final BigSegmentStore bigSegmentStore; + private final int userCacheSize; + private final Duration userCacheTime; + private final Duration statusPollInterval; + private final Duration staleAfter; + + /** + * Creates a new {@link BigSegmentsConfiguration} instance with the specified values. + *

    + * See {@link BigSegmentsConfigurationBuilder} for more information on the configuration fields. + * + * @param bigSegmentStore the Big Segments store instance + * @param userCacheSize the user cache size + * @param userCacheTime the user cache time + * @param statusPollInterval the status poll interval + * @param staleAfter the interval after which store data is considered stale + */ + public BigSegmentsConfiguration(BigSegmentStore bigSegmentStore, + int userCacheSize, + Duration userCacheTime, + Duration statusPollInterval, + Duration staleAfter) { + this.bigSegmentStore = bigSegmentStore; + this.userCacheSize = userCacheSize; + this.userCacheTime = userCacheTime; + this.statusPollInterval = statusPollInterval; + this.staleAfter = staleAfter; + } + + /** + * Gets the data store instance that is used for Big Segments data. + * + * @return the configured Big Segment store + */ + public BigSegmentStore getStore() { + return this.bigSegmentStore; + } + + /** + * Gets the value set by {@link BigSegmentsConfigurationBuilder#userCacheSize(int)} + * + * @return the configured user cache size limit + */ + public int getUserCacheSize() { + return this.userCacheSize; + } + + /** + * Gets the value set by {@link BigSegmentsConfigurationBuilder#userCacheTime(Duration)} + * + * @return the configured user cache time duration + */ + public Duration getUserCacheTime() { + return this.userCacheTime; + } + + /** + * Gets the value set by {@link BigSegmentsConfigurationBuilder#statusPollInterval(Duration)} + * + * @return the configured status poll interval + */ + public Duration getStatusPollInterval() { + return this.statusPollInterval; + } + + /** + * Gets the value set by {@link BigSegmentsConfigurationBuilder#staleAfter(Duration)} + * + * @return the configured stale after interval + */ + public Duration getStaleAfter() { + return this.staleAfter; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ConsistencyCheck.java 
b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ConsistencyCheck.java new file mode 100644 index 0000000..2128633 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ConsistencyCheck.java @@ -0,0 +1,21 @@ +package com.launchdarkly.sdk.server.interfaces; + +/** + * Consistency check result. + */ +public enum ConsistencyCheck { + /** + * Consistency was checked and found to be inconsistent. + */ + INCONSISTENT, + + /** + * Consistency was checked and found to be consistent. + */ + CONSISTENT, + + /** + * Consistency check was not performed. + */ + NOT_CHECKED +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProvider.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProvider.java new file mode 100644 index 0000000..a3eab8a --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProvider.java @@ -0,0 +1,386 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.google.common.base.Strings; + +import java.time.Duration; +import java.time.Instant; +import java.util.Objects; + +/** + * An interface for querying the status of the SDK's data source. The data source is the component + * that receives updates to feature flag data; normally this is a streaming connection, but it could + * be polling or file data depending on your configuration. + *

    + * An implementation of this interface is returned by {@link com.launchdarkly.sdk.server.interfaces.LDClientInterface#getDataSourceStatusProvider}. + * Application code never needs to implement this interface. + * + * @since 5.0.0 + */ +public interface DataSourceStatusProvider { + /** + * Returns the current status of the data source. + *

    + * All of the built-in data source implementations are guaranteed to update this status whenever they + * successfully initialize, encounter an error, or recover after an error. + *

    + * For a custom data source implementation, it is the responsibility of the data source to push + * status updates to the SDK; if it does not do so, the status will always be reported as + * {@link State#INITIALIZING}. + * + * @return the latest status; will never be null + */ + public Status getStatus(); + + /** + * Subscribes for notifications of status changes. + *

    + * The listener will be notified whenever any property of the status has changed. See {@link Status} for an + * explanation of the meaning of each property and what could cause it to change. + *

    + * Notifications will be dispatched on a worker thread. It is the listener's responsibility to return as soon as + * possible so as not to block subsequent notifications. + * + * @param listener the listener to add + */ + public void addStatusListener(StatusListener listener); + + /** + * Unsubscribes from notifications of status changes. + * + * @param listener the listener to remove; if no such listener was added, this does nothing + */ + public void removeStatusListener(StatusListener listener); + + /** + * A synchronous method for waiting for a desired connection state. + *

    + * If the current state is already {@code desiredState} when this method is called, it immediately returns. + * Otherwise, it blocks until 1. the state has become {@code desiredState}, 2. the state has become + * {@link State#OFF} (since that is a permanent condition), 3. the specified timeout elapses, or 4. + * the current thread is deliberately interrupted with {@link Thread#interrupt()}. + *

    + * A scenario in which this might be useful is if you want to create the {@code LDClient} without waiting + * for it to initialize, and then wait for initialization at a later time or on a different thread: + *

    
    +   *     // create the client but do not wait
    +   *     LDConfig config = new LDConfig.Builder().startWait(Duration.ZERO).build();
    +   *     client = new LDClient(sdkKey, config);
    +   *     
    +   *     // later, possibly on another thread:
    +   *     boolean inited = client.getDataSourceStatusProvider().waitFor(
    +   *         DataSourceStatusProvider.State.VALID, Duration.ofSeconds(10));
    +   *     if (!inited) {
    +   *         // do whatever is appropriate if initialization has timed out
    +   *     }       
    +   * 
    + * + * @param desiredState the desired connection state (normally this would be {@link State#VALID}) + * @param timeout the maximum amount of time to wait-- or {@link Duration#ZERO} to block indefinitely + * (unless the thread is explicitly interrupted) + * @return true if the connection is now in the desired state; false if it timed out, or if the state + * changed to {@link State#OFF} and that was not the desired state + * @throws InterruptedException if {@link Thread#interrupt()} was called on this thread while blocked + */ + public boolean waitFor(State desiredState, Duration timeout) throws InterruptedException; + + /** + * An enumeration of possible values for {@link DataSourceStatusProvider.Status#getState()}. + */ + public enum State { + /** + * The initial state of the data source when the SDK is being initialized. + *

    + * If it encounters an error that requires it to retry initialization, the state will remain at + * {@link #INITIALIZING} until it either succeeds and becomes {@link #VALID}, or permanently fails and + * becomes {@link #OFF}. + */ + INITIALIZING, + + /** + * Indicates that the data source is currently operational and has not had any problems since the + * last time it received data. + *

    + * In streaming mode, this means that there is currently an open stream connection and that at least + * one initial message has been received on the stream. In polling mode, it means that the last poll + * request succeeded. + */ + VALID, + + /** + * Indicates that the data source encountered an error that it will attempt to recover from. + *

    + * In streaming mode, this means that the stream connection failed, or had to be dropped due to some + * other error, and will be retried after a backoff delay. In polling mode, it means that the last poll + * request failed, and a new poll request will be made after the configured polling interval. + */ + INTERRUPTED, + + /** + * Indicates that the data source has been permanently shut down. + *

    + * This could be because it encountered an unrecoverable error (for instance, the LaunchDarkly service + * rejected the SDK key; an invalid SDK key will never become valid), or because the SDK client was + * explicitly shut down. + */ + OFF; + } + + /** + * An enumeration describing the general type of an error reported in {@link ErrorInfo}. + * + * @see ErrorInfo#getKind() + */ + public static enum ErrorKind { + /** + * An unexpected error, such as an uncaught exception, further described by {@link ErrorInfo#getMessage()}. + */ + UNKNOWN, + + /** + * An I/O error such as a dropped connection. + */ + NETWORK_ERROR, + + /** + * The LaunchDarkly service returned an HTTP response with an error status, available with + * {@link ErrorInfo#getStatusCode()}. + */ + ERROR_RESPONSE, + + /** + * The SDK received malformed data from the LaunchDarkly service. + */ + INVALID_DATA, + + /** + * The data source itself is working, but when it tried to put an update into the data store, the data + * store failed (so the SDK may not have the latest data). + *

    + * Data source implementations do not need to report this kind of error; it will be automatically + * reported by the SDK when exceptions are detected. + */ + STORE_ERROR + } + + /** + * A description of an error condition that the data source encountered. + * + * @see Status#getLastError() + */ + public static final class ErrorInfo { + private final ErrorKind kind; + private final int statusCode; + private final String message; + private final Instant time; + + /** + * Constructs an instance. + * + * @param kind the general category of the error + * @param statusCode an HTTP status or zero + * @param message an error message if applicable, or null + * @param time the error timestamp + */ + public ErrorInfo(ErrorKind kind, int statusCode, String message, Instant time) { + this.kind = kind; + this.statusCode = statusCode; + this.message = message; + this.time = time; + } + + /** + * Constructs an instance based on an exception. + * + * @param kind the general category of the error + * @param t the exception + * @return an ErrorInfo + */ + public static ErrorInfo fromException(ErrorKind kind, Throwable t) { + return new ErrorInfo(kind, 0, t.toString(), Instant.now()); + } + + /** + * Constructs an instance based on an HTTP error status. + * + * @param statusCode the status code + * @return an ErrorInfo + */ + public static ErrorInfo fromHttpError(int statusCode) { + return new ErrorInfo(ErrorKind.ERROR_RESPONSE, statusCode, null, Instant.now()); + } + + /** + * Returns an enumerated value representing the general category of the error. + * + * @return the general category of the error + */ + public ErrorKind getKind() { + return kind; + } + + /** + * Returns the HTTP status code if the error was {@link ErrorKind#ERROR_RESPONSE}, or zero otherwise. + * + * @return an HTTP status or zero + */ + public int getStatusCode() { + return statusCode; + } + + /** + * Returns any additional human-readable information relevant to the error. 
The format of this message + * is subject to change and should not be relied on programmatically. + * + * @return an error message if applicable, or null + */ + public String getMessage() { + return message; + } + + /** + * Returns the date/time that the error occurred. + * + * @return the error timestamp + */ + public Instant getTime() { + return time; + } + + @Override + public boolean equals(Object other) { + if (other instanceof ErrorInfo) { + ErrorInfo o = (ErrorInfo)other; + return kind == o.kind && statusCode == o.statusCode && Objects.equals(message, o.message) && + Objects.equals(time, o.time); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(kind, statusCode, message, time); + } + + @Override + public String toString() { + StringBuilder s = new StringBuilder(); + s.append(kind.toString()); + if (statusCode > 0 || !Strings.isNullOrEmpty(message)) { + s.append("("); + if (statusCode > 0) { + s.append(statusCode); + } + if (!Strings.isNullOrEmpty(message)) { + if (statusCode > 0) { + s.append(","); + } + s.append(message); + } + s.append(")"); + } + if (time != null) { + s.append("@"); + s.append(time.toString()); + } + return s.toString(); + } + } + + /** + * Information about the data source's status and about the last status change. + */ + public static final class Status { + private final State state; + private final Instant stateSince; + private final ErrorInfo lastError; + + /** + * Constructs a new instance. + * + * @param state the basic state as an enumeration + * @param stateSince timestamp of the last state transition + * @param lastError a description of the last error, or null if no errors have occurred since startup + */ + public Status(State state, Instant stateSince, ErrorInfo lastError) { + this.state = state; + this.stateSince = stateSince; + this.lastError = lastError; + } + + /** + * Returns an enumerated value representing the overall current state of the data source. 
+ * + * @return the basic state + */ + public State getState() { + return state; + } + + /** + * Returns the date/time that the value of {@link #getState()} most recently changed. + *

    + * The meaning of this depends on the current state: + *

      + *
    • For {@link State#INITIALIZING}, it is the time that the SDK started initializing. + *
    • For {@link State#VALID}, it is the time that the data source most recently entered a valid + * state, after previously having been either {@link State#INITIALIZING} or {@link State#INTERRUPTED}. + *
    • For {@link State#INTERRUPTED}, it is the time that the data source most recently entered an + * error state, after previously having been {@link State#VALID}. + *
    • For {@link State#OFF}, it is the time that the data source encountered an unrecoverable error + * or that the SDK was explicitly shut down. + *
    + * + * @return the timestamp of the last state change + */ + public Instant getStateSince() { + return stateSince; + } + + /** + * Returns information about the last error that the data source encountered, if any. + *

    + * This property should be updated whenever the data source encounters a problem, even if it does + * not cause {@link #getState()} to change. For instance, if a stream connection fails and the + * state changes to {@link State#INTERRUPTED}, and then subsequent attempts to restart the + * connection also fail, the state will remain {@link State#INTERRUPTED} but the error information + * will be updated each time-- and the last error will still be reported in this property even if + * the state later becomes {@link State#VALID}. + * + * @return a description of the last error, or null if no errors have occurred since startup + */ + public ErrorInfo getLastError() { + return lastError; + } + + @Override + public boolean equals(Object other) { + if (other instanceof Status) { + Status o = (Status)other; + return state == o.state && Objects.equals(stateSince, o.stateSince) && Objects.equals(lastError, o.lastError); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(state, stateSince, lastError); + } + + @Override + public String toString() { + return "Status(" + state + "," + stateSince + "," + lastError + ")"; + } + } + + /** + * Interface for receiving status change notifications. + */ + public static interface StatusListener { + /** + * Called when any property of the data source status has changed. 
+ * + * @param newStatus the new status + */ + public void dataSourceStatusChanged(Status newStatus); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProvider.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProvider.java new file mode 100644 index 0000000..0c71fa3 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProvider.java @@ -0,0 +1,261 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder; + +import java.util.Objects; + +/** + * An interface for querying the status of a persistent data store. + *

    + * An implementation of this interface is returned by {@link com.launchdarkly.sdk.server.interfaces.LDClientInterface#getDataStoreStatusProvider}. + * Application code should not implement this interface. + * + * @since 5.0.0 + */ +public interface DataStoreStatusProvider { + /** + * Returns the current status of the store. + *

    + * This is only meaningful for persistent stores, or any custom data store implementation that makes use of + * the status reporting mechanism provided by the SDK. For the default in-memory store, the status will always + * be reported as "available". + * + * @return the latest status; will never be null + */ + public Status getStatus(); + + /** + * Indicates whether the current data store implementation supports status monitoring. + *

    + * This is normally true for all persistent data stores, and false for the default in-memory store. A true value + * means that any listeners added with {@link #addStatusListener(StatusListener)} can expect to be notified if + * there is any error in storing data, and then notified again when the error condition is resolved. A false + * value means that the status is not meaningful and listeners should not expect to be notified. + * + * @return true if status monitoring is enabled + */ + public boolean isStatusMonitoringEnabled(); + + /** + * Subscribes for notifications of status changes. + *

    + * Applications may wish to know if there is an outage in a persistent data store, since that could mean that + * flag evaluations are unable to get the flag data from the store (unless it is currently cached) and therefore + * might return default values. + *

    + * If the SDK receives an exception while trying to query or update the data store, then it notifies listeners + * that the store appears to be offline ({@link Status#isAvailable()} is false) and begins polling the store + * at intervals until a query succeeds. Once it succeeds, it notifies listeners again with {@link Status#isAvailable()} + * set to true. + *

    + * This method has no effect if the data store implementation does not support status tracking, such as if you + * are using the default in-memory store rather than a persistent store. + * + * @param listener the listener to add + */ + public void addStatusListener(StatusListener listener); + + /** + * Unsubscribes from notifications of status changes. + *

    + * This method has no effect if the data store implementation does not support status tracking, such as if you + * are using the default in-memory store rather than a persistent store. + * + * @param listener the listener to remove; if no such listener was added, this does nothing + */ + public void removeStatusListener(StatusListener listener); + + /** + * Queries the current cache statistics, if this is a persistent store with caching enabled. + *

    + * This method returns null if the data store implementation does not support cache statistics because it is + * not a persistent store, or because you did not enable cache monitoring with + * {@link PersistentDataStoreBuilder#recordCacheStats(boolean)}. + * + * @return a {@link CacheStats} instance; null if not applicable + */ + public CacheStats getCacheStats(); + + /** + * Information about a status change. + */ + public static final class Status { + private final boolean available; + private final boolean refreshNeeded; + + /** + * Creates an instance. + * @param available see {@link #isAvailable()} + * @param refreshNeeded see {@link #isRefreshNeeded()} + */ + public Status(boolean available, boolean refreshNeeded) { + this.available = available; + this.refreshNeeded = refreshNeeded; + } + + /** + * Returns true if the SDK believes the data store is now available. + *

    + * This property is normally true. If the SDK receives an exception while trying to query or update the data + * store, then it sets this property to false (notifying listeners, if any) and polls the store at intervals + * until a query succeeds. Once it succeeds, it sets the property back to true (again notifying listeners). + * + * @return true if store is available + */ + public boolean isAvailable() { + return available; + } + + /** + * Returns true if the store may be out of date due to a previous outage, so the SDK should attempt to refresh + * all feature flag data and rewrite it to the store. + *

    + * This property is not meaningful to application code. + * + * @return true if data should be rewritten + */ + public boolean isRefreshNeeded() { + return refreshNeeded; + } + + @Override + public boolean equals(Object other) { + if (other instanceof Status) { + Status o = (Status)other; + return available == o.available && refreshNeeded == o.refreshNeeded; + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(available, refreshNeeded); + } + + @Override + public String toString() { + return "Status(" + available + "," + refreshNeeded + ")"; + } + } + + /** + * Interface for receiving status change notifications. + */ + public static interface StatusListener { + /** + * Called when the store status has changed. + * @param newStatus the new status + */ + public void dataStoreStatusChanged(Status newStatus); + } + + /** + * A snapshot of cache statistics. The statistics are cumulative across the lifetime of the data store. + *

    + * This is based on the data provided by Guava's caching framework. The SDK currently uses Guava + * internally, but is not guaranteed to always do so, and to avoid embedding Guava API details in + * the SDK API this is provided as a separate class. + * + * @see DataStoreStatusProvider#getCacheStats() + * @see PersistentDataStoreBuilder#recordCacheStats(boolean) + * @since 4.12.0 + */ + public static final class CacheStats { + private final long hitCount; + private final long missCount; + private final long loadSuccessCount; + private final long loadExceptionCount; + private final long totalLoadTime; + private final long evictionCount; + + /** + * Constructs a new instance. + * + * @param hitCount number of queries that produced a cache hit + * @param missCount number of queries that produced a cache miss + * @param loadSuccessCount number of cache misses that loaded a value without an exception + * @param loadExceptionCount number of cache misses that tried to load a value but got an exception + * @param totalLoadTime number of nanoseconds spent loading new values + * @param evictionCount number of cache entries that have been evicted + */ + public CacheStats(long hitCount, long missCount, long loadSuccessCount, long loadExceptionCount, + long totalLoadTime, long evictionCount) { + this.hitCount = hitCount; + this.missCount = missCount; + this.loadSuccessCount = loadSuccessCount; + this.loadExceptionCount = loadExceptionCount; + this.totalLoadTime = totalLoadTime; + this.evictionCount = evictionCount; + } + + /** + * The number of data queries that received cached data instead of going to the underlying data store. + * @return the number of cache hits + */ + public long getHitCount() { + return hitCount; + } + + /** + * The number of data queries that did not find cached data and went to the underlying data store. 
+ * @return the number of cache misses + */ + public long getMissCount() { + return missCount; + } + + /** + * The number of times a cache miss resulted in successfully loading a data store item (or finding + * that it did not exist in the store). + * @return the number of successful loads + */ + public long getLoadSuccessCount() { + return loadSuccessCount; + } + + /** + * The number of times that an error occurred while querying the underlying data store. + * @return the number of failed loads + */ + public long getLoadExceptionCount() { + return loadExceptionCount; + } + + /** + * The total number of nanoseconds that the cache has spent loading new values. + * @return total time spent for all cache loads + */ + public long getTotalLoadTime() { + return totalLoadTime; + } + + /** + * The number of times cache entries have been evicted. + * @return the number of evictions + */ + public long getEvictionCount() { + return evictionCount; + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof CacheStats)) { + return false; + } + CacheStats o = (CacheStats)other; + return hitCount == o.hitCount && missCount == o.missCount && loadSuccessCount == o.loadSuccessCount && + loadExceptionCount == o.loadExceptionCount && totalLoadTime == o.totalLoadTime && evictionCount == o.evictionCount; + } + + @Override + public int hashCode() { + return Objects.hash(hitCount, missCount, loadSuccessCount, loadExceptionCount, totalLoadTime, evictionCount); + } + + @Override + public String toString() { + return "{hit=" + hitCount + ", miss=" + missCount + ", loadSuccess=" + loadSuccessCount + + ", loadException=" + loadExceptionCount + ", totalLoadTime=" + totalLoadTime + ", evictionCount=" + evictionCount + "}"; + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeEvent.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeEvent.java new file mode 100644 index 0000000..2fa3174 --- 
/dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeEvent.java @@ -0,0 +1,37 @@ +package com.launchdarkly.sdk.server.interfaces; + +/** + * Parameter class used with {@link FlagChangeListener}. + *

    + * This is not an analytics event to be sent to LaunchDarkly; it is a notification to the application. + * + * @since 5.0.0 + * @see FlagChangeListener + * @see FlagValueChangeEvent + * @see FlagTracker#addFlagChangeListener(FlagChangeListener) + */ +public class FlagChangeEvent { + private final String key; + + /** + * Constructs a new instance. + * + * @param key the feature flag key + */ + public FlagChangeEvent(String key) { + this.key = key; + } + + /** + * Returns the key of the feature flag whose configuration has changed. + *

    + * The specified flag may have been modified directly, or this may be an indirect change due to a change + * in some other flag that is a prerequisite for this flag, or a user segment that is referenced in the + * flag's rules. + * + * @return the flag key + */ + public String getKey() { + return key; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeListener.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeListener.java new file mode 100644 index 0000000..8124993 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagChangeListener.java @@ -0,0 +1,36 @@ +package com.launchdarkly.sdk.server.interfaces; + +/** + * An event listener that is notified when a feature flag's configuration has changed. + *

    + * As described in {@link FlagTracker#addFlagChangeListener(FlagChangeListener)}, this notification + * does not mean that the flag now returns a different value for any particular user, only that it + * may do so. LaunchDarkly feature flags can be configured to return a single value for all + * users, or to have complex targeting behavior. To know what effect the change would have for any + * given set of user properties, you would need to re-evaluate the flag by calling one of the + * {@code variation} methods on the client. + * + *

    
    + *     FlagChangeListener listenForChanges = event -> {
    + *         System.out.println("a flag has changed: " + event.getKey());
    + *     };
    + *     client.getFlagTracker().addFlagChangeListener(listenForChanges);
    + * 
    + * + * In simple use cases where you know that the flag configuration does not vary per user, or where you + * know ahead of time what user properties you will evaluate the flag with, it may be more convenient + * to use {@link FlagValueChangeListener}. + * + * @since 5.0.0 + * @see FlagValueChangeListener + * @see FlagTracker#addFlagChangeListener(FlagChangeListener) + * @see FlagTracker#removeFlagChangeListener(FlagChangeListener) + */ +public interface FlagChangeListener { + /** + * The SDK calls this method when a feature flag's configuration has changed in some way. + * + * @param event the event parameters + */ + void onFlagChange(FlagChangeEvent event); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagTracker.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagTracker.java new file mode 100644 index 0000000..df4998d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagTracker.java @@ -0,0 +1,85 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.server.Components; + +/** + * An interface for tracking changes in feature flag configurations. + *

    + * An implementation of this interface is returned by {@link com.launchdarkly.sdk.server.interfaces.LDClientInterface#getFlagTracker()}. + * Application code never needs to implement this interface. + * + * @since 5.0.0 + */ +public interface FlagTracker { + /** + * Registers a listener to be notified of feature flag changes in general. + *

    + * The listener will be notified whenever the SDK receives any change to any feature flag's configuration, + * or to a user segment that is referenced by a feature flag. If the updated flag is used as a prerequisite + * for other flags, the SDK assumes that those flags may now behave differently and sends flag change events + * for them as well. + *

    + * Note that this does not necessarily mean the flag's value has changed for any particular evaluation + * context, only that some part of the flag configuration was changed so that it may return a + * different value than it previously returned for some context. If you want to track flag value changes, + * use {@link #addFlagValueChangeListener(String, LDContext, FlagValueChangeListener)} instead. + *

    + * If using the file data source ({@link com.launchdarkly.sdk.server.integrations.FileData}), any change in + * a data file will be treated as a change to every flag. Again, use + * {@link #addFlagValueChangeListener(String, LDContext, FlagValueChangeListener)} (or just re-evaluate the flag + * yourself) if you want to know whether this is a change that really affects a flag's value. + *

    + * Change events only work if the SDK is actually connecting to LaunchDarkly (or using the file data source). + * If the SDK is only reading flags from a database ({@link Components#externalUpdatesOnly()}) then it cannot + * know when there is a change, because flags are read on an as-needed basis. + *

    + * The listener will be called from a worker thread. + *

    + * Calling this method for an already-registered listener has no effect. + * + * @param listener the event listener to register + * @see #removeFlagChangeListener(FlagChangeListener) + * @see FlagChangeListener + * @see #addFlagValueChangeListener(String, LDContext, FlagValueChangeListener) + */ + public void addFlagChangeListener(FlagChangeListener listener); + + /** + * Unregisters a listener so that it will no longer be notified of feature flag changes. + *

    + * Calling this method for a listener that was not previously registered has no effect. + * + * @param listener the event listener to unregister + * @see #addFlagChangeListener(FlagChangeListener) + * @see #addFlagValueChangeListener(String, LDContext, FlagValueChangeListener) + * @see FlagChangeListener + */ + public void removeFlagChangeListener(FlagChangeListener listener); + + /** + * Registers a listener to be notified of a change in a specific feature flag's value for a specific + * evaluation context. + *

    + * When you call this method, it first immediately evaluates the feature flag. It then uses + * {@link #addFlagChangeListener(FlagChangeListener)} to start listening for feature flag configuration + * changes, and whenever the specified feature flag changes, it re-evaluates the flag for the same context. + * It then calls your {@link FlagValueChangeListener} if and only if the resulting value has changed. + *

    + * All feature flag evaluations require an instance of {@link LDContext}. If the feature flag you are + * tracking does not have any user targeting rules, you must still pass a dummy context such as + * {@code LDContext.create("for-global-flags")}. If you do not want the user to appear on your dashboard, + * use the {@code anonymous} property: {@code LDContext.builder("for-global-flags").anonymous(true).build()}. + *

    + * The returned {@link FlagChangeListener} represents the subscription that was created by this method + * call; to unsubscribe, pass that object (not your {@code FlagValueChangeListener}) to + * {@link #removeFlagChangeListener(FlagChangeListener)}. + * + * @param flagKey the flag key to be evaluated + * @param context the evaluation context + * @param listener an object that you provide which will be notified of changes + * @return a {@link FlagChangeListener} that can be used to unregister the listener + * @since 6.0.0 + */ + public FlagChangeListener addFlagValueChangeListener(String flagKey, LDContext context, FlagValueChangeListener listener); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeEvent.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeEvent.java new file mode 100644 index 0000000..83e58c9 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeEvent.java @@ -0,0 +1,62 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.LDValue; + +/** + * Parameter class used with {@link FlagValueChangeListener}. + *

    + * This is not an analytics event to be sent to LaunchDarkly; it is a notification to the application. + * + * @since 5.0.0 + * @see FlagValueChangeListener + * @see FlagTracker#addFlagValueChangeListener(String, com.launchdarkly.sdk.LDContext, FlagValueChangeListener) + */ +public class FlagValueChangeEvent extends FlagChangeEvent { + private final LDValue oldValue; + private final LDValue newValue; + + /** + * Constructs a new instance. + * + * @param key the feature flag key + * @param oldValue the previous flag value + * @param newValue the new flag value + */ + public FlagValueChangeEvent(String key, LDValue oldValue, LDValue newValue) { + super(key); + this.oldValue = LDValue.normalize(oldValue); + this.newValue = LDValue.normalize(newValue); + } + + /** + * Returns the last known value of the flag for the specified evaluation context prior to the update. + *

    + * Since flag values can be of any JSON data type, this is represented as {@link LDValue}. That class + * has methods for converting to a primitive Java type such as {@link LDValue#booleanValue()}. + *

    + * If the flag did not exist before or could not be evaluated, this will be {@link LDValue#ofNull()}. + * Note that there is no application default value parameter as there is for the {@code variation} + * methods; it is up to your code to substitute whatever fallback value is appropriate. + * + * @return the previous flag value + */ + public LDValue getOldValue() { + return oldValue; + } + + /** + * Returns the new value of the flag for the specified evaluation context. + *

    + * Since flag values can be of any JSON data type, this is represented as {@link LDValue}. That class + * has methods for converting to a primitive Java type such {@link LDValue#booleanValue()}. + *

    + * If the flag was deleted or could not be evaluated, this will be {@link LDValue#ofNull()}. + * Note that there is no application default value parameter as there is for the {@code variation} + * methods; it is up to your code to substitute whatever fallback value is appropriate. + * + * @return the new flag value + */ + public LDValue getNewValue() { + return newValue; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeListener.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeListener.java new file mode 100644 index 0000000..8813d32 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/FlagValueChangeListener.java @@ -0,0 +1,41 @@ +package com.launchdarkly.sdk.server.interfaces; + +/** + * An event listener that is notified when a feature flag's value has changed for a specific + * evaluation context. + *

    + * Use this in conjunction with {@link FlagTracker#addFlagValueChangeListener(String, com.launchdarkly.sdk.LDContext, FlagValueChangeListener)} + * if you want the client to re-evaluate a flag for a specific evaluation context whenever the + * flag's configuration has changed, and notify you only if the new value is different from the old + * value. The listener will not be notified if the flag's configuration is changed in some way that does + * not affect its value for that context. + * + *

    
    + *     String flagKey = "my-important-flag";
    + *     LDContext contextForFlagEvaluation = LDContext.create("context-key-for-global-flag-state");
    + *     FlagValueChangeListener listenForNewValue = event -> {
    + *         if (event.getKey().equals(flagKey)) {
    + *             doSomethingWithNewValue(event.getNewValue().booleanValue());
    + *         }
    + *     };
    + *     client.getFlagTracker().addFlagValueChangeListener(flagKey,
    + *         contextForFlagEvaluation, listenForNewValue);
    + * 
    + * + * In the above example, the value provided in {@code event.getNewValue()} is the result of calling + * {@code client.jsonValueVariation(flagKey, contextForFlagEvaluation, LDValue.ofNull())} after the flag + * has changed. + * + * @since 5.0.0 + * @see FlagChangeListener + * @see FlagTracker#addFlagValueChangeListener(String, com.launchdarkly.sdk.LDContext, FlagValueChangeListener) + */ +public interface FlagValueChangeListener { + /** + * The SDK calls this method when a feature flag's value has changed with regard to the specified + * evaluation context. + * + * @param event the event parameters + */ + void onFlagValueChange(FlagValueChangeEvent event); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/HttpAuthentication.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/HttpAuthentication.java new file mode 100644 index 0000000..f0bf34d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/HttpAuthentication.java @@ -0,0 +1,52 @@ +package com.launchdarkly.sdk.server.interfaces; + +/** + * Represents a supported method of HTTP authentication, including proxy authentication. + * + * @since 4.13.0 + */ +public interface HttpAuthentication { + /** + * Computes the {@code Authorization} or {@code Proxy-Authorization} header for an authentication challenge. + * + * @param challenges the authentication challenges provided by the server, if any (may be empty if this is + * pre-emptive authentication) + * @return the value for the authorization request header + */ + String provideAuthorization(Iterable challenges); + + /** + * Properties of an HTTP authentication challenge. + */ + public static class Challenge { + private final String scheme; + private final String realm; + + /** + * Constructs an instance. 
+ * + * @param scheme the authentication scheme + * @param realm the authentication realm or null + */ + public Challenge(String scheme, String realm) { + this.scheme = scheme; + this.realm = realm; + } + + /** + * The authentication scheme, such as "basic". + * @return the authentication scheme + */ + public String getScheme() { + return scheme; + } + + /** + * The authentication realm, if any. + * @return the authentication realm or null + */ + public String getRealm() { + return realm; + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/LDClientInterface.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/LDClientInterface.java new file mode 100644 index 0000000..79c5b9d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/LDClientInterface.java @@ -0,0 +1,428 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; + +import com.launchdarkly.sdk.server.FeatureFlagsState; +import com.launchdarkly.sdk.server.FlagsStateOption; +import com.launchdarkly.sdk.server.LDClient; +import com.launchdarkly.sdk.server.MigrationOpTracker; +import com.launchdarkly.sdk.server.MigrationStage; +import com.launchdarkly.sdk.server.MigrationVariation; +import com.launchdarkly.sdk.server.migrations.Migration; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This interface defines the public methods of {@link LDClient}. + *

    + * Applications will normally interact directly with {@link LDClient}, and must use its constructor to + * initialize the SDK, but being able to refer to it indirectly via an interface may be helpful in test + * scenarios (mocking) or for some dependency injection frameworks. + */ +public interface LDClientInterface extends Closeable { + /** + * Tests whether the client is ready to be used. + * @return true if the client is ready, or false if it is still initializing + */ + boolean isInitialized(); + + /** + * Tracks that an application-defined event occurred. + *

    + * This method creates a "custom" analytics event containing the specified event name (key) + * and context properties. You may attach arbitrary data or a metric value to the event by calling + * {@link #trackData(String, LDContext, LDValue)} or {@link #trackMetric(String, LDContext, LDValue, double)} + * instead. + *

    + * Note that event delivery is asynchronous, so the event may not actually be sent until + * later; see {@link #flush()}. + * + * @param eventName the name of the event + * @param context the context associated with the event + * @see #trackData(String, LDContext, LDValue) + * @see #trackMetric(String, LDContext, LDValue, double) + * @since 6.0.0 + */ + void track(String eventName, LDContext context); + + /** + * Tracks that an application-defined event occurred. + *

    + * This method creates a "custom" analytics event containing the specified event name (key), + * context properties, and optional data. If you do not need custom data, pass {@link LDValue#ofNull()} + * for the last parameter or simply omit the parameter. You may attach a metric value to the event by + * calling {@link #trackMetric(String, LDContext, LDValue, double)} instead. + *

    + * Note that event delivery is asynchronous, so the event may not actually be sent until + * later; see {@link #flush()}. + * + * @param eventName the name of the event + * @param context the context associated with the event + * @param data additional data associated with the event, if any + * @since 6.0.0 + * @see #track(String, LDContext) + * @see #trackMetric(String, LDContext, LDValue, double) + */ + void trackData(String eventName, LDContext context, LDValue data); + + /** + * Tracks that an application-defined event occurred, and provides an additional numeric value for + * custom metrics. + *

    + * This value is used by the LaunchDarkly experimentation feature in numeric custom metrics, + * and will also be returned as part of the custom event for Data Export. + *

    + * Note that event delivery is asynchronous, so the event may not actually be sent until + * later; see {@link #flush()}. + * + * @param eventName the name of the event + * @param context the context associated with the event + * @param data an {@link LDValue} containing additional data associated with the event; if not applicable, + * you may pass either {@code null} or {@link LDValue#ofNull()} + * @param metricValue a numeric value used by the LaunchDarkly experimentation feature in numeric custom + * metrics + * @since 4.9.0 + * @see #track(String, LDContext) + * @see #trackData(String, LDContext, LDValue) + */ + void trackMetric(String eventName, LDContext context, LDValue data, double metricValue); + + /** + * Track the details of a migration. + * + * @param tracker Migration tracker which was used to track details of the migration operation. + * @since 7.0.0 + */ + void trackMigration(MigrationOpTracker tracker); + + /** + * Reports details about an evaluation context. + *

    + * This method simply creates an analytics event containing the context properties, to + * that LaunchDarkly will know about that context if it does not already. + *

    + * Calling any evaluation method, such as {@link #boolVariation(String, LDContext, boolean)}, + * also sends the context information to LaunchDarkly (if events are enabled), so you only + * need to use this method if you want to identify the context without evaluating a flag. + *

    + * Note that event delivery is asynchronous, so the event may not actually be sent until + * later; see {@link #flush()}. + * + * @param context the context to register + * @since 6.0.0 + */ + void identify(LDContext context); + + /** + * Returns an object that encapsulates the state of all feature flags for a given context, which can be + * passed to front-end code. + *

    + * The object returned by this method contains the flag values as well as other metadata that + * is used by the LaunchDarkly JavaScript client, so it can be used for + * bootstrapping. + *

    + * This method will not send analytics events back to LaunchDarkly. + * + * @param context the evaluation context + * @param options optional {@link FlagsStateOption} values affecting how the state is computed - for + * instance, to filter the set of flags to only include the client-side-enabled ones + * @return a {@link FeatureFlagsState} object (will never be null; see {@link FeatureFlagsState#isValid()} + * @since 6.0.0 + */ + FeatureFlagsState allFlagsState(LDContext context, FlagsStateOption... options); + + /** + * Calculates the boolean value of a feature flag for a given context. + *

    + * If the flag variation does not have a boolean value, {@code defaultValue} is returned. + *

    + * If an error makes it impossible to evaluate the flag (for instance, the feature flag key + * does not match any existing flag), {@code defaultValue} is returned. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return the variation for the given context, or {@code defaultValue} if the flag cannot be evaluated + * @since 6.0.0 + */ + boolean boolVariation(String key, LDContext context, boolean defaultValue); + + /** + * Calculates the integer value of a feature flag for a given context. + *

    + * If the flag variation has a numeric value that is not an integer, it is rounded toward zero + * (truncated). + *

    + * If the flag variation does not have a numeric value, {@code defaultValue} is returned. + *

    + * If an error makes it impossible to evaluate the flag (for instance, the feature flag key + * does not match any existing flag), {@code defaultValue} is returned. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return the variation for the given context, or {@code defaultValue} if the flag cannot be evaluated + * @since 6.0.0 + */ + int intVariation(String key, LDContext context, int defaultValue); + + /** + * Calculates the floating-point numeric value of a feature flag for a given context. + *

    + * If the flag variation does not have a numeric value, {@code defaultValue} is returned. + *

    + * If an error makes it impossible to evaluate the flag (for instance, the feature flag key + * does not match any existing flag), {@code defaultValue} is returned. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return the variation for the given context, or {@code defaultValue} if the flag cannot be evaluated + * @since 6.0.0 + */ + double doubleVariation(String key, LDContext context, double defaultValue); + + /** + * Calculates the string value of a feature flag for a given context. + *

    + * If the flag variation does not have a string value, {@code defaultValue} is returned. + *

    + * If an error makes it impossible to evaluate the flag (for instance, the feature flag key + * does not match any existing flag), {@code defaultValue} is returned. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return the variation for the given context, or {@code defaultValue} if the flag cannot be evaluated + * @since 6.0.0 + */ + String stringVariation(String key, LDContext context, String defaultValue); + + /** + * Calculates the value of a feature flag for a given context as any JSON value type. + *

    + * The type {@link LDValue} is used to represent any of the value types that can + * exist in JSON. Use {@link LDValue} methods to examine its type and value. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return the variation for the given context, or {@code defaultValue} if the flag cannot be evaluated + * @since 6.0.0 + */ + LDValue jsonValueVariation(String key, LDContext context, LDValue defaultValue); + + /** + * Calculates the boolean value of a feature flag for a given context, and returns an object that + * describes the way the value was determined. + *

    + * The {@link EvaluationDetail#getReason()} property in the result will also be included in + * analytics events, if you are capturing detailed event data for this flag. + *

    + * The behavior is otherwise identical to {@link #boolVariation(String, LDContext, boolean)}. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return an {@link EvaluationDetail} object + * @since 6.0.0 + */ + EvaluationDetail boolVariationDetail(String key, LDContext context, boolean defaultValue); + + /** + * Calculates the integer numeric value of a feature flag for a given context, and returns an object + * that describes the way the value was determined. + *

    + * The {@link EvaluationDetail#getReason()} property in the result will also be included in + * analytics events, if you are capturing detailed event data for this flag. + *

    + * The behavior is otherwise identical to {@link #intVariation(String, LDContext, int)}. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return an {@link EvaluationDetail} object + * @since 6.0.0 + */ + EvaluationDetail intVariationDetail(String key, LDContext context, int defaultValue); + + /** + * Calculates the floating-point numeric value of a feature flag for a given context, and returns an + * object that describes the way the value was determined. + *

    + * The {@link EvaluationDetail#getReason()} property in the result will also be included in + * analytics events, if you are capturing detailed event data for this flag. + *

    + * The behavior is otherwise identical to {@link #doubleVariation(String, LDContext, double)}. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return an {@link EvaluationDetail} object + * @since 6.0.0 + */ + EvaluationDetail doubleVariationDetail(String key, LDContext context, double defaultValue); + + /** + * Calculates the string value of a feature flag for a given context, and returns an object + * that describes the way the value was determined. + *

    + * The {@link EvaluationDetail#getReason()} property in the result will also be included in + * analytics events, if you are capturing detailed event data for this flag. + *

    + * The behavior is otherwise identical to {@link #stringVariation(String, LDContext, String)}. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return an {@link EvaluationDetail} object + * @since 6.0.0 + */ + EvaluationDetail stringVariationDetail(String key, LDContext context, String defaultValue); + + /** + * Calculates the value of a feature flag for a given context as any JSON value type, and returns an + * object that describes the way the value was determined. + *

    + * The {@link EvaluationDetail#getReason()} property in the result will also be included in + * analytics events, if you are capturing detailed event data for this flag. + *

    + * The behavior is otherwise identical to {@link #jsonValueVariation(String, LDContext, LDValue)}. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultValue the default value of the flag + * @return an {@link EvaluationDetail} object + * @since 6.0.0 + */ + EvaluationDetail jsonValueVariationDetail(String key, LDContext context, LDValue defaultValue); + + /** + * Returns the migration stage of the migration feature flag for the given + * evaluation context. + *

    + * If the evaluated value of the flag cannot be converted to an LDMigrationStage, then the default + * value will be returned and error will be logged. + * + * @param key the unique key for the feature flag + * @param context the evaluation context + * @param defaultStage the default stage of the migration + * @return the current stage and a tracker which can be used to track the migration operation + * @since 7.0.0 + */ + MigrationVariation migrationVariation(String key, LDContext context, MigrationStage defaultStage); + + /** + * Returns true if the specified feature flag currently exists. + * @param featureKey the unique key for the feature flag + * @return true if the flag exists + */ + boolean isFlagKnown(String featureKey); + + /** + * Closes the LaunchDarkly client event processing thread. This should only + * be called on application shutdown. + * + * @throws IOException if an exception is thrown by one of the underlying network services + */ + @Override + void close() throws IOException; + + /** + * Flushes all pending events. + */ + void flush(); + + /** + * Returns true if the client is in offline mode. + * @return whether the client is in offline mode + */ + boolean isOffline(); + + /** + * Returns an interface for tracking changes in feature flag configurations. + *

    + * The {@link FlagTracker} contains methods for requesting notifications about feature flag changes using + * an event listener model. + * + * @return a {@link FlagTracker} + * @since 5.0.0 + */ + FlagTracker getFlagTracker(); + + /** + * Returns an interface for tracking the status of the Big Segment store. + *

    + * The returned object has methods for checking whether the Big Segment store is (as far as the + * SDK knows) currently operational and tracking changes in this status. See + * {@link BigSegmentStoreStatusProvider} for more about this functionality. + * + * @return a {@link BigSegmentStoreStatusProvider} + * @since 5.7.0 + */ + BigSegmentStoreStatusProvider getBigSegmentStoreStatusProvider(); + + /** + * Returns an interface for tracking the status of the data source. + *

    + * The data source is the mechanism that the SDK uses to get feature flag configurations, such as a + * streaming connection (the default) or poll requests. The {@link DataSourceStatusProvider} has methods + * for checking whether the data source is (as far as the SDK knows) currently operational and tracking + * changes in this status. + * + * @return a {@link DataSourceStatusProvider} + * @since 5.0.0 + */ + DataSourceStatusProvider getDataSourceStatusProvider(); + + /** + * Returns an interface for tracking the status of a persistent data store. + *

    + * The {@link DataStoreStatusProvider} has methods for checking whether the data store is (as far as the + * SDK knows) currently operational, tracking changes in this status, and getting cache statistics. These + * are only relevant for a persistent data store; if you are using an in-memory data store, then this + * method will return a stub object that provides no information. + * + * @return a {@link DataStoreStatusProvider} + * @since 5.0.0 + */ + DataStoreStatusProvider getDataStoreStatusProvider(); + + /** + * Returns the logger instance used by this SDK instance. + *

    + * This allows for access to the logger by other LaunchDarkly components, such as the {@link Migration} + * class. + *

    + * It also allows for usage of the logger in wrapper implementations. + *

    + * It is not intended for general purpose application logging. + * + * @return a {@link LDLogger} + * @since 7.0.0 + */ + LDLogger getLogger(); + + /** + * Creates a hash string that can be used by the JavaScript SDK to identify a context. + *

    + * See + * Secure mode in the JavaScript SDK Reference. + * + * @param context the evaluation context + * @return the hash, or null if the hash could not be calculated + * @since 6.0.0 + */ + String secureModeHash(LDContext context); + + /** + * The current version string of the SDK. + * + * @return a string in Semantic Versioning 2.0.0 format + */ + String version(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ServiceEndpoints.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ServiceEndpoints.java new file mode 100644 index 0000000..553de99 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/ServiceEndpoints.java @@ -0,0 +1,51 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder; +import java.net.URI; + +/** + * Specifies the base service URIs used by SDK components. + *

    + * See {@link ServiceEndpointsBuilder} for more details on these properties. + */ +public final class ServiceEndpoints { + private URI streamingBaseUri; + private URI pollingBaseUri; + private URI eventsBaseUri; + + /** + * Used internally by the SDK to store service endpoints. + * @param streamingBaseUri the base URI for the streaming service + * @param pollingBaseUri the base URI for the polling service + * @param eventsBaseUri the base URI for the events service + */ + public ServiceEndpoints(URI streamingBaseUri, URI pollingBaseUri, URI eventsBaseUri) { + this.streamingBaseUri = streamingBaseUri; + this.pollingBaseUri = pollingBaseUri; + this.eventsBaseUri = eventsBaseUri; + } + + /** + * The base URI for the streaming service. + * @return the base URI, or null + */ + public URI getStreamingBaseUri() { + return streamingBaseUri; + } + + /** + * The base URI for the polling service. + * @return the base URI, or null + */ + public URI getPollingBaseUri() { + return pollingBaseUri; + } + + /** + * The base URI for the events service. + * @return the base URI, or null + */ + public URI getEventsBaseUri() { + return eventsBaseUri; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/WrapperInfo.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/WrapperInfo.java new file mode 100644 index 0000000..28f8f1d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/WrapperInfo.java @@ -0,0 +1,40 @@ +package com.launchdarkly.sdk.server.interfaces; + +/** + * Contains wrapper SDK information. + *

    + * This is intended for use within the SDK. + */ +final public class WrapperInfo { + private final String wrapperName; + private final String wrapperVersion; + + /** + * Get the name of the wrapper. + * + * @return the wrapper name + */ + public String getWrapperName() { + return wrapperName; + } + + /** + * Get the version of the wrapper. + * + * @return the wrapper version + */ + public String getWrapperVersion() { + return wrapperVersion; + } + + /** + * Used internally by the SDK to track wrapper information. + * + * @param wrapperName the name of the wrapper + * @param wrapperVersion the version of the wrapper + */ + public WrapperInfo(String wrapperName, String wrapperVersion) { + this.wrapperName = wrapperName; + this.wrapperVersion = wrapperVersion; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/package-info.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/package-info.java new file mode 100644 index 0000000..c511b30 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/interfaces/package-info.java @@ -0,0 +1,16 @@ +/** + * Types that are part of the public API, but are not needed for basic use of the SDK. + *

    + * Types in this namespace include: + *

      + *
    • The interface {@link com.launchdarkly.sdk.server.interfaces.LDClientInterface}, which + * allow the SDK client to be referenced via an interface rather than the concrete type + * {@link com.launchdarkly.sdk.server.LDClient}.
    • + *
    • Interfaces like {@link com.launchdarkly.sdk.server.interfaces.FlagTracker} that provide a + * facade for some part of the SDK API; these are returned by methods like + * {@link com.launchdarkly.sdk.server.LDClient#getFlagTracker()}.
    • + *
    • Concrete types that are used as parameters within these interfaces, like + * {@link com.launchdarkly.sdk.server.interfaces.FlagChangeEvent}.
    • + *
    + */ +package com.launchdarkly.sdk.server.interfaces; diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/Migration.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/Migration.java new file mode 100644 index 0000000..61a8bb2 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/Migration.java @@ -0,0 +1,570 @@ +package com.launchdarkly.sdk.server.migrations; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.server.MigrationOp; +import com.launchdarkly.sdk.server.MigrationOpTracker; +import com.launchdarkly.sdk.server.MigrationOrigin; +import com.launchdarkly.sdk.server.MigrationStage; +import com.launchdarkly.sdk.server.MigrationVariation; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ThreadLocalRandom; + +/** + * Class for performing a technology migration. + *

    + * This class is not intended to be instanced directly, but instead should be constructed + * using the {@link MigrationBuilder}. + *

    + * The thread safety model for a migration depends on the usage of thread-safe elements. Specifically the tracker, + * the client, and the thread pool should be thread-safe. Other elements of the migration instance itself are immutable + * for their thread-safety. + * + * @param The result type for reads. + * @param The result type for writes. + * @param The input parameter type for reads. + * @param The input type for writes. + */ +public final class Migration { + private final Reader readOld; + private final Reader readNew; + + private final Writer writeOld; + private final Writer writeNew; + + private final ReadConsistencyChecker checker; + + private final MigrationExecution execution; + + private final boolean latencyTracking; + + private final boolean errorTracking; + + private final LDClientInterface client; + private final LDLogger logger; + + private final ExecutorService pool = Executors.newCachedThreadPool(); + + Migration( + LDClientInterface client, + Reader readOld, + Reader readNew, + Writer writeOld, + Writer writeNew, + ReadConsistencyChecker checker, + MigrationExecution execution, + boolean latencyTracking, + boolean errorTracking) { + this.client = client; + this.readOld = readOld; + this.readNew = readNew; + this.writeOld = writeOld; + + this.writeNew = writeNew; + this.checker = checker; + this.execution = execution; + this.latencyTracking = latencyTracking; + this.errorTracking = errorTracking; + this.logger = client.getLogger(); + } + + public interface Method { + MigrationMethodResult execute(UInput payload); + } + + + /** + * This interface defines a read method. + * + * @param the payload type of the read + * @param the result type of the read + */ + public interface Reader extends Method { + } + + /** + * This interfaces defines a write method. 
+ * + * @param the payload type of the write + * @param the return type of the write + */ + public interface Writer extends Method { + } + + /** + * This interface defines a method for checking the consistency of two reads. + * + * @param the result type of the read + */ + public interface ReadConsistencyChecker { + boolean check(TReadResult a, TReadResult b); + } + + /** + * This class represents the result of a migration operation. + *

    + * In the case of a read operation the result will be this type. Write operations may need to return multiple results + * and therefore use the {@link MigrationWriteResult} type. + * + * @param the result type of the operation + */ + public static final class MigrationResult { + private final boolean success; + private final MigrationOrigin origin; + private final TResult result; + private final Exception exception; + + public MigrationResult( + boolean success, + @NotNull MigrationOrigin origin, + @Nullable TResult result, + @Nullable Exception exception) { + this.success = success; + this.origin = origin; + this.result = result; + this.exception = exception; + } + + /** + * Check if the operation was a success. + * + * @return true if the operation was a success + */ + public boolean isSuccess() { + return success; + } + + /** + * Get the origin associated with the result. + * + * @return The origin of the result. + */ + public MigrationOrigin getOrigin() { + return origin; + } + + /** + * The result. This may be an empty optional if an error occurred. + * + * @return The result, or an empty optional if no result was generated. + */ + public Optional getResult() { + return Optional.ofNullable(result); + } + + /** + * Get the exception associated with the result or an empty optional if there + * was no exception. + *

    + * A result may not be successful, but may also not have an exception associated with it. + * + * @return the exception, or an empty optional if no result was produced + */ + public Optional getException() { + return Optional.ofNullable(exception); + } + } + + /** + * The result of a migration write. + *

    + * A migration write result will always include an authoritative result, and it may contain a non-authoritative result. + *

    + * Not all migration stages will execute both writes, and in the case of a write error from the authoritative source + * then the non-authoritative write will not be executed. + * + * @param The result type of the write. + */ + public static final class MigrationWriteResult { + private final MigrationResult authoritative; + private final MigrationResult nonAuthoritative; + + public MigrationWriteResult(@NotNull Migration.MigrationResult authoritative) { + this.authoritative = authoritative; + this.nonAuthoritative = null; + } + + public MigrationWriteResult( + @NotNull Migration.MigrationResult authoritative, + @Nullable Migration.MigrationResult nonAuthoritative) { + this.authoritative = authoritative; + this.nonAuthoritative = nonAuthoritative; + } + + /** + * Get the authoritative result of the write. + * + * @return the authoritative result + */ + public MigrationResult getAuthoritative() { + return authoritative; + } + + /** + * Get the non-authoritative result. + * + * @return the result, or an empty optional if no result was generated + */ + public Optional> getNonAuthoritative() { + return Optional.ofNullable(nonAuthoritative); + } + } + + private static final class MultiReadResult { + private final MigrationResult oldResult; + private final MigrationResult newResult; + + MultiReadResult(MigrationResult oldResult, MigrationResult newResult) { + this.oldResult = oldResult; + this.newResult = newResult; + } + + MigrationResult getOld() { + return oldResult; + } + + MigrationResult getNew() { + return newResult; + } + } + + @NotNull + private MigrationResult doSingleOp( + @Nullable TInput payload, + @NotNull MigrationOpTracker tracker, + @NotNull MigrationOrigin origin, + @NotNull Method method + ) { + tracker.invoked(origin); + MigrationMethodResult res = trackLatency(payload, tracker, origin, method); + if (res.isSuccess()) { + return new MigrationResult<>(true, origin, res.getResult().orElse(null), null); + } + if (errorTracking) { + 
tracker.error(origin); + } + return new MigrationResult<>(false, origin, null, res.getException().orElse(null)); + } + + @NotNull + private MultiReadResult doMultiRead( + @Nullable TReadInput payload, + @NotNull MigrationOpTracker tracker) { + + MultiReadResult result; + switch (execution.getMode()) { + case SERIAL: + result = doSerialRead(payload, tracker); + break; + case PARALLEL: + result = doParallelRead(payload, tracker); + break; + default: { + // This would likely be an implementation error from extending the execution modes and not updating this code. + logger.error("Unrecognized execution mode while executing migration."); + result = doSerialRead(payload, tracker); + } + } + + if (checker != null && + result.oldResult.success && + result.newResult.success + ) { + // Temporary variables for the lambda invocation. + MigrationResult finalNewResult = result.newResult; + MigrationResult finalOldResult = result.oldResult; + // Note the individual results could be null. For instance reading + // a DB entry that does not exist. + tracker.consistency(() -> checker.check(finalOldResult.result, + finalNewResult.result)); + } + + return result; + } + + @NotNull + private MultiReadResult doSerialRead( + @Nullable TReadInput payload, + @NotNull MigrationOpTracker tracker) { + + MigrationSerialOrder order = execution.getOrder().orElse(MigrationSerialOrder.FIXED); + + int result = 0; + if (order == MigrationSerialOrder.RANDOM) { + // This random number is not used for cryptographic purposes. 
+ result = ThreadLocalRandom.current().nextInt(2); + } + + MigrationResult oldResult; + MigrationResult newResult; + if (result == 0) { + oldResult = doSingleOp(payload, tracker, MigrationOrigin.OLD, readOld); + newResult = doSingleOp(payload, tracker, MigrationOrigin.NEW, readNew); + } else { + newResult = doSingleOp(payload, tracker, MigrationOrigin.NEW, readNew); + oldResult = doSingleOp(payload, tracker, MigrationOrigin.OLD, readOld); + } + + return new MultiReadResult<>(oldResult, newResult); + } + + @NotNull + private MultiReadResult doParallelRead( + @Nullable TReadInput payload, + @NotNull MigrationOpTracker tracker) { + List>> tasks = new ArrayList<>(); + tasks.add(() -> doSingleOp(payload, tracker, MigrationOrigin.OLD, readOld)); + tasks.add(() -> doSingleOp(payload, tracker, MigrationOrigin.NEW, readNew)); + try { + List>> futures = pool.invokeAll(tasks); + + // We do not initialize bad results here in order to reduce the amount of garbage that needs collected. + // For happy path the result would never be used. + MigrationResult oldResult = null; + MigrationResult newResult = null; + + for (Future> future : futures) { + try { + MigrationResult result = future.get(); + switch (result.origin) { + case OLD: + oldResult = result; + break; + case NEW: + newResult = result; + break; + } + } catch (Exception e) { + // We do not know which result, just that one of them failed. + // After this stage we can null check and add failed results. + logger.error("An error occurred executing parallel reads: {}", e); + } + } + + // If either of these is null, then we know that we failed to get the task. + // This represents a threading failure. 
+ if (oldResult == null) { + oldResult = new MigrationResult<>(false, MigrationOrigin.OLD, null, null); + } + if (newResult == null) { + newResult = new MigrationResult<>(false, MigrationOrigin.NEW, null, null); + } + + return new MultiReadResult<>(oldResult, newResult); + } catch (Exception e) { + logger.error("An error occurred executing parallel reads: {}", e); + } + + // Something threading related happened, and we could not get any results. + return new MultiReadResult<>( + new MigrationResult<>(false, MigrationOrigin.OLD, null, null), + new MigrationResult<>(false, MigrationOrigin.NEW, null, null)); + } + + @NotNull + private MigrationMethodResult trackLatency( + @Nullable UInput payload, + @NotNull MigrationOpTracker tracker, + @NotNull MigrationOrigin origin, + @NotNull Method method + ) { + MigrationMethodResult res; + if (latencyTracking) { + long start = System.currentTimeMillis(); + res = safeCall(payload, method); + long stop = System.currentTimeMillis(); + tracker.latency(origin, Duration.of(stop - start, ChronoUnit.MILLIS)); + } else { + res = safeCall(payload, method); + } + return res; + } + + @NotNull + private static MigrationMethodResult safeCall( + @Nullable UInput payload, + @NotNull Method method + ) { + MigrationMethodResult res; + try { + res = method.execute(payload); + } catch (Exception e) { + res = MigrationMethodResult.Failure(e); + } + return res; + } + + @NotNull + private MigrationResult handleReadStage( + @Nullable TReadInput payload, + @NotNull MigrationVariation migrationVariation, + @NotNull MigrationOpTracker tracker) { + switch (migrationVariation.getStage()) { + case OFF: // Intentionally falls through. + case DUAL_WRITE: { + return doSingleOp(payload, tracker, MigrationOrigin.OLD, readOld); + } + case SHADOW: { + return doMultiRead(payload, tracker).getOld(); + } + case LIVE: { + return doMultiRead(payload, tracker).getNew(); + } + case RAMP_DOWN: // Intentionally falls through. 
+ case COMPLETE: { + return doSingleOp(payload, tracker, MigrationOrigin.NEW, readNew); + } + default: { + // If this error occurs it would be because an additional migration stage + // was added, but this code was not updated to support it. + throw new RuntimeException("Unsupported migration stage."); + } + } + } + + /** + * Execute a migration based read with a payload. + *

    + * To execute a read without a payload use {@link #read(String, LDContext, MigrationStage)}. + * + * @param key the flag key of migration flag + * @param context the context for the migration + * @param defaultStage the default migration stage + * @param payload an optional payload that will be passed to the new/old read implementations + * @return the result of the read + */ + @NotNull + public MigrationResult read( + @NotNull String key, + @NotNull LDContext context, + @NotNull MigrationStage defaultStage, + @Nullable TReadInput payload) { + MigrationVariation migrationVariation = client.migrationVariation(key, context, defaultStage); + MigrationOpTracker tracker = migrationVariation.getTracker(); + tracker.op(MigrationOp.READ); + + MigrationResult res = handleReadStage(payload, migrationVariation, tracker); + + client.trackMigration(tracker); + + return res; + } + + /** + * Execute a migration based read. + *

    + * To execute a read with a payload use {@link #read(String, LDContext, MigrationStage, Object)}. + * + * @param key the flag key of migration flag + * @param context the context for the migration + * @param defaultStage the default migration stage + * @return the result of the read + */ + @NotNull + public MigrationResult read( + @NotNull String key, + @NotNull LDContext context, + @NotNull MigrationStage defaultStage) { + return read(key, context, defaultStage, null); + } + + @NotNull + private MigrationWriteResult handleWriteStage( + @Nullable TWriteInput payload, + @NotNull MigrationVariation migrationVariation, + @NotNull MigrationOpTracker tracker) { + switch (migrationVariation.getStage()) { + case OFF: { + MigrationResult res = doSingleOp(payload, tracker, MigrationOrigin.OLD, writeOld); + return new MigrationWriteResult<>(res); + } + case DUAL_WRITE: // Intentionally falls through. + case SHADOW: { + MigrationResult oldResult = doSingleOp(payload, tracker, MigrationOrigin.OLD, writeOld); + + if (!oldResult.success) { + return new MigrationWriteResult<>(oldResult); + } + MigrationResult newResult = doSingleOp(payload, tracker, MigrationOrigin.NEW, writeNew); + return new MigrationWriteResult<>(oldResult, newResult); + } + case LIVE: // Intentionally falls through. + case RAMP_DOWN: { + MigrationResult newResult = doSingleOp(payload, tracker, MigrationOrigin.NEW, writeNew); + + if (!newResult.success) { + return new MigrationWriteResult<>(newResult); + } + MigrationResult oldResult = doSingleOp(payload, tracker, MigrationOrigin.OLD, writeOld); + return new MigrationWriteResult<>(newResult, oldResult); + } + case COMPLETE: { + MigrationResult res = doSingleOp(payload, tracker, MigrationOrigin.NEW, writeNew); + return new MigrationWriteResult<>(res); + } + default: { + // If this error occurs it would be because an additional migration stage + // was added, but this code was not updated to support it. 
+ throw new RuntimeException("Unsupported migration stage."); + } + } + } + + /** + * Execute a migration based write with a payload. + *

    + * To execute a write without a payload use {@link #write(String, LDContext, MigrationStage)}. + * + * @param key the flag key of migration flag + * @param context the context for the migration + * @param defaultStage the default migration stage + * @param payload an optional payload that will be passed to the new/old write implementations + * @return the result of the write + */ + @NotNull + public MigrationWriteResult write( + @NotNull String key, + @NotNull LDContext context, + @NotNull MigrationStage defaultStage, + @Nullable TWriteInput payload) { + MigrationVariation migrationVariation = client.migrationVariation(key, context, defaultStage); + MigrationOpTracker tracker = migrationVariation.getTracker(); + tracker.op(MigrationOp.WRITE); + + MigrationWriteResult res = handleWriteStage(payload, migrationVariation, tracker); + + client.trackMigration(tracker); + + return res; + } + + /** + * Execute a migration based write. + *

    + * To execute a read with a payload use {@link #write(String, LDContext, MigrationStage, Object)}. + * + * @param key the flag key of migration flag + * @param context the context for the migration + * @param defaultStage the default migration stage + * @return the result of the write + */ + @NotNull + public MigrationWriteResult write( + @NotNull String key, + @NotNull LDContext context, + @NotNull MigrationStage defaultStage) { + return write(key, context, defaultStage, null); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationBuilder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationBuilder.java new file mode 100644 index 0000000..6a83cb0 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationBuilder.java @@ -0,0 +1,183 @@ +package com.launchdarkly.sdk.server.migrations; + +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import org.jetbrains.annotations.NotNull; + +import java.util.Optional; + +/** + * This builder is used to construct {@link Migration} instances. + *

    + * This class is not thread-safe. The builder should be used on one thread and then the + * built {@link Migration} is thread safe. + * + * @param The result type for reads. + * @param The result type for writes. + * @param The input parameter type for reads. + * @param The input type for writes. + */ +public class MigrationBuilder { + private Migration.Reader readOld; + private Migration.Reader readNew; + + private Migration.Writer writeOld; + private Migration.Writer writeNew; + + private Migration.ReadConsistencyChecker checker; + + private MigrationExecution execution = MigrationExecution.Parallel(); + + private boolean latencyTracking = true; + + private boolean errorTracking = true; + + private final LDClientInterface client; + + /** + * Construct a new builder. + * + * @param client this client will be used for {@link Migration}s built from + * this builder + */ + public MigrationBuilder(LDClientInterface client) { + this.client = client; + } + + /** + * Enable or disable latency tracking. Tracking is enabled by default. + * + * @param track true to enable tracking, false to disable it + * @return a reference to this builder + */ + @NotNull + public MigrationBuilder trackLatency(boolean track) { + this.latencyTracking = track; + return this; + } + + /** + * Enable or disable error tracking. Tracking is enabled by default. + * + * @param track true to enable error tracking, false to disable it + * @return a reference to this builder + */ + @NotNull + public MigrationBuilder trackErrors(boolean track) { + this.errorTracking = track; + return this; + } + + /** + * Influences the level of concurrency when the migration stage calls for multiple execution reads. + *

    + * The default read execution is {@link MigrationExecution#Parallel()}. + *

    + * Setting the execution to randomized serial order. + *

    +   *   builder.readExecution(MigrationExecution.Serial(MigrationSerialOrder.RANDOM));
    +   * 
    + * + * @param execution the execution configuration + * @return a reference to this builder + */ + @NotNull + public MigrationBuilder readExecution(MigrationExecution execution) { + this.execution = execution; + return this; + } + + /** + * Configure the read methods of the migration. + *

    + * Users are required to provide two different read methods -- one to read from the old migration source, and one to + * read from the new source. This method allows specifying a check method for consistency tracking. + *

    + * If you do not want consistency tracking, then use + * {@link MigrationBuilder#read(Migration.Reader, Migration.Reader)}. + * + * @param oldImpl method for reading from the "old" migration source + * @param newImpl method for reading from the "new" migration source + * @param checker method which checks the consistency of the "old" and "new" source + * @return a reference to this builder + */ + @NotNull + public MigrationBuilder read( + @NotNull Migration.Reader oldImpl, + @NotNull Migration.Reader newImpl, + @NotNull Migration.ReadConsistencyChecker checker + ) { + this.readOld = oldImpl; + this.readNew = newImpl; + this.checker = checker; + return this; + } + + /** + * Configure the read methods of the migration. + *

    + * Users are required to provide two different read methods -- one to read from the old migration source, and one to + * read from the new source. This method does not enable consistency tracking. + *

    + * If you do want consistency tracking, then use + * {@link MigrationBuilder#read(Migration.Reader, Migration.Reader, Migration.ReadConsistencyChecker)}. + * + * @param oldImpl method for reading from the "old" migration source + * @param newImpl method for reading from the "new" migration source + * @return a reference to this builder + */ + @NotNull + public MigrationBuilder read( + @NotNull Migration.Reader oldImpl, + @NotNull Migration.Reader newImpl + ) { + this.readOld = oldImpl; + this.readNew = newImpl; + return this; + } + + /** + * Configure the write methods of the migration. + *

    + * Users are required to provide two different write methods -- one to write to the old migration source, and one to + * write to the new source. Not every stage requires + * + * @param oldImpl method which writes to the "old" source + * @param newImpl method which writes to the "new" source + * @return a reference to this builder + */ + @NotNull + public MigrationBuilder write( + @NotNull Migration.Writer oldImpl, + @NotNull Migration.Writer newImpl + ) { + this.writeOld = oldImpl; + this.writeNew = newImpl; + return this; + } + + /** + * Build a {@link Migration}. + *

    + * A migration requires that both the read and write methods are defined. If they have not been defined, then + * a migration cannot be constructed. In this case an empty optional will be returned. + * + * @return Either an empty optional or an optional containing a {@link Migration}. + */ + @NotNull + public Optional> build() { + // All the methods must be set to make a valid migration. + if ( + readNew == null || + readOld == null || + writeNew == null || + writeOld == null + ) { + // TODO: Log something. + return Optional.empty(); + } + return Optional.of(new Migration<>( + client, readOld, readNew, writeOld, writeNew, + checker, execution, latencyTracking, errorTracking + )); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecution.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecution.java new file mode 100644 index 0000000..b9ff4a5 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecution.java @@ -0,0 +1,81 @@ +package com.launchdarkly.sdk.server.migrations; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Optional; + +/** + * This class is used to control the execution mechanism for migrations. + *

    + * Read operations may be executed in parallel, sequentially in a fixed order, or sequentially in a randomized order. + *

    + * This class facilitates correct combinations of parallel/serial with random/fixed. + */ +public class MigrationExecution { + private final MigrationExecutionMode mode; + private final MigrationSerialOrder order; + + private MigrationExecution( + @NotNull MigrationExecutionMode mode, + @Nullable MigrationSerialOrder order) { + this.mode = mode; + this.order = order; + } + + /** + * Construct a serial execution with the specified ordering. + * + * @param order The serial execution order fixed/random. + * @return an execution instance + */ + public static MigrationExecution Serial(@NotNull MigrationSerialOrder order) { + return new MigrationExecution(MigrationExecutionMode.SERIAL, order); + } + + /** + * Constructs a parallel execution. + * + * @return an execution instance + */ + public static MigrationExecution Parallel() { + return new MigrationExecution(MigrationExecutionMode.PARALLEL, null); + } + + /** + * Get the current execution mode. + * + * @return The execution mode. + */ + public MigrationExecutionMode getMode() { + return mode; + } + + /** + * If the execution mode is {@link MigrationExecutionMode#SERIAL}, then this will contain an execution order. + * If the mode is not SERIAL, then this will return an empty optional. + * + * @return The optional execution mode. + */ + public Optional getOrder() { + return Optional.ofNullable(order); + } + + /** + * A string representation of the migration execution. The return value from this function should only be used + * for logging or human-read identification. It should not be used programmatically and will not follow semver. + * + * @return A string representation of the string. 
+ */ + @Override + public String toString() { + String strValue = ""; + + strValue += mode.toString(); + if (order != null) { + strValue += "-"; + strValue += order.toString(); + } + return strValue; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecutionMode.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecutionMode.java new file mode 100644 index 0000000..06c1041 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationExecutionMode.java @@ -0,0 +1,17 @@ +package com.launchdarkly.sdk.server.migrations; + +/** + * Execution mode for a migration. + *

    + * This applies only to a single read, not multiple reads using the same migration. + */ +public enum MigrationExecutionMode { + /** + * Execute one read fully before executing another read. + */ + SERIAL, + /** + * Start reads in parallel and wait for them to both finish. + */ + PARALLEL +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationMethodResult.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationMethodResult.java new file mode 100644 index 0000000..bfcf49d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationMethodResult.java @@ -0,0 +1,107 @@ +package com.launchdarkly.sdk.server.migrations; + +import org.jetbrains.annotations.Nullable; + +import java.util.Optional; + +/** + * Results of a method associated with a migration origin. + *

    + * A result may either be a success, which will include a result type, or a failure. + *

    + * The static methods are intended to be used to create results in a migration method. + *

    + * An exception thrown from a migration method will be equivalent to using the + * {@link MigrationMethodResult#Failure(Exception)} method. + * + *

    
    + *   .read((payload) -> {
    + *       return MigrationMethodResult.Success("My Result!");
    + *   })
    + * 
    + *
    
    + *   .read((payload) -> {
    + *       return MigrationMethodResult.Failure();
    + *   })
    + * 
    + * + * @param the type of the result + */ +public final class MigrationMethodResult { + private MigrationMethodResult( + boolean success, + @Nullable T result, + @Nullable Exception exception) { + this.success = success; + this.result = result; + this.exception = exception; + } + + private final boolean success; + private final T result; + + private final Exception exception; + + /** + * Construct a method result representing a failure. + *

    + * This method doesn't provide any information about the cause of the failure. It is recommended + * to throw an exception or use {@link MigrationMethodResult#Failure(Exception)}. + * + * @return a method result + * @param the type of the method result + */ + public static MigrationMethodResult Failure() { + return new MigrationMethodResult<>(false, null, null); + } + + /** + * Construct a method result representing a failure based on an Exception. + * + * @param err the exception which caused the failure + * @return a method result + * @param the type of the method result + */ + public static MigrationMethodResult Failure(Exception err) { + return new MigrationMethodResult<>(false, null, err); + } + + /** + * Create a successful method result. + * + * @param result the result of the method + * @return a method result + * @param the type of the method result + */ + public static MigrationMethodResult Success(U result) { + return new MigrationMethodResult<>(true, result, null); + } + + /** + * Returns true if the method was successful. + * + * @return true if the method was successful + */ + public boolean isSuccess() { + return success; + } + + /** + * Get the result of the method. + * + * @return the result, or an empty optional if no result was produced + */ + public Optional getResult() { + return Optional.ofNullable(result); + } + + /** + * Get the exception associated with the method or an empty optional if there + * was no exception. 
+ * + * @return the exception, or an empty optional if no result was produced + */ + public Optional getException() { + return Optional.ofNullable(exception); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationSerialOrder.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationSerialOrder.java new file mode 100644 index 0000000..cbef552 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/MigrationSerialOrder.java @@ -0,0 +1,15 @@ +package com.launchdarkly.sdk.server.migrations; + +/** + * When using serial execution controls the order reads are executed. + */ +public enum MigrationSerialOrder { + /** + * Each time a read is performed randomize the order. + */ + RANDOM, + /** + * Always execute reads in the same order. + */ + FIXED +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/package-info.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/package-info.java new file mode 100644 index 0000000..1692f8f --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/migrations/package-info.java @@ -0,0 +1,7 @@ +/** + * Implementations related to technology migrations. + *

    + * The {@link com.launchdarkly.sdk.server.migrations.MigrationBuilder} is the primary entrypoint in this package + * and should be used to configure a technology migration. + */ +package com.launchdarkly.sdk.server.migrations; diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/package-info.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/package-info.java new file mode 100644 index 0000000..c3d6315 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/package-info.java @@ -0,0 +1,10 @@ +/** + * Main package for the LaunchDarkly Server-Side Java SDK, containing the client and configuration classes. + *

    + * You will most often use {@link com.launchdarkly.sdk.server.LDClient} (the SDK client) and + * {@link com.launchdarkly.sdk.server.LDConfig} (configuration options for the client). + *

    + * Other commonly used types such as {@link com.launchdarkly.sdk.LDContext} are in the {@code com.launchdarkly.sdk} + * package, since those are not server-side-specific and are shared with the LaunchDarkly Android SDK. + */ +package com.launchdarkly.sdk.server; diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStore.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStore.java new file mode 100644 index 0000000..082eb20 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStore.java @@ -0,0 +1,45 @@ +package com.launchdarkly.sdk.server.subsystems; + +import java.io.Closeable; + +/** + * Interface for a read-only data store that allows querying of user membership in Big Segments. + *

    + * Big Segments are a specific type of user segments. For more information, read the + * LaunchDarkly documentation + * . + * + * @since 5.7.0 + */ +public interface BigSegmentStore extends Closeable { + /** + * Queries the store for a snapshot of the current segment state for a specific user. + *

    + * The {@code userHash} is a base64-encoded string produced by hashing the user key as defined by + * the Big Segments specification; the store implementation does not need to know the details of + * how this is done, because it deals only with already-hashed keys, but the string can be assumed + * to only contain characters that are valid in base64. + *

    + * If the store is working, but no membership state is found for this user, the method may return + * either {@code null} or an empty {@link BigSegmentStoreTypes.Membership}. It should not throw an + * exception unless there is an unexpected database error or the retrieved data is malformed. + * + * @param userHash the hashed user identifier + * @return the user's segment membership state or {@code null} + */ + BigSegmentStoreTypes.Membership getMembership(String userHash); + + /** + * Returns information about the overall state of the store. + *

    + * This method will be called only when the SDK needs the latest state, so it should not be + * cached. + *

    + * If the store is working, but no metadata has been stored in it yet, the method should return + * {@code null}. It should not throw an exception unless there is an unexpected database error or + * the retrieved data is malformed. + * + * @return the store metadata or null + */ + BigSegmentStoreTypes.StoreMetadata getMetadata(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStoreTypes.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStoreTypes.java new file mode 100644 index 0000000..d4d20bf --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/BigSegmentStoreTypes.java @@ -0,0 +1,225 @@ +package com.launchdarkly.sdk.server.subsystems; + +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Types that are used by the {@link BigSegmentStore} interface. + * + * @since 5.7.0 + */ +public abstract class BigSegmentStoreTypes { + private BigSegmentStoreTypes() { + } + + /** + * A query interface returned by {@link BigSegmentStore#getMembership(String)}. + *

    + * It is associated with a single user, and provides the ability to check whether that user is + * included in or excluded from any number of Big Segments. + *

    + * This is an immutable snapshot of the state for this user at the time + * {@link BigSegmentStore#getMembership(String)} was called. Calling + * {@link #checkMembership(String)} should not cause the state to be queried again. + * Implementations should be safe for concurrent access by multiple threads. + */ + public static interface Membership { + /** + * Tests whether the user is explicitly included or explicitly excluded in the specified + * segment, or neither. + *

    + * The segment is identified by a {@code segmentRef} which is not the same as the segment key: + * it includes the key but also versioning information that the SDK will provide. The store + * implementation should not be concerned with the format of this. + *

    + * If the user is explicitly included (regardless of whether the user is also explicitly + * excluded or not-- that is, inclusion takes priority over exclusion), the method returns a + * {@code true} value. + *

    + * If the user is explicitly excluded, and is not explicitly included, the method returns a + * {@code false} value. + *

    + * If the user's status in the segment is undefined, the method returns {@code null}. + * + * @param segmentRef a string representing the segment query + * @return boolean for explicit inclusion/exclusion, null for unspecified + */ + Boolean checkMembership(String segmentRef); + } + + /** + * Convenience method for creating an implementation of {@link Membership}. + *

    + * This method is intended to be used by Big Segment store implementations; application code does + * not need to use it. + *

    + * Store implementations are free to implement {@link Membership} in any way that they find + * convenient and efficient, depending on what format they obtain values in from the database, but + * this method provides a simple way to do it as long as there are iterables of included and + * excluded segment references. As described in {@link Membership}, a {@code segmentRef} is not + * the same as the key property in the segment data model; it includes the key but also versioning + * information that the SDK will provide. The store implementation should not be concerned with + * the format of this. + *

    + * The returned object's {@link Membership#checkMembership(String)} method will return + * {@code true} for any {@code segmentRef} that is in the included list, + * {@code false} for any {@code segmentRef} that is in the excluded list and not also in the + * included list (that is, inclusions override exclusions), and {@code null} for all others. + *

    + * This method is optimized to return a singleton empty membership object whenever the inclusion + * and exclusions lists are both empty. + *

    + * The returned object implements {@link Object#equals(Object)} in such a way that it correctly + * tests equality when compared to any object returned from this factory method, but it is always + * unequal to any other types of objects. + * + * @param includedSegmentRefs the inclusion list (null is equivalent to an empty iterable) + * @param excludedSegmentRefs the exclusion list (null is equivalent to an empty iterable) + * @return an {@link Membership} + */ + public static Membership createMembershipFromSegmentRefs( + Iterable includedSegmentRefs, + Iterable excludedSegmentRefs) { + MembershipBuilder membershipBuilder = new MembershipBuilder(); + // we must add excludes first so includes will override them + membershipBuilder.addRefs(excludedSegmentRefs, false); + membershipBuilder.addRefs(includedSegmentRefs, true); + return membershipBuilder.build(); + } + + /** + * Values returned by {@link BigSegmentStore#getMetadata()}. + */ + public static final class StoreMetadata { + private final long lastUpToDate; + + /** + * Constructor for a {@link StoreMetadata}. + * + * @param lastUpToDate the Unix millisecond timestamp of the last update + */ + public StoreMetadata(long lastUpToDate) { + this.lastUpToDate = lastUpToDate; + } + + /** + * The timestamp of the last update to the {@link BigSegmentStore}. 
+ * + * @return the last update timestamp as Unix milliseconds + */ + public long getLastUpToDate() { + return this.lastUpToDate; + } + } + + private static class MembershipBuilder { + private boolean nonEmpty; + private String firstValue; + private boolean firstValueIncluded; + private HashMap map; + + void addRefs(Iterable segmentRefs, boolean included) { + if (segmentRefs == null) { + return; + } + for (String s : segmentRefs) { + if (s == null) { + continue; + } + if (nonEmpty) { + if (map == null) { + map = new HashMap<>(); + map.put(firstValue, firstValueIncluded); + } + map.put(s, included); + } else { + firstValue = s; + firstValueIncluded = included; + nonEmpty = true; + } + } + } + + Membership build() { + if (nonEmpty) { + if (map != null) { + return new MapMembership(map); + } + return new SingleValueMembership(firstValue, firstValueIncluded); + } + return EmptyMembership.instance; + } + + private static final class EmptyMembership implements Membership { + static final EmptyMembership instance = new EmptyMembership(); + + @Override + public Boolean checkMembership(String segmentRef) { + return null; + } + + @Override + public boolean equals(Object o) { + return o instanceof EmptyMembership; + } + + @Override + public int hashCode() { + return 0; + } + } + + private static final class SingleValueMembership implements Membership { + private final String segmentRef; + private final boolean included; + + SingleValueMembership(String segmentRef, boolean included) { + this.segmentRef = segmentRef; + this.included = included; + } + + @Override + public Boolean checkMembership(String segmentRef) { + return this.segmentRef.equals(segmentRef) ? 
included : null; + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof SingleValueMembership)) { + return false; + } + SingleValueMembership other = (SingleValueMembership) o; + return segmentRef.equals(other.segmentRef) && included == other.included; + } + + @Override + public int hashCode() { + return segmentRef.hashCode() + (included ? 1 : 0); + } + } + + private static final class MapMembership implements Membership { + private final Map map; + + private MapMembership(Map map) { + this.map = map; + } + + @Override + public Boolean checkMembership(String segmentRef) { + return map.get(segmentRef); + } + + @Override + public boolean equals(Object o) { + return o instanceof MapMembership && map.equals(((MapMembership) o).map); + } + + @Override + public int hashCode() { + return Objects.hash(map); + } + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ClientContext.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ClientContext.java new file mode 100644 index 0000000..1df46b3 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ClientContext.java @@ -0,0 +1,217 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig.Builder; +import com.launchdarkly.sdk.server.integrations.WrapperInfoBuilder; +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; +import com.launchdarkly.sdk.server.interfaces.ServiceEndpoints; +import com.launchdarkly.sdk.server.interfaces.WrapperInfo; + +/** + * Context information provided by the {@link com.launchdarkly.sdk.server.LDClient} when creating components. + *

    + * This is passed as a parameter to component factories that implement {@link ComponentConfigurer}. + * Component factories do not receive the entire {@link com.launchdarkly.sdk.server.LDConfig} because + * it could contain factory objects that have mutable state, and because components should not be able + * to access the configurations of unrelated components. + *

    + * The actual implementation class may contain other properties that are only relevant to the built-in + * SDK components and are therefore not part of this base class; this allows the SDK to add its own context + * information as needed without disturbing the public API. + * + * @since 5.0.0 + */ +public class ClientContext { + private final String sdkKey; + private final ApplicationInfo applicationInfo; + private final LDLogger baseLogger; + private final HttpConfiguration http; + private final LoggingConfiguration logging; + private final boolean offline; + private final ServiceEndpoints serviceEndpoints; + private final int threadPriority; + private WrapperInfo wrapperInfo; + + /** + * Constructor that sets all properties. All should be non-null. + * + * @param sdkKey the SDK key + * @param applicationInfo application metadata properties from + * {@link Builder#applicationInfo(com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder)} + * @param http HTTP configuration properties from {@link Builder#http(ComponentConfigurer)} + * @param logging logging configuration properties from {@link Builder#logging(ComponentConfigurer)} + * @param offline true if the SDK should be entirely offline + * @param serviceEndpoints service endpoint URI properties from + * {@link Builder#serviceEndpoints(com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder)} + * @param threadPriority worker thread priority from {@link Builder#threadPriority(int)} + * @param wrapperInfo wrapper configuration from {@link Builder#wrapper(com.launchdarkly.sdk.server.integrations.WrapperInfoBuilder)} + */ + public ClientContext( + String sdkKey, + ApplicationInfo applicationInfo, + HttpConfiguration http, + LoggingConfiguration logging, + boolean offline, + ServiceEndpoints serviceEndpoints, + int threadPriority, + WrapperInfo wrapperInfo + ) { + this.sdkKey = sdkKey; + this.applicationInfo = applicationInfo; + this.http = http; + this.logging = logging; + this.offline = 
offline; + this.serviceEndpoints = serviceEndpoints; + this.threadPriority = threadPriority; + this.wrapperInfo = wrapperInfo; + + this.baseLogger = logging == null ? LDLogger.none() : + LDLogger.withAdapter(logging.getLogAdapter(), logging.getBaseLoggerName()); + } + + /** + * Copy constructor. + * + * @param copyFrom the instance to copy from + */ + protected ClientContext(ClientContext copyFrom) { + this(copyFrom.sdkKey, copyFrom.applicationInfo, copyFrom.http, copyFrom.logging, + copyFrom.offline, copyFrom.serviceEndpoints, copyFrom.threadPriority, copyFrom.wrapperInfo); + } + + /** + * Basic constructor for convenience in testing, using defaults for most properties. + * + * @param sdkKey the SDK key + */ + public ClientContext(String sdkKey) { + this( + sdkKey, + new ApplicationInfo(null, null), + defaultHttp(sdkKey), + defaultLogging(), + false, + Components.serviceEndpoints().createServiceEndpoints(), + Thread.MIN_PRIORITY, + null + ); + } + + private static HttpConfiguration defaultHttp(String sdkKey) { + ClientContext minimalContext = new ClientContext(sdkKey, null, null, null, false, null, 0, null); + return Components.httpConfiguration().build(minimalContext); + } + + private static LoggingConfiguration defaultLogging() { + ClientContext minimalContext = new ClientContext("", null, null, null, false, null, 0, null); + return Components.logging().build(minimalContext); + } + + /** + * Returns the configured SDK key. + * + * @return the SDK key + */ + public String getSdkKey() { + return sdkKey; + } + + /** + * Returns the application metadata, if any, set by + * {@link Builder#applicationInfo(com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder)}. + * + * @return the application metadata or null + */ + public ApplicationInfo getApplicationInfo() { + return applicationInfo; + } + + /** + * The base logger for the SDK. 
+ * @return a logger instance + */ + public LDLogger getBaseLogger() { + return baseLogger; + } + + /** + * Returns the component that {@link DataSource} implementations use to deliver data and status + * updates to the SDK. + *

    + * This component is only available when the SDK is calling a {@link DataSource} factory. + * Otherwise the method returns null. + * + * @return the {@link DataSourceUpdateSink}, if applicable + */ + public DataSourceUpdateSink getDataSourceUpdateSink() { + return null; + } + + /** + * Returns the component that {@link DataStore} implementations use to deliver data store status + * updates to the SDK. + *

    + * This component is only available when the SDK is calling a {@link DataStore} factory. + * Otherwise the method returns null. + * + * @return the {@link DataStoreUpdateSink}, if applicable + */ + public DataStoreUpdateSink getDataStoreUpdateSink() { + return null; + } + + /** + * The configured networking properties that apply to all components. + * + * @return the HTTP configuration + */ + public HttpConfiguration getHttp() { + return http; + } + + /** + * The configured logging properties that apply to all components. + * @return the logging configuration + */ + public LoggingConfiguration getLogging() { + return logging; + } + + /** + * Returns true if the SDK was configured to be completely offline. + * + * @return true if configured to be offline + */ + public boolean isOffline() { + return offline; + } + + /** + * Returns the base service URIs used by SDK components. + * + * @return the service endpoint URIs + */ + public ServiceEndpoints getServiceEndpoints() { + return serviceEndpoints; + } + + /** + * Returns the worker thread priority that is set by + * {@link Builder#threadPriority(int)}. + * + * @return the thread priority + */ + public int getThreadPriority() { + return threadPriority; + } + + /** + * Returns the wrapper information. + * + * @return the wrapper information + */ + public WrapperInfo getWrapperInfo() { + return wrapperInfo; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ComponentConfigurer.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ComponentConfigurer.java new file mode 100644 index 0000000..10a9a0b --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/ComponentConfigurer.java @@ -0,0 +1,20 @@ +package com.launchdarkly.sdk.server.subsystems; + +/** + * The common interface for SDK component factories and configuration builders. Applications should not + * need to implement this interface. 
+ * + * @param the type of SDK component or configuration object being constructed + * @since 6.0.0 + */ +public interface ComponentConfigurer { + /** + * Called internally by the SDK to create an implementation instance. Applications should not need + * to call this method. + * + * @param clientContext provides configuration properties and other components from the current + * SDK client instance + * @return a instance of the component type + */ + T build(ClientContext clientContext); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSource.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSource.java new file mode 100644 index 0000000..eb844c0 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSource.java @@ -0,0 +1,43 @@ +package com.launchdarkly.sdk.server.subsystems; + +import java.io.Closeable; +import java.io.IOException; +import java.util.concurrent.Future; + +/** + * Interface for an object that receives updates to feature flags, user segments, and anything + * else that might come from LaunchDarkly, and passes them to a {@link DataStore}. + *

    + * The standard implementations are: + *

      + *
    • {@link com.launchdarkly.sdk.server.Components#streamingDataSource()} (the default), which + * maintains a streaming connection to LaunchDarkly; + *
    • {@link com.launchdarkly.sdk.server.Components#pollingDataSource()}, which polls for + * updates at regular intervals; + *
    • {@link com.launchdarkly.sdk.server.Components#externalUpdatesOnly()}, which does nothing + * (on the assumption that another process will update the data store); + *
    • {@link com.launchdarkly.sdk.server.integrations.FileData}, which reads flag data from + * the filesystem. + *
    + * + * @since 5.0.0 + */ +public interface DataSource extends Closeable { + /** + * Starts the client. + * @return {@link Future}'s completion status indicates the client has been initialized. + */ + Future start(); + + /** + * Returns true once the client has been initialized and will never return false again. + * @return true if the client has been initialized + */ + boolean isInitialized(); + + /** + * Tells the component to shut down and release any resources it is using. + * @throws IOException if there is an error while closing + */ + void close() throws IOException; +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSourceUpdateSink.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSourceUpdateSink.java new file mode 100644 index 0000000..622031d --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataSourceUpdateSink.java @@ -0,0 +1,87 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +/** + * Interface that a data source implementation will use to push data into the SDK. + *

    + * The data source interacts with this object, rather than manipulating the data store directly, so + * that the SDK can perform any other necessary operations that must happen when data is updated. + * + * @since 5.0.0 + * @see ClientContext#getDataSourceUpdateSink() + */ +public interface DataSourceUpdateSink { + /** + * Completely overwrites the current contents of the data store with a set of items for each collection. + *

    + * If the underlying data store throws an error during this operation, the SDK will catch it, log it, + * and set the data source state to {@link State#INTERRUPTED} with an error of + * {@link ErrorKind#STORE_ERROR}. It will not rethrow the error to the data + * source, but will simply return {@code false} to indicate that the operation failed. + * + * @param allData a list of {@link DataStoreTypes.DataKind} instances and their corresponding data sets + * @return true if the update succeeded, false if it failed + */ + boolean init(FullDataSet allData); + + /** + * Updates or inserts an item in the specified collection. For updates, the object will only be + * updated if the existing version is less than the new version. + *

    + * To mark an item as deleted, pass an {@link ItemDescriptor} that contains a null, with a version + * number (you may use {@link ItemDescriptor#deletedItem(int)}). Deletions must be versioned so that + * they do not overwrite a later update in case updates are received out of order. + *

    + * If the underlying data store throws an error during this operation, the SDK will catch it, log it, + * and set the data source state to {@link State#INTERRUPTED} with an error of + * {@link ErrorKind#STORE_ERROR}. It will not rethrow the error to the data + * source, but will simply return {@code false} to indicate that the operation failed. + * + * @param kind specifies which collection to use + * @param key the unique key for the item within that collection + * @param item the item to insert or update + * @return true if the update succeeded, false if it failed + */ + boolean upsert(DataKind kind, String key, ItemDescriptor item); + + /** + * Returns an object that provides status tracking for the data store, if applicable. + *

    + * This may be useful if the data source needs to be aware of storage problems that might require it + * to take some special action: for instance, if a database outage may have caused some data to be + * lost and therefore the data should be re-requested from LaunchDarkly. + * + * @return a {@link DataStoreStatusProvider} + */ + DataStoreStatusProvider getDataStoreStatusProvider(); + + /** + * Informs the SDK of a change in the data source's status. + *

    + * Data source implementations should use this method if they have any concept of being in a valid + * state, a temporarily disconnected state, or a permanently stopped state. + *

    + * If {@code newState} is different from the previous state, and/or {@code newError} is non-null, the + * SDK will start returning the new status (adding a timestamp for the change) from + * {@link DataSourceStatusProvider#getStatus()}, and will trigger status change events to any + * registered listeners. + *

    + * A special case is that if {@code newState} is {@link State#INTERRUPTED}, + * but the previous state was {@link State#INITIALIZING}, the state will remain + * at {@link State#INITIALIZING} because {@link State#INTERRUPTED} + * is only meaningful after a successful startup. + * + * @param newState the data source state + * @param newError information about a new error, if any + * @see DataSourceStatusProvider + */ + void updateStatus(State newState, ErrorInfo newError); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStore.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStore.java new file mode 100644 index 0000000..e2cd363 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStore.java @@ -0,0 +1,107 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; + +import java.io.Closeable; + +/** + * Interface for a data store that holds feature flags and related data received by the SDK. + *

    + * Ordinarily, the only implementations of this interface are the default in-memory implementation, + * which holds references to actual SDK data model objects, and the persistent data store + * implementation that delegates to a {@link PersistentDataStore}. + *

    + * All implementations must permit concurrent access and updates. + * + * @since 5.0.0 + */ +public interface DataStore extends Closeable { + /** + * Overwrites the store's contents with a set of items for each collection. + *

    + * All previous data should be discarded, regardless of versioning. + *

    + * The update should be done atomically. If it cannot be done atomically, then the store + * must first add or update each item in the same order that they are given in the input + * data, and then delete any previously stored items that were not in the input data. + * + * @param allData a list of {@link DataStoreTypes.DataKind} instances and their corresponding data sets + */ + void init(FullDataSet allData); + + /** + * Retrieves an item from the specified collection, if available. + *

    + * If the item has been deleted and the store contains a placeholder, it should + * return that placeholder rather than null. + * + * @param kind specifies which collection to use + * @param key the unique key of the item within that collection + * @return a versioned item that contains the stored data (or placeholder for deleted data); + * null if the key is unknown + */ + ItemDescriptor get(DataKind kind, String key); + + /** + * Retrieves all items from the specified collection. + *

    + * If the store contains placeholders for deleted items, it should include them in + * the results, not filter them out. + * + * @param kind specifies which collection to use + * @return a collection of key-value pairs; the ordering is not significant + */ + KeyedItems getAll(DataKind kind); + + /** + * Updates or inserts an item in the specified collection. For updates, the object will only be + * updated if the existing version is less than the new version. + *

    + * The SDK may pass an {@link ItemDescriptor} that contains a null, to represent a placeholder + * for a deleted item. In that case, assuming the version is greater than any existing version of + * that item, the store should retain that placeholder rather than simply not storing anything. + * + * @param kind specifies which collection to use + * @param key the unique key for the item within that collection + * @param item the item to insert or update + * @return true if the item was updated; false if it was not updated because the store contains + * an equal or greater version + */ + boolean upsert(DataKind kind, String key, ItemDescriptor item); + + /** + * Checks whether this store has been initialized with any data yet. + * + * @return true if the store contains data + */ + boolean isInitialized(); + + /** + * Returns true if this data store implementation supports status monitoring. + *

    + * This is normally only true for persistent data stores created with + * {@link com.launchdarkly.sdk.server.Components#persistentDataStore(ComponentConfigurer)}, + * but it could also be true for any custom {@link DataStore} implementation that makes use of + * {@link ClientContext#getDataStoreUpdateSink()}. + * Returning true means that the store guarantees that if it ever enters an invalid state (that is, an + * operation has failed or it knows that operations cannot succeed at the moment), it will publish a + * status update, and will then publish another status update once it has returned to a valid state. + *

    + * The same value will be returned from {@link DataStoreStatusProvider#isStatusMonitoringEnabled()}. + * + * @return true if status monitoring is enabled + */ + boolean isStatusMonitoringEnabled(); + + /** + * Returns statistics about cache usage, if this data store implementation supports caching. + * + * @return a cache statistics object, or null if not applicable + */ + CacheStats getCacheStats(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreTypes.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreTypes.java new file mode 100644 index 0000000..64677b5 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreTypes.java @@ -0,0 +1,331 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.google.common.collect.ImmutableList; + +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; + +/** + * Types that are used by the {@link DataStore} interface. + *

    + * Applications should never need to use any of these types unless they are implementing a custom + * data store. + * + * @since 5.0.0 + */ +public abstract class DataStoreTypes { + private DataStoreTypes() {} + + /** + * Represents a separately namespaced collection of storable data items. + *

    + * The SDK passes instances of this type to the data store to specify whether it is referring to + * a feature flag, a user segment, etc. The data store implementation should not look for a + * specific data kind (such as feature flags), but should treat all data kinds generically. + */ + public static final class DataKind { + private final String name; + private final Function serializer; + private final Function deserializer; + + /** + * A case-sensitive alphabetic string that uniquely identifies this data kind. + *

    + * This is in effect a namespace for a collection of items of the same kind. Item keys must be + * unique within that namespace. Persistent data store implementations could use this string + * as part of a composite key or table name. + * + * @return the namespace string + */ + public String getName() { + return name; + } + + /** + * Returns a serialized representation of an item of this kind. + *

    + * The SDK uses this function to generate the data that is stored by a {@link PersistentDataStore}. + * Store implementations normally do not need to call it, except in a special case described in the + * documentation for {@link PersistentDataStore} regarding deleted item placeholders. + * + * @param item an {@link ItemDescriptor} describing the object to be serialized + * @return the serialized representation + * @exception ClassCastException if the object is of the wrong class + */ + public String serialize(ItemDescriptor item) { + return serializer.apply(item); + } + + /** + * Creates an item of this kind from its serialized representation. + *

    + * The SDK uses this function to translate data that is returned by a {@link PersistentDataStore}. + * Store implementations do not normally need to call it, but there is a special case described in + * the documentation for {@link PersistentDataStore}, regarding updates. + *

    + * The returned {@link ItemDescriptor} has two properties: {@link ItemDescriptor#getItem()}, which + * is the deserialized object or a {@code null} value for a deleted item placeholder, and + * {@link ItemDescriptor#getVersion()}, which provides the object's version number regardless of + * whether it is deleted or not. + * + * @param s the serialized representation + * @return an {@link ItemDescriptor} describing the deserialized object + */ + public ItemDescriptor deserialize(String s) { + return deserializer.apply(s); + } + + /** + * Constructs a DataKind instance. + * + * @param name the value for {@link #getName()} + * @param serializer the function to use for {@link #serialize(DataStoreTypes.ItemDescriptor)} + * @param deserializer the function to use for {@link #deserialize(String)} + */ + public DataKind(String name, Function serializer, Function deserializer) { + this.name = name; + this.serializer = serializer; + this.deserializer = deserializer; + } + + @Override + public String toString() { + return "DataKind(" + name + ")"; + } + } + + /** + * A versioned item (or placeholder) storable in a {@link DataStore}. + *

    + * This is used for data stores that directly store objects as-is, as the default in-memory + * store does. Items are typed as {@code Object}; the store should not know or care what the + * actual object is. + *

    + * For any given key within a {@link DataKind}, there can be either an existing item with a + * version, or a "tombstone" placeholder representing a deleted item (also with a version). + * Deleted item placeholders are used so that if an item is first updated with version N and + * then deleted with version N+1, but the SDK receives those changes out of order, version N + * will not overwrite the deletion. + *

    + * Persistent data stores use {@link SerializedItemDescriptor} instead. + */ + public static final class ItemDescriptor { + private final int version; + private final Object item; + + /** + * Returns the version number of this data, provided by the SDK. + * + * @return the version number + */ + public int getVersion() { + return version; + } + + /** + * Returns the data item, or null if this is a placeholder for a deleted item. + * + * @return an object or null + */ + public Object getItem() { + return item; + } + + /** + * Constructs a new instance. + * + * @param version the version number + * @param item an object or null + */ + public ItemDescriptor(int version, Object item) { + this.version = version; + this.item = item; + } + + /** + * Convenience method for constructing a deleted item placeholder. + * + * @param version the version number + * @return an ItemDescriptor + */ + public static ItemDescriptor deletedItem(int version) { + return new ItemDescriptor(version, null); + } + + @Override + public boolean equals(Object o) { + if (o instanceof ItemDescriptor) { + ItemDescriptor other = (ItemDescriptor)o; + return version == other.version && Objects.equals(item, other.item); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(version, item); + } + + @Override + public String toString() { + return "ItemDescriptor(" + version + "," + item + ")"; + } + } + + /** + * A versioned item (or placeholder) storable in a {@link PersistentDataStore}. + *

    + * This is equivalent to {@link ItemDescriptor}, but is used for persistent data stores. The + * SDK will convert each data item to and from its serialized string form; the persistent data + * store deals only with the serialized form. + */ + public static final class SerializedItemDescriptor { + private final int version; + private final boolean deleted; + private final String serializedItem; + + /** + * Returns the version number of this data, provided by the SDK. + * @return the version number + */ + public int getVersion() { + return version; + } + + /** + * Returns true if this is a placeholder (tombstone) for a deleted item. If so, + * {@link #getSerializedItem()} will still contain a string representing the deleted item, but + * the persistent store implementation has the option of not storing it if it can represent the + * placeholder in a more efficient way. + * + * @return true if this is a deleted item placeholder + */ + public boolean isDeleted() { + return deleted; + } + + /** + * Returns the data item's serialized representation. This will never be null; for a deleted item + * placeholder, it will contain a special value that can be stored if necessary (see {@link #isDeleted()}). + * + * @return the serialized data or null + */ + public String getSerializedItem() { + return serializedItem; + } + + /** + * Constructs a new instance. 
+ * + * @param version the version number + * @param deleted true if this is a deleted item placeholder + * @param serializedItem the serialized data (will not be null) + */ + public SerializedItemDescriptor(int version, boolean deleted, String serializedItem) { + this.version = version; + this.deleted = deleted; + this.serializedItem = serializedItem; + } + + @Override + public boolean equals(Object o) { + if (o instanceof SerializedItemDescriptor) { + SerializedItemDescriptor other = (SerializedItemDescriptor)o; + return version == other.version && deleted == other.deleted && + Objects.equals(serializedItem, other.serializedItem); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(version, deleted, serializedItem); + } + + @Override + public String toString() { + return "SerializedItemDescriptor(" + version + "," + deleted + "," + serializedItem + ")"; + } + } + + /** + * Wrapper for a set of storable items being passed to a data store. + *

    + * Since the generic type signature for the data set is somewhat complicated (it is an ordered + * list of key-value pairs where each key is a {@link DataKind}, and each value is another ordered + * list of key-value pairs for the individual data items), this type simplifies the declaration of + * data store methods and makes it easier to see what the type represents. + * + * @param will be {@link ItemDescriptor} or {@link SerializedItemDescriptor} + */ + public static final class FullDataSet { + private final Iterable>> data; + + /** + * Returns the wrapped data set. + * + * @return an enumeration of key-value pairs; may be empty, but will not be null + */ + public Iterable>> getData() { + return data; + } + + /** + * Constructs a new instance. + * + * @param data the data set + */ + public FullDataSet(Iterable>> data) { + this.data = data == null ? ImmutableList.of(): data; + } + + @Override + public boolean equals(Object o) { + return o instanceof FullDataSet && data.equals(((FullDataSet)o).data); + } + + @Override + public int hashCode() { + return data.hashCode(); + } + } + + /** + * Wrapper for a set of storable items being passed to a data store, within a single + * {@link DataKind}. + * + * @param will be {@link ItemDescriptor} or {@link SerializedItemDescriptor} + */ + public static final class KeyedItems { + private final Iterable> items; + + /** + * Returns the wrapped data set. + * + * @return an enumeration of key-value pairs; may be empty, but will not be null + */ + public Iterable> getItems() { + return items; + } + + /** + * Constructs a new instance. + * + * @param items the data set + */ + public KeyedItems(Iterable> items) { + this.items = items == null ? 
ImmutableList.of() : items; + } + + @Override + public boolean equals(Object o) { + return o instanceof KeyedItems && items.equals(((KeyedItems)o).items); + } + + @Override + public int hashCode() { + return items.hashCode(); + } + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreUpdateSink.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreUpdateSink.java new file mode 100644 index 0000000..e419842 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DataStoreUpdateSink.java @@ -0,0 +1,20 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; + +/** + * Interface that a data store implementation can use to report information back to the SDK. + * + * @since 5.0.0 + * @see ClientContext#getDataStoreUpdateSink() + */ +public interface DataStoreUpdateSink { + /** + * Reports a change in the data store's operational status. + *

    + * This is what makes the status monitoring mechanisms in {@link DataStoreStatusProvider} work. + * + * @param newStatus the updated status properties + */ + void updateStatus(DataStoreStatusProvider.Status newStatus); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DiagnosticDescription.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DiagnosticDescription.java new file mode 100644 index 0000000..c54a0a4 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/DiagnosticDescription.java @@ -0,0 +1,27 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.LDValue; + +/** + * Optional interface for components to describe their own configuration. + *

    + * The SDK uses a simplified JSON representation of its configuration when recording diagnostics data. + * Any class that implements {@link ComponentConfigurer} may choose to contribute + * values to this representation, although the SDK may or may not use them. For components that do not + * implement this interface, the SDK may instead describe them using {@code getClass().getSimpleName()}. + *

    + * The {@link #describeConfiguration(ClientContext)} method should return either null or a JSON value. For + * custom components, the value must be a string that describes the basic nature of this component + * implementation (e.g. "Redis"). Built-in LaunchDarkly components may instead return a JSON object + * containing multiple properties specific to the LaunchDarkly diagnostic schema. + * + * @since 4.12.0 + */ +public interface DiagnosticDescription { + /** + * Used internally by the SDK to inspect the configuration. + * @param clientContext allows access to the client configuration + * @return an {@link LDValue} or null + */ + LDValue describeConfiguration(ClientContext clientContext); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventProcessor.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventProcessor.java new file mode 100644 index 0000000..2381b80 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventProcessor.java @@ -0,0 +1,99 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.MigrationOpTracker; + +import java.io.Closeable; + +/** + * Interface for an object that can send or store analytics events. + *

    + * Application code normally does not need to interact with this interface. It is provided + * to allow a custom implementation or test fixture to be substituted for the SDK's normal + * analytics event logic. + * + * @since 4.0.0 + */ +public interface EventProcessor extends Closeable { + /** + * Constant used with {@link #recordEvaluationEvent}. + */ + public static final int NO_VERSION = -1; + + /** + * Records the action of evaluating a feature flag. + *

    + * Depending on the feature flag properties and event properties, this may be transmitted to + * the events service as an individual event, or may only be added into summary data. + * + * @param context the evaluation context + * @param flagKey key of the feature flag that was evaluated + * @param flagVersion the version of the flag, or {@link #NO_VERSION} if the flag was not found + * @param variation the result variation index, or {@link EvaluationDetail#NO_VARIATION} if evaluation failed + * @param value the result value + * @param reason the evaluation reason, or null if the reason was not requested + * @param defaultValue the default value parameter for the evaluation + * @param prerequisiteOfFlagKey the key of the flag that this flag was evaluated as a prerequisite of, + * or null if this flag was evaluated for itself + * @param requireFullEvent true if full-fidelity analytics events should be sent for this flag + * @param debugEventsUntilDate if non-null, debug events are to be generated until this millisecond time + * @param excludeFromSummaries true if the event evaluation should not be included in summaries + * @param samplingRatio ratio used to control event sampling + */ + void recordEvaluationEvent( + LDContext context, + String flagKey, + int flagVersion, + int variation, + LDValue value, + EvaluationReason reason, + LDValue defaultValue, + String prerequisiteOfFlagKey, + boolean requireFullEvent, + Long debugEventsUntilDate, + boolean excludeFromSummaries, + Long samplingRatio + ); + + /** + * Registers an evaluation context, as when the SDK's {@code identify} method is called. + * + * @param context the evaluation context + */ + void recordIdentifyEvent( + LDContext context + ); + + /** + * Creates a custom event, as when the SDK's {@code track} method is called. 
+ * + * @param context the evaluation context + * @param eventKey the event key + * @param data optional custom data provided for the event, may be null or {@link LDValue#ofNull()} if not used + * @param metricValue optional numeric metric value provided for the event, or null + */ + void recordCustomEvent( + LDContext context, + String eventKey, + LDValue data, + Double metricValue + ); + + /** + * Creates a migration event when the SDK's {@code trackMigration} method is called. + * + * @param tracker Migration tracker which was used to track details of the migration operation. + */ + void recordMigrationEvent(MigrationOpTracker tracker); + + /** + * Specifies that any buffered events should be sent as soon as possible, rather than waiting + * for the next flush interval. This method is asynchronous, so events still may not be sent + * until a later time. However, calling {@link Closeable#close()} will synchronously deliver + * any events that were not yet delivered prior to shutting down. + */ + void flush(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventSender.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventSender.java new file mode 100644 index 0000000..20b4e29 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/EventSender.java @@ -0,0 +1,60 @@ +package com.launchdarkly.sdk.server.subsystems; + +import java.io.Closeable; +import java.net.URI; + +/** + * Interface for a component that can deliver preformatted event data. + *

    + * By default, the SDK sends event data to the LaunchDarkly events service via HTTP. You may + * provide a different implementation of event delivery by implementing this interface-- for + * instance, to create a test fixture, or to store the data somewhere else. + * + * @see com.launchdarkly.sdk.server.integrations.EventProcessorBuilder#eventSender(ComponentConfigurer) + * @since 4.14.0 + */ +public interface EventSender extends Closeable { + /** + * Result type for event sending methods. + */ + public enum Result { + /** + * The EventSender successfully delivered the event(s). + */ + SUCCESS, + + /** + * The EventSender was not able to deliver the events. + */ + FAILURE, + + /** + * The EventSender was not able to deliver the events, and the nature of the error indicates that + * the SDK should not attempt to send any more events. + */ + STOP + }; + + /** + * Attempt to deliver an analytics event data payload. + *

    + * This method will be called synchronously from an event delivery worker thread. + * + * @param data the preformatted JSON data, in UTF-8 encoding + * @param eventCount the number of individual events in the data + * @param eventsBaseUri the configured events endpoint base URI + * @return a {@link Result} + */ + Result sendAnalyticsEvents(byte[] data, int eventCount, URI eventsBaseUri); + + /** + * Attempt to deliver a diagnostic event data payload. + *

    + * This method will be called synchronously from an event delivery worker thread. + * + * @param data the preformatted JSON data, in UTF-8 encoding + * @param eventsBaseUri the configured events endpoint base URI + * @return a {@link Result} + */ + Result sendDiagnosticEvent(byte[] data, URI eventsBaseUri); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HookConfiguration.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HookConfiguration.java new file mode 100644 index 0000000..07a5bd5 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HookConfiguration.java @@ -0,0 +1,31 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.server.integrations.HooksConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.Hook; + +import java.util.Collections; +import java.util.List; + +/** + * Encapsulates the SDK's 'hooks' configuration. + *

    + * Use {@link HooksConfigurationBuilder} to construct an instance. + */ +public class HookConfiguration { + + private final List hooks; + + /** + * @param hooks the list of {@link Hook} that will be registered. + */ + public HookConfiguration(List hooks) { + this.hooks = Collections.unmodifiableList(hooks); + } + + /** + * @return an immutable list of hooks + */ + public List getHooks() { + return hooks; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HttpConfiguration.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HttpConfiguration.java new file mode 100644 index 0000000..d0b25ec --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/HttpConfiguration.java @@ -0,0 +1,141 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder; +import com.launchdarkly.sdk.server.interfaces.HttpAuthentication; + +import java.net.Proxy; +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; + +import javax.net.SocketFactory; +import javax.net.ssl.SSLSocketFactory; +import javax.net.ssl.X509TrustManager; + +import static java.util.Collections.emptyMap; + +/** + * Encapsulates top-level HTTP configuration that applies to all SDK components. + *

    + * Use {@link HttpConfigurationBuilder} to construct an instance. + *

    + * The SDK's built-in components use OkHttp as the HTTP client implementation, but since OkHttp types + * are not surfaced in the public API and custom components might use some other implementation, this + * class only provides the properties that would be used to create an HTTP client; it does not create + * the client itself. SDK implementation code uses its own helper methods to do so. + * + * @since 4.13.0 + */ +public final class HttpConfiguration { + private final Duration connectTimeout; + private final Map defaultHeaders; + private final Proxy proxy; + private final HttpAuthentication proxyAuthentication; + private final SocketFactory socketFactory; + private final Duration socketTimeout; + private final SSLSocketFactory sslSocketFactory; + private final X509TrustManager trustManager; + + /** + * Creates an instance. + * + * @param connectTimeout see {@link #getConnectTimeout()} + * @param defaultHeaders see {@link #getDefaultHeaders()} + * @param proxy see {@link #getProxy()} + * @param proxyAuthentication see {@link #getProxyAuthentication()} + * @param socketFactory see {@link #getSocketFactory()} + * @param socketTimeout see {@link #getSocketTimeout()} + * @param sslSocketFactory see {@link #getSslSocketFactory()} + * @param trustManager see {@link #getTrustManager()} + */ + public HttpConfiguration(Duration connectTimeout, Map defaultHeaders, Proxy proxy, + HttpAuthentication proxyAuthentication, SocketFactory socketFactory, Duration socketTimeout, + SSLSocketFactory sslSocketFactory, X509TrustManager trustManager) { + super(); + this.connectTimeout = connectTimeout == null ? HttpConfigurationBuilder.DEFAULT_CONNECT_TIMEOUT : connectTimeout; + this.defaultHeaders = defaultHeaders == null ? emptyMap() : new HashMap<>(defaultHeaders); + this.proxy = proxy; + this.proxyAuthentication = proxyAuthentication; + this.socketFactory = socketFactory; + this.socketTimeout = socketTimeout == null ? 
HttpConfigurationBuilder.DEFAULT_SOCKET_TIMEOUT : socketTimeout; + this.sslSocketFactory = sslSocketFactory; + this.trustManager = trustManager; + } + + /** + * The connection timeout. This is the time allowed for the underlying HTTP client to connect + * to the LaunchDarkly server. + * + * @return the connection timeout; never null + */ + public Duration getConnectTimeout() { + return connectTimeout; + } + + /** + * Returns the basic headers that should be added to all HTTP requests from SDK components to + * LaunchDarkly services, based on the current SDK configuration. + * + * @return a list of HTTP header names and values + */ + public Iterable> getDefaultHeaders() { + return defaultHeaders.entrySet(); + } + + /** + * The proxy configuration, if any. + * + * @return a {@link Proxy} instance or null + */ + public Proxy getProxy() { + return proxy; + } + + /** + * The authentication method to use for a proxy, if any. Ignored if {@link #getProxy()} is null. + * + * @return an {@link HttpAuthentication} implementation or null + */ + public HttpAuthentication getProxyAuthentication() { + return proxyAuthentication; + } + + /** + * The socket timeout. This is the amount of time without receiving data on a connection that the + * SDK will tolerate before signaling an error. This does not apply to the streaming connection + * used by {@link com.launchdarkly.sdk.server.Components#streamingDataSource()}, which has its own + * non-configurable read timeout based on the expected behavior of the LaunchDarkly streaming service. + * + * @return the socket timeout; never null + */ + public Duration getSocketTimeout() { + return socketTimeout; + } + + /** + * The configured socket factory for insecure connections. + * + * @return a SocketFactory or null + */ + public SocketFactory getSocketFactory() { + return socketFactory; + } + + /** + * The configured socket factory for secure connections. 
+ * + * @return a SSLSocketFactory or null + */ + public SSLSocketFactory getSslSocketFactory() { + return sslSocketFactory; + } + + /** + * The configured trust manager for secure connections, if custom certificate verification is needed. + * + * @return an X509TrustManager or null + */ + public X509TrustManager getTrustManager() { + return trustManager; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/LoggingConfiguration.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/LoggingConfiguration.java new file mode 100644 index 0000000..51bfc69 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/LoggingConfiguration.java @@ -0,0 +1,65 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.logging.LDLogAdapter; +import com.launchdarkly.sdk.server.integrations.LoggingConfigurationBuilder; + +import java.time.Duration; + +/** + * Encapsulates the SDK's general logging configuration. + *

    + * Use {@link LoggingConfigurationBuilder} to construct an instance. + * + * @since 5.0.0 + */ +public final class LoggingConfiguration { + private final String baseLoggerName; + private final LDLogAdapter logAdapter; + private final Duration logDataSourceOutageAsErrorAfter; + + /** + * Creates an instance. + * + * @param baseLoggerName see {@link #getBaseLoggerName()} + * @param logAdapter see {@link #getLogAdapter()} + * @param logDataSourceOutageAsErrorAfter see {@link #getLogDataSourceOutageAsErrorAfter()} + */ + public LoggingConfiguration( + String baseLoggerName, + LDLogAdapter logAdapter, + Duration logDataSourceOutageAsErrorAfter + ) { + this.baseLoggerName = baseLoggerName; + this.logAdapter = logAdapter; + this.logDataSourceOutageAsErrorAfter = logDataSourceOutageAsErrorAfter; + } + + /** + * Returns the configured base logger name. + * @return the logger name + * @since 5.10.0 + */ + public String getBaseLoggerName() { + return baseLoggerName; + } + + /** + * Returns the configured logging adapter. + * @return the logging adapter + * @since 5.10.0 + */ + public LDLogAdapter getLogAdapter() { + return logAdapter; + } + + /** + * The time threshold, if any, after which the SDK will log a data source outage at {@code ERROR} + * level instead of {@code WARN} level. 
+ * + * @return the error logging threshold, or null + * @see LoggingConfigurationBuilder#logDataSourceOutageAsErrorAfter(java.time.Duration) + */ + public Duration getLogDataSourceOutageAsErrorAfter() { + return logDataSourceOutageAsErrorAfter; + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/PersistentDataStore.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/PersistentDataStore.java new file mode 100644 index 0000000..1f4cfda --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/PersistentDataStore.java @@ -0,0 +1,144 @@ +package com.launchdarkly.sdk.server.subsystems; + +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor; + +import java.io.Closeable; + +/** + * Interface for a data store that holds feature flags and related data in a serialized form. + *

    + * This interface should be used for database integrations, or any other data store + * implementation that stores data in some external service. The SDK will take care of + * converting between its own internal data model and a serialized string form; the data + * store interacts only with the serialized form. The SDK will also provide its own caching + * layer on top of the persistent data store; the data store implementation should not + * provide caching, but simply do every query or update that the SDK tells it to do. + *

    + * Implementations must be thread-safe. + *

    + * Conceptually, each item in the store is a {@link SerializedItemDescriptor} which always has + * a version number, and can represent either a serialized object or a placeholder (tombstone) + * for a deleted item. There are two approaches a persistent store implementation can use for + * persisting this data: + *

    + * 1. Preferably, it should store the version number and the {@link SerializedItemDescriptor#isDeleted()} + * state separately so that the object does not need to be fully deserialized to read them. In + * this case, deleted item placeholders can ignore the value of {@link SerializedItemDescriptor#getSerializedItem()} + * on writes and can set it to null on reads. The store should never call {@link DataKind#deserialize(String)} + * or {@link DataKind#serialize(DataStoreTypes.ItemDescriptor)}. + *

    + * 2. If that isn't possible, then the store should simply persist the exact string from + * {@link SerializedItemDescriptor#getSerializedItem()} on writes, and return the persisted + * string on reads (returning zero for the version and false for {@link SerializedItemDescriptor#isDeleted()}). + * The string is guaranteed to provide the SDK with enough information to infer the version and + * the deleted state. On updates, the store must call {@link DataKind#deserialize(String)} in + * order to inspect the version number of the existing item if any. + *

    + * Error handling is defined as follows: if any data store operation encounters a database error, or + * is otherwise unable to complete its task, it should throw a {@code RuntimeException} to make the SDK + * aware of this. The SDK will log the exception and will assume that the data store is now in a + * non-operational state; the SDK will then start polling {@link #isStoreAvailable()} to determine + * when the store has started working again. + * + * @since 5.0.0 + */ +public interface PersistentDataStore extends Closeable { + /** + * Overwrites the store's contents with a set of items for each collection. + *

    + * All previous data should be discarded, regardless of versioning. + *

    + * The update should be done atomically. If it cannot be done atomically, then the store + * must first add or update each item in the same order that they are given in the input + * data, and then delete any previously stored items that were not in the input data. + * + * @param allData a list of {@link DataStoreTypes.DataKind} instances and their corresponding data sets + */ + void init(FullDataSet allData); + + /** + * Retrieves an item from the specified collection, if available. + *

    + * If the key is not known at all, the method should return null. Otherwise, it should return + * a {@link SerializedItemDescriptor} as follows: + *

    + * 1. If the version number and deletion state can be determined without fully deserializing + * the item, then the store should set those properties in the {@link SerializedItemDescriptor} + * (and can set {@link SerializedItemDescriptor#getSerializedItem()} to null for deleted items). + *

    + * 2. Otherwise, it should simply set {@link SerializedItemDescriptor#getSerializedItem()} to + * the exact string that was persisted, and can leave the other properties as zero/false. See + * comments on {@link PersistentDataStore} for more about this. + * + * @param kind specifies which collection to use + * @param key the unique key of the item within that collection + * @return a versioned item that contains the stored data (or placeholder for deleted data); + * null if the key is unknown + */ + SerializedItemDescriptor get(DataKind kind, String key); + + /** + * Retrieves all items from the specified collection. + *

    + * If the store contains placeholders for deleted items, it should include them in the results, + * not filter them out. See {@link #get(DataStoreTypes.DataKind, String)} for how to set the properties of the + * {@link SerializedItemDescriptor} for each item. + * + * @param kind specifies which collection to use + * @return a collection of key-value pairs; the ordering is not significant + */ + KeyedItems getAll(DataKind kind); + + /** + * Updates or inserts an item in the specified collection. + *

    + * If the given key already exists in that collection, the store must check the version number + * of the existing item (even if it is a deleted item placeholder); if that version is greater + * than or equal to the version of the new item, the update fails and the method returns false. + * If the store is not able to determine the version number of an existing item without fully + * deserializing the existing item, then it is allowed to call {@link DataKind#deserialize(String)} + * for that purpose. + *

    + * If the item's {@link SerializedItemDescriptor#isDeleted()} method returns true, this is a + * deleted item placeholder. The store must persist this, rather than simply removing the key + * from the store. The SDK will provide a string in {@link SerializedItemDescriptor#getSerializedItem()} + * which the store can persist for this purpose; or, if the store is capable of persisting the + * version number and deleted state without storing anything else, it should do so. + * + * @param kind specifies which collection to use + * @param key the unique key for the item within that collection + * @param item the item to insert or update + * @return true if the item was updated; false if it was not updated because the store contains + * an equal or greater version + */ + boolean upsert(DataKind kind, String key, SerializedItemDescriptor item); + + /** + * Returns true if this store has been initialized. + *

    + * In a shared data store, the implementation should be able to detect this state even if + * {@link #init} was called in a different process, i.e. it must query the underlying + * data store in some way. The method does not need to worry about caching this value; the SDK + * will call it rarely. + * + * @return true if the store has been initialized + */ + boolean isInitialized(); + + /** + * Tests whether the data store seems to be functioning normally. + *

    + * This should not be a detailed test of different kinds of operations, but just the smallest possible + * operation to determine whether (for instance) we can reach the database. + *

    + * Whenever one of the store's other methods throws an exception, the SDK will assume that it may have + * become unavailable (e.g. the database connection was lost). The SDK will then call + * {@link #isStoreAvailable()} at intervals until it returns true. + * + * @return true if the underlying data store is reachable + */ + public boolean isStoreAvailable(); +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/SerializationException.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/SerializationException.java new file mode 100644 index 0000000..c714e96 --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/SerializationException.java @@ -0,0 +1,23 @@ +package com.launchdarkly.sdk.server.subsystems; + +/** + * General exception class for all errors in serializing or deserializing JSON. + *

    + * The SDK uses this class to avoid depending on exception types from the underlying JSON framework + * that it uses (currently Gson). + *

    + * This is currently an unchecked exception, because adding checked exceptions to existing SDK + * interfaces would be a breaking change. In the future it will become a checked exception, to make + * error-handling requirements clearer. However, public SDK client methods will not throw this + * exception in any case; it is only relevant when implementing custom components. + */ +@SuppressWarnings("serial") +public class SerializationException extends RuntimeException { + /** + * Creates an instance. + * @param cause the underlying exception + */ + public SerializationException(Throwable cause) { + super(cause); + } +} diff --git a/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/package-info.java b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/package-info.java new file mode 100644 index 0000000..12031bd --- /dev/null +++ b/lib/sdk/server/src/main/java/com/launchdarkly/sdk/server/subsystems/package-info.java @@ -0,0 +1,13 @@ +/** + * Interfaces for implementation of LaunchDarkly SDK components. + *

    + * Most applications will not need to refer to these types. You will use them if you are creating a + * plugin component, such as a database integration. They are also used as interfaces for the built-in + * SDK components, so that plugin components can be used interchangeably with those: for instance, the + * configuration method {@link com.launchdarkly.sdk.server.LDConfig.Builder#dataStore(ComponentConfigurer)} + * references {@link com.launchdarkly.sdk.server.subsystems.DataStore} as an abstraction for the data + * store component. + *

    + * The package also includes concrete types that are used as parameters within these interfaces. + */ +package com.launchdarkly.sdk.server.subsystems; diff --git a/lib/sdk/server/src/templates/java/com/launchdarkly/sdk/server/Version.java b/lib/sdk/server/src/templates/java/com/launchdarkly/sdk/server/Version.java new file mode 100644 index 0000000..acfd3be --- /dev/null +++ b/lib/sdk/server/src/templates/java/com/launchdarkly/sdk/server/Version.java @@ -0,0 +1,8 @@ +package com.launchdarkly.sdk.server; + +abstract class Version { + private Version() {} + + // This constant is updated automatically by our Gradle script during a release, if the project version has changed + static final String SDK_VERSION = "@VERSION@"; +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BaseTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BaseTest.java new file mode 100644 index 0000000..19c81c5 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BaseTest.java @@ -0,0 +1,51 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogAdapter; +import com.launchdarkly.logging.LDLogLevel; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogCapture; +import com.launchdarkly.logging.Logs; + +import org.junit.Rule; +import org.junit.rules.TestWatcher; +import org.junit.runner.Description; + +@SuppressWarnings("javadoc") +public class BaseTest { + @Rule public DumpLogIfTestFails dumpLogIfTestFails; + + protected final LDLogAdapter testLogging; + protected final LDLogger testLogger; + protected final LogCapture logCapture; + + protected BaseTest() { + logCapture = Logs.capture(); + testLogging = logCapture; + testLogger = LDLogger.withAdapter(testLogging, ""); + dumpLogIfTestFails = new DumpLogIfTestFails(); + } + + /** + * Creates a configuration builder with the basic properties that we want for all tests unless + * otherwise specified: do not connect to an external data 
source, do not send events, and + * redirect all logging to the test logger for the current test (which will be printed to the + * console only if the test fails). + * + * @return a configuraiton builder + */ + protected LDConfig.Builder baseConfig() { + return new LDConfig.Builder() + .dataSource(Components.externalUpdatesOnly()) + .events(Components.noEvents()) + .logging(Components.logging(testLogging).level(LDLogLevel.DEBUG)); + } + + class DumpLogIfTestFails extends TestWatcher { + @Override + protected void failed(Throwable e, Description description) { + for (LogCapture.Message message: logCapture.getMessages()) { + System.out.println("LOG {" + description.getDisplayName() + "} >>> " + message.toStringWithTimestamp()); + } + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImplTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImplTest.java new file mode 100644 index 0000000..923b1ab --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreStatusProviderImplTest.java @@ -0,0 +1,61 @@ +package com.launchdarkly.sdk.server; + +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.same; +import static org.junit.Assert.assertEquals; + +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider.Status; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider.StatusListener; + +import org.easymock.EasyMockSupport; +import org.junit.Before; +import org.junit.Test; + +@SuppressWarnings("javadoc") +public class BigSegmentStoreStatusProviderImplTest extends BaseTest { + + // We don't need to extensively test status broadcasting behavior, just that the implementation + // delegates to the BigSegmentStoreWrapper and EventBroadcasterImpl. 
+ + private StatusListener mockStatusListener; + private EventBroadcasterImpl mockEventBroadcaster; + private final EasyMockSupport mocks = new EasyMockSupport(); + + @Before + @SuppressWarnings("unchecked") + public void setup() { + mockEventBroadcaster = mocks.strictMock(EventBroadcasterImpl.class); + mockStatusListener = mocks.strictMock(StatusListener.class); + } + + @Test + public void statusUnavailableWithNullWrapper() { + mocks.replayAll(); + BigSegmentStoreStatusProviderImpl statusProvider = new BigSegmentStoreStatusProviderImpl(mockEventBroadcaster, null); + assertEquals(statusProvider.getStatus(), new Status(false, false)); + mocks.verifyAll(); + } + + @Test + public void statusDelegatedToWrapper() { + BigSegmentStoreWrapper storeWrapper = mocks.strictMock(BigSegmentStoreWrapper.class); + expect(storeWrapper.getStatus()).andReturn(new Status(true, false)).once(); + mocks.replayAll(); + + BigSegmentStoreStatusProviderImpl statusProvider = new BigSegmentStoreStatusProviderImpl(mockEventBroadcaster, storeWrapper); + assertEquals(statusProvider.getStatus(), new Status(true, false)); + mocks.verifyAll(); + } + + @Test + public void listenersDelegatedToEventBroadcaster() { + mockEventBroadcaster.register(same(mockStatusListener)); + mockEventBroadcaster.unregister(same(mockStatusListener)); + mocks.replayAll(); + + BigSegmentStoreStatusProviderImpl statusProvider = new BigSegmentStoreStatusProviderImpl(mockEventBroadcaster, null); + statusProvider.addStatusListener(mockStatusListener); + statusProvider.removeStatusListener(mockStatusListener); + mocks.verifyAll(); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapperTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapperTest.java new file mode 100644 index 0000000..60dc11e --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/BigSegmentStoreWrapperTest.java @@ -0,0 +1,277 @@ +package 
com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationReason.BigSegmentsStatus; +import com.launchdarkly.sdk.server.BigSegmentStoreWrapper.BigSegmentsQueryResult; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider.Status; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider.StatusListener; +import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.Membership; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.StoreMetadata; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; + +import org.easymock.EasyMockSupport; +import org.junit.Before; +import org.junit.Test; + +import java.time.Duration; +import java.util.Collections; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestComponents.nullLogger; +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.createMembershipFromSegmentRefs; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.isA; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class BigSegmentStoreWrapperTest extends BaseTest { + private static final String SDK_KEY = "sdk-key"; + + private final EasyMockSupport mocks = new EasyMockSupport(); + private AtomicBoolean 
storeUnavailable; + private AtomicReference storeMetadata; + private BigSegmentStore storeMock; + private ComponentConfigurer storeFactoryMock; + private EventBroadcasterImpl eventBroadcaster; + + @SuppressWarnings("unchecked") + @Before + public void setup() { + eventBroadcaster = EventBroadcasterImpl.forBigSegmentStoreStatus(sharedExecutor, nullLogger); + storeUnavailable = new AtomicBoolean(false); + storeMetadata = new AtomicReference<>(null); + storeMock = mocks.niceMock(BigSegmentStore.class); + expect(storeMock.getMetadata()).andAnswer(() -> { + if (storeUnavailable.get()) { + throw new RuntimeException("sorry"); + } + return storeMetadata.get(); + }).anyTimes(); + storeFactoryMock = mocks.strictMock(ComponentConfigurer.class); + expect(storeFactoryMock.build(isA(ClientContext.class))).andReturn(storeMock); + } + + private BigSegmentStoreWrapper makeWrapper(BigSegmentsConfiguration bsConfig) { + return new BigSegmentStoreWrapper(bsConfig, eventBroadcaster, sharedExecutor, testLogger); + } + + private void setStoreMembership(String userKey, Membership membership) { + expect(storeMock.getMembership(BigSegmentStoreWrapper.hashForUserKey(userKey))).andReturn(membership); + } + + @Test + public void membershipQueryWithUncachedResultAndHealthyStatus() throws Exception { + Membership expectedMembership = createMembershipFromSegmentRefs(Collections.singleton("key1"), Collections.singleton("key2")); + + String userKey = "userkey"; + setStoreMembership(userKey, expectedMembership); + mocks.replayAll(); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis())); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .staleAfter(Duration.ofDays(1)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + BigSegmentsQueryResult res = wrapper.getUserMembership(userKey); + assertEquals(expectedMembership, res.membership); + assertEquals(BigSegmentsStatus.HEALTHY, 
res.status); + } + } + + @Test + public void membershipQueryReturnsNull() throws Exception { + String userKey = "userkey"; + setStoreMembership(userKey, null); + mocks.replayAll(); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis())); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .staleAfter(Duration.ofDays(1)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + BigSegmentsQueryResult res = wrapper.getUserMembership(userKey); + assertEquals(createMembershipFromSegmentRefs(null, null), res.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res.status); + } + } + + @Test + public void membershipQueryWithCachedResultAndHealthyStatus() throws Exception { + Membership expectedMembership = createMembershipFromSegmentRefs(Collections.singleton("key1"), Collections.singleton("key2")); + String userKey = "userkey"; + setStoreMembership(userKey, expectedMembership); + + mocks.replayAll(); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis())); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .staleAfter(Duration.ofDays(1)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + BigSegmentsQueryResult res1 = wrapper.getUserMembership(userKey); + assertEquals(expectedMembership, res1.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res1.status); + + BigSegmentsQueryResult res2 = wrapper.getUserMembership(userKey); + assertEquals(expectedMembership, res2.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res2.status); + } + } + + @Test + public void membershipQueryWithStaleStatus() throws Exception { + Membership expectedMembership = createMembershipFromSegmentRefs(Collections.singleton("key1"), Collections.singleton("key2")); + String userKey = "userkey"; + setStoreMembership(userKey, expectedMembership); + + 
mocks.replayAll(); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis() - 1000)); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .staleAfter(Duration.ofMillis(500)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + BigSegmentsQueryResult res = wrapper.getUserMembership(userKey); + assertEquals(expectedMembership, res.membership); + assertEquals(BigSegmentsStatus.STALE, res.status); + } + } + + @Test + public void membershipQueryWithStaleStatusDueToNoStoreMetadata() throws Exception { + Membership expectedMembership = createMembershipFromSegmentRefs(Collections.singleton("key1"), Collections.singleton("key2")); + String userKey = "userkey"; + setStoreMembership(userKey, expectedMembership); + + mocks.replayAll(); + + storeMetadata.set(null); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .staleAfter(Duration.ofMillis(500)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + BigSegmentsQueryResult res = wrapper.getUserMembership(userKey); + assertEquals(expectedMembership, res.membership); + assertEquals(BigSegmentsStatus.STALE, res.status); + } + } + + @Test + public void leastRecentUserIsEvictedFromCache() throws Exception { + String userKey1 = "userkey1", userKey2 = "userkey2", userKey3 = "userkey3"; + Membership expectedMembership1 = createMembershipFromSegmentRefs(Collections.singleton("seg1"), null); + Membership expectedMembership2 = createMembershipFromSegmentRefs(Collections.singleton("seg2"), null); + Membership expectedMembership3 = createMembershipFromSegmentRefs(Collections.singleton("seg3"), null); + setStoreMembership(userKey1, expectedMembership1); + setStoreMembership(userKey2, expectedMembership2); + setStoreMembership(userKey3, expectedMembership3); + setStoreMembership(userKey1, expectedMembership1); + + 
mocks.replayAll(); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis())); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .userCacheSize(2) + .staleAfter(Duration.ofDays(1)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + BigSegmentsQueryResult res1 = wrapper.getUserMembership(userKey1); + assertEquals(expectedMembership1, res1.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res1.status); + + BigSegmentsQueryResult res2 = wrapper.getUserMembership(userKey2); + assertEquals(expectedMembership2, res2.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res2.status); + + BigSegmentsQueryResult res3 = wrapper.getUserMembership(userKey3); + assertEquals(expectedMembership3, res3.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res3.status); + + BigSegmentsQueryResult res2a = wrapper.getUserMembership(userKey2); + assertEquals(expectedMembership2, res2a.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res2a.status); + + BigSegmentsQueryResult res3a = wrapper.getUserMembership(userKey3); + assertEquals(expectedMembership3, res3a.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res3a.status); + + BigSegmentsQueryResult res1a = wrapper.getUserMembership(userKey1); + assertEquals(expectedMembership1, res1a.membership); + assertEquals(BigSegmentsStatus.HEALTHY, res1a.status); + } + } + + @Test + public void pollingDetectsStoreUnavailability() throws Exception { + mocks.replayAll(); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis())); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .statusPollInterval(Duration.ofMillis(10)) + .staleAfter(Duration.ofDays(1)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + assertTrue(wrapper.getStatus().isAvailable()); + + BlockingQueue statuses = new 
LinkedBlockingQueue<>(); + eventBroadcaster.register(statuses::add); + + storeUnavailable.set(true); + Status status1 = statuses.take(); + // Depending on timing, the listener might or might not receive an initial status that is still + // available prior to the one that is unavailable. + if (status1.isAvailable()) { + status1 = statuses.take(); + } + assertFalse(status1.isAvailable()); + assertEquals(status1, wrapper.getStatus()); + + storeUnavailable.set(false); + Status status2 = statuses.take(); + assertTrue(status2.isAvailable()); + assertEquals(status2, wrapper.getStatus()); + } + } + + @Test + public void pollingDetectsStaleStatus() throws Exception { + mocks.replayAll(); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis() + 10000)); + BigSegmentsConfiguration bsConfig = Components.bigSegments(storeFactoryMock) + .statusPollInterval(Duration.ofMillis(10)) + .staleAfter(Duration.ofMillis(200)) + .build(clientContext(SDK_KEY, new LDConfig.Builder().build())); + try (BigSegmentStoreWrapper wrapper = makeWrapper(bsConfig)) { + assertFalse(wrapper.getStatus().isStale()); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + eventBroadcaster.register(statuses::add); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis() - 1000)); + // Depending on timing, the listener might or might not receive an initial status that is not + // stale prior to the one that is stale. 
+ Status status1 = statuses.take(); + if (!status1.isStale()) { + status1 = statuses.take(); + } + assertTrue(status1.isStale()); + assertEquals(status1, wrapper.getStatus()); + + storeMetadata.set(new StoreMetadata(System.currentTimeMillis() + 10000)); + Status status2 = statuses.take(); + assertFalse(status2.isStale()); + assertEquals(status2, wrapper.getStatus()); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ClientContextImplTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ClientContextImplTest.java new file mode 100644 index 0000000..49d9975 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ClientContextImplTest.java @@ -0,0 +1,110 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.LoggingConfigurationBuilder; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration; + +import org.junit.Test; + +import java.time.Duration; + +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class ClientContextImplTest { + private static final String SDK_KEY = "sdk-key"; + + @Test + public void getBasicDefaultProperties() { + LDConfig config = new LDConfig.Builder().build(); + + ClientContext c = ClientContextImpl.fromConfig(SDK_KEY, config, null); + + assertEquals(SDK_KEY, c.getSdkKey()); + assertFalse(c.isOffline()); + assertEquals(Thread.MIN_PRIORITY, c.getThreadPriority()); + + HttpConfiguration httpConfig = 
c.getHttp(); + assertEquals(HttpConfigurationBuilder.DEFAULT_CONNECT_TIMEOUT, httpConfig.getConnectTimeout()); + + LoggingConfiguration loggingConfig = c.getLogging(); + assertEquals(LoggingConfigurationBuilder.DEFAULT_LOG_DATA_SOURCE_OUTAGE_AS_ERROR_AFTER, + loggingConfig.getLogDataSourceOutageAsErrorAfter()); + } + + @Test + public void getBasicPropertiesWithCustomConfig() { + LDConfig config = new LDConfig.Builder() + .http(Components.httpConfiguration().connectTimeout(Duration.ofSeconds(10))) + .logging(Components.logging().logDataSourceOutageAsErrorAfter(Duration.ofMinutes(20))) + .offline(true) + .threadPriority(Thread.MAX_PRIORITY) + .build(); + + ClientContext c = ClientContextImpl.fromConfig(SDK_KEY, config, sharedExecutor); + + assertEquals(SDK_KEY, c.getSdkKey()); + assertTrue(c.isOffline()); + assertEquals(Thread.MAX_PRIORITY, c.getThreadPriority()); + + HttpConfiguration httpConfig = c.getHttp(); + assertEquals(Duration.ofSeconds(10), httpConfig.getConnectTimeout()); + + LoggingConfiguration loggingConfig = c.getLogging(); + assertEquals(Duration.ofMinutes(20), loggingConfig.getLogDataSourceOutageAsErrorAfter()); + } + + @Test + public void getPackagePrivateSharedExecutor() { + LDConfig config = new LDConfig.Builder().build(); + + ClientContext c = ClientContextImpl.fromConfig(SDK_KEY, config, sharedExecutor); + + assertSame(sharedExecutor, ClientContextImpl.get(c).sharedExecutor); + } + + @Test + public void getPackagePrivateDiagnosticAccumulator() { + LDConfig config = new LDConfig.Builder().build(); + + ClientContext c = ClientContextImpl.fromConfig(SDK_KEY, config, sharedExecutor); + + assertNotNull(ClientContextImpl.get(c).diagnosticStore); + } + + @Test + public void diagnosticStoreIsNullIfOptedOut() { + LDConfig config = new LDConfig.Builder() + .diagnosticOptOut(true) + .build(); + + ClientContext c = ClientContextImpl.fromConfig(SDK_KEY, config, sharedExecutor); + + assertNull(ClientContextImpl.get(c).diagnosticStore); + } + + @Test + public 
void packagePrivatePropertiesHaveDefaultsIfContextIsNotOurImplementation() { + // This covers a scenario where a user has created their own ClientContext and it has been + // passed to one of our SDK components. + ClientContext c = new ClientContext(SDK_KEY); + + ClientContextImpl impl = ClientContextImpl.get(c); + + assertNotNull(impl.sharedExecutor); + assertNull(impl.diagnosticStore); + + ClientContextImpl impl2 = ClientContextImpl.get(c); + + assertNotNull(impl2.sharedExecutor); + assertSame(impl.sharedExecutor, impl2.sharedExecutor); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelDependenciesTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelDependenciesTest.java new file mode 100644 index 0000000..e92603b --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelDependenciesTest.java @@ -0,0 +1,381 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModelDependencies.DependencyTracker; +import com.launchdarkly.sdk.server.DataModelDependencies.KindAndKey; +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.DataStoreTestTypes.TestItem; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +import org.junit.Test; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static 
com.google.common.collect.Iterables.transform; +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.TEST_ITEMS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toDataMap; +import static com.launchdarkly.sdk.server.ModelBuilders.clause; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingSegment; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.prerequisite; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentRuleBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.iterableWithSize; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +@SuppressWarnings("javadoc") +public class DataModelDependenciesTest { + @Test + public void computeDependenciesFromFlag() { + FeatureFlag flag1 = flagBuilder("key").build(); + + assertThat( + DataModelDependencies.computeDependenciesFrom( + DataModel.FEATURES, + new ItemDescriptor(flag1.getVersion(), flag1) + ), + emptyIterable() + ); + + FeatureFlag flag2 = ModelBuilders.flagBuilder("key") + .prerequisites( + prerequisite("flag2", 0), + prerequisite("flag3", 0) + ) + .rules( + ruleBuilder() + .clauses( + clause("key", Operator.in, LDValue.of("ignore")), + clauseMatchingSegment("segment1", "segment2") + ) + .build(), + ruleBuilder() + .clauses( + clauseMatchingSegment("segment3") + ) + .build() + ) + .build(); + + assertThat( + DataModelDependencies.computeDependenciesFrom( + 
DataModel.FEATURES, + new ItemDescriptor(flag2.getVersion(), flag2) + ), + contains( + new KindAndKey(FEATURES, "flag2"), + new KindAndKey(FEATURES, "flag3"), + new KindAndKey(SEGMENTS, "segment1"), + new KindAndKey(SEGMENTS, "segment2"), + new KindAndKey(SEGMENTS, "segment3") + ) + ); + } + + @Test + public void computeDependenciesFromSegment() { + Segment segment = segmentBuilder("segment").build(); + + assertThat( + DataModelDependencies.computeDependenciesFrom( + DataModel.SEGMENTS, + new ItemDescriptor(segment.getVersion(), segment) + ), + emptyIterable() + ); + } + + @Test + public void computeDependenciesFromUnknownDataKind() { + assertThat( + DataModelDependencies.computeDependenciesFrom( + DataStoreTestTypes.TEST_ITEMS, + new ItemDescriptor(1, new DataStoreTestTypes.TestItem("x", 1)) + ), + emptyIterable() + ); + } + + @Test + public void computeDependenciesFromNullItem() { + assertThat( + DataModelDependencies.computeDependenciesFrom( + DataModel.FEATURES, + new ItemDescriptor(1, null) + ), + emptyIterable() + ); + + assertThat( + DataModelDependencies.computeDependenciesFrom( + DataModel.FEATURES, + null + ), + emptyIterable() + ); + } + + @Test + public void sortAllCollections() { + FullDataSet result = DataModelDependencies.sortAllCollections(DEPENDENCY_ORDERING_TEST_DATA); + verifySortedData(result, DEPENDENCY_ORDERING_TEST_DATA); + } + + @SuppressWarnings("unchecked") + @Test + public void sortAllCollectionsLeavesItemsOfUnknownDataKindUnchanged() { + TestItem extraItem1 = new TestItem("item1", 1); + TestItem extraItem2 = new TestItem("item2", 1); + FullDataSet inputData = new DataBuilder() + .addAny(FEATURES, + flagBuilder("a") + .prerequisites(prerequisite("b", 0), prerequisite("c", 0)).build(), + flagBuilder("b") + .prerequisites(prerequisite("c", 0)).build(), + flagBuilder("c").build()) + .addAny(SEGMENTS) + .addAny(TEST_ITEMS, extraItem1, extraItem2) + .build(); + + FullDataSet result = DataModelDependencies.sortAllCollections(inputData); + 
assertThat(result.getData(), iterableWithSize(3)); + + // the unknown data kind appears last, and the ordering of its items is unchanged + assertThat(transform(result.getData(), coll -> coll.getKey()), + contains(SEGMENTS, FEATURES, TEST_ITEMS)); + assertThat(Iterables.get(result.getData(), 2).getValue().getItems(), + contains(extraItem1.toKeyedItemDescriptor(), extraItem2.toKeyedItemDescriptor())); + } + + static void verifySortedData(FullDataSet sortedData, FullDataSet inputData) { + Map> dataMap = toDataMap(sortedData); + assertEquals(2, dataMap.size()); + Map> inputDataMap = toDataMap(inputData); + + // Segments should always come first + assertEquals(SEGMENTS, Iterables.get(dataMap.keySet(), 0)); + assertEquals(inputDataMap.get(SEGMENTS).size(), Iterables.get(dataMap.values(), 0).size()); + + // Features should be ordered so that a flag always appears after its prerequisites, if any + assertEquals(FEATURES, Iterables.get(dataMap.keySet(), 1)); + Map map1 = Iterables.get(dataMap.values(), 1); + List list1 = ImmutableList.copyOf(transform(map1.values(), d -> (DataModel.FeatureFlag)d.getItem())); + assertEquals(inputDataMap.get(FEATURES).size(), map1.size()); + for (int itemIndex = 0; itemIndex < list1.size(); itemIndex++) { + DataModel.FeatureFlag item = list1.get(itemIndex); + for (DataModel.Prerequisite prereq: item.getPrerequisites()) { + DataModel.FeatureFlag depFlag = (DataModel.FeatureFlag)map1.get(prereq.getKey()).getItem(); + int depIndex = list1.indexOf(depFlag); + if (depIndex > itemIndex) { + fail(String.format("%s depends on %s, but %s was listed first; keys in order are [%s]", + item.getKey(), prereq.getKey(), item.getKey(), + Joiner.on(", ").join(map1.keySet()))); + } + } + } + } + + @Test + public void dependencyTrackerReturnsSingleValueResultForUnknownItem() { + DependencyTracker dt = new DependencyTracker(); + + // a change to any item with no known depenencies affects only itself + verifyAffectedItems(dt, FEATURES, "flag1", + new 
KindAndKey(FEATURES, "flag1")); + } + + @Test + public void dependencyTrackerBuildsGraph() { + DependencyTracker dt = new DependencyTracker(); + + Segment segment1 = segmentBuilder("segment1").build(); + Segment segment2 = segmentBuilder("segment2"). + rules(segmentRuleBuilder().clauses(clauseMatchingSegment("segment3")).build()) + .build(); + Segment segment3 = segmentBuilder("segment3").build(); + + FeatureFlag flag1 = flagBuilder("flag1") + .prerequisites( + prerequisite("flag2", 0), + prerequisite("flag3", 0) + ) + .rules( + ruleBuilder() + .clauses( + clauseMatchingSegment("segment1", "segment2") + ) + .build() + ) + .build(); + + FeatureFlag flag2 = flagBuilder("flag2") + .prerequisites( + prerequisite("flag4", 0) + ) + .rules( + ruleBuilder() + .clauses( + clauseMatchingSegment("segment2") + ) + .build() + ) + .build(); + + for (Segment s: new Segment[] {segment1, segment2, segment3}) { + dt.updateDependenciesFrom(SEGMENTS, s.getKey(), new ItemDescriptor(s.getVersion(), s)); + } + for (FeatureFlag f: new FeatureFlag[] {flag1, flag2}) { + dt.updateDependenciesFrom(FEATURES, f.getKey(), new ItemDescriptor(f.getVersion(), f)); + } + + // a change to flag1 affects only flag1 + verifyAffectedItems(dt, FEATURES, "flag1", + new KindAndKey(FEATURES, "flag1")); + + // a change to flag2 affects flag2 and flag1 + verifyAffectedItems(dt, FEATURES, "flag2", + new KindAndKey(FEATURES, "flag2"), + new KindAndKey(FEATURES, "flag1")); + + // a change to flag3 affects flag3 and flag1 + verifyAffectedItems(dt, FEATURES, "flag3", + new KindAndKey(FEATURES, "flag3"), + new KindAndKey(FEATURES, "flag1")); + + // a change to segment1 affects segment1 and flag1 + verifyAffectedItems(dt, SEGMENTS, "segment1", + new KindAndKey(SEGMENTS, "segment1"), + new KindAndKey(FEATURES, "flag1")); + + // a change to segment2 affects segment2, flag1, and flag2 + verifyAffectedItems(dt, SEGMENTS, "segment2", + new KindAndKey(SEGMENTS, "segment2"), + new KindAndKey(FEATURES, "flag1"), + new 
KindAndKey(FEATURES, "flag2")); + + // a change to segment3 affects segment3, segment2, flag1, and flag2 + verifyAffectedItems(dt, SEGMENTS, "segment3", + new KindAndKey(SEGMENTS, "segment3"), + new KindAndKey(SEGMENTS, "segment2"), + new KindAndKey(FEATURES, "flag1"), + new KindAndKey(FEATURES, "flag2")); + } + + @Test + public void dependencyTrackerUpdatesGraph() { + DependencyTracker dt = new DependencyTracker(); + + FeatureFlag flag1 = ModelBuilders.flagBuilder("flag1") + .prerequisites(prerequisite("flag3", 0)) + .build(); + dt.updateDependenciesFrom(FEATURES, flag1.getKey(), new ItemDescriptor(flag1.getVersion(), flag1)); + + FeatureFlag flag2 = ModelBuilders.flagBuilder("flag2") + .prerequisites(prerequisite("flag3", 0)) + .build(); + dt.updateDependenciesFrom(FEATURES, flag2.getKey(), new ItemDescriptor(flag2.getVersion(), flag2)); + + // at this point, a change to flag3 affects flag3, flag2, and flag1 + verifyAffectedItems(dt, FEATURES, "flag3", + new KindAndKey(FEATURES, "flag3"), + new KindAndKey(FEATURES, "flag2"), + new KindAndKey(FEATURES, "flag1")); + + // now make it so flag1 now depends on flag4 instead of flag2 + FeatureFlag flag1v2 = ModelBuilders.flagBuilder("flag1") + .prerequisites(prerequisite("flag4", 0)) + .build(); + dt.updateDependenciesFrom(FEATURES, flag1.getKey(), new ItemDescriptor(flag1v2.getVersion(), flag1v2)); + + // now, a change to flag3 affects flag3 and flag2 + verifyAffectedItems(dt, FEATURES, "flag3", + new KindAndKey(FEATURES, "flag3"), + new KindAndKey(FEATURES, "flag2")); + + // and a change to flag4 affects flag4 and flag1 + verifyAffectedItems(dt, FEATURES, "flag4", + new KindAndKey(FEATURES, "flag4"), + new KindAndKey(FEATURES, "flag1")); + } + + @Test + public void dependencyTrackerResetsGraph() { + DependencyTracker dt = new DependencyTracker(); + + FeatureFlag flag1 = ModelBuilders.flagBuilder("flag1") + .prerequisites(prerequisite("flag3", 0)) + .build(); + dt.updateDependenciesFrom(FEATURES, flag1.getKey(), new 
ItemDescriptor(flag1.getVersion(), flag1)); + + verifyAffectedItems(dt, FEATURES, "flag3", + new KindAndKey(FEATURES, "flag3"), + new KindAndKey(FEATURES, "flag1")); + + dt.reset(); + + verifyAffectedItems(dt, FEATURES, "flag3", + new KindAndKey(FEATURES, "flag3")); + } + + private void verifyAffectedItems(DependencyTracker dt, DataKind kind, String key, KindAndKey... expected) { + Set result = new HashSet<>(); + dt.addAffectedItems(result, new KindAndKey(kind, key)); + assertThat(result, equalTo(ImmutableSet.copyOf(expected))); + } + + @Test + public void kindAndKeyEquality() { + KindAndKey kk1 = new KindAndKey(FEATURES, "key1"); + KindAndKey kk2 = new KindAndKey(FEATURES, "key1"); + assertThat(kk1, equalTo(kk2)); + assertThat(kk2, equalTo(kk1)); + assertThat(kk1.hashCode(), equalTo(kk2.hashCode())); + + KindAndKey kk3 = new KindAndKey(FEATURES, "key2"); + assertThat(kk3, not(equalTo(kk1))); + assertThat(kk1, not(equalTo(kk3))); + + KindAndKey kk4 = new KindAndKey(SEGMENTS, "key1"); + assertThat(kk4, not(equalTo(kk1))); + assertThat(kk1, not(equalTo(kk4))); + + assertThat(kk1, not(equalTo(null))); + assertThat(kk1, not(equalTo("x"))); + } + + static FullDataSet DEPENDENCY_ORDERING_TEST_DATA = + new DataBuilder() + .addAny(FEATURES, + flagBuilder("a") + .prerequisites(prerequisite("b", 0), prerequisite("c", 0)).build(), + flagBuilder("b") + .prerequisites(prerequisite("c", 0), prerequisite("e", 0)).build(), + flagBuilder("c").build(), + flagBuilder("d").build(), + flagBuilder("e").build(), + flagBuilder("f").build()) + .addAny(SEGMENTS, + segmentBuilder("o").build()) + .build(); +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelPreprocessingTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelPreprocessingTest.java new file mode 100644 index 0000000..0fb0fc9 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelPreprocessingTest.java @@ -0,0 +1,380 @@ +package 
com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.DataModel.Target; +import com.launchdarkly.sdk.server.DataModelPreprocessing.ClausePreprocessed; + +import org.junit.Test; + +import java.time.Instant; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.List; + +import static com.launchdarkly.sdk.server.ModelBuilders.clause; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentRuleBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class DataModelPreprocessingTest { + // We deliberately use the data model constructors here instead of the more convenient ModelBuilders + // equivalents, to make sure we're testing the afterDeserialization() behavior and not just the builder. 
+ + private static final LDValue aValue = LDValue.of("a"), bValue = LDValue.of("b"); + + private FeatureFlag flagFromClause(Clause c) { + return new FeatureFlag("key", 0, false, null, null, null, null, rulesFromClause(c), + null, null, null, false, false, false, null, false, null, null, false); + } + + private List rulesFromClause(Clause c) { + return ImmutableList.of(new Rule("", ImmutableList.of(c), null, null, false)); + } + + @Test + public void preprocessFlagAddsPrecomputedOffResult() { + FeatureFlag f = new FeatureFlag("key", 0, false, null, null, null, null, + ImmutableList.of(), null, + 0, + ImmutableList.of(aValue, bValue), + false, false, false, null, false, null, null, false); + + f.afterDeserialized(); + + assertThat(f.preprocessed, notNullValue()); + assertThat(f.preprocessed.offResult, + equalTo(EvalResult.of(aValue, 0, EvaluationReason.off()))); + } + + @Test + public void preprocessFlagAddsPrecomputedOffResultForNullOffVariation() { + FeatureFlag f = new FeatureFlag("key", 0, false, null, null, null, null, + ImmutableList.of(), null, + null, + ImmutableList.of(aValue, bValue), + false, false, false, null, false, null, null, false); + + f.afterDeserialized(); + + assertThat(f.preprocessed, notNullValue()); + assertThat(f.preprocessed.offResult, + equalTo(EvalResult.of(LDValue.ofNull(), EvaluationDetail.NO_VARIATION, EvaluationReason.off()))); + } + + @Test + public void preprocessFlagAddsPrecomputedFallthroughResults() { + FeatureFlag f = new FeatureFlag("key", 0, false, null, null, null, null, + ImmutableList.of(), null, 0, + ImmutableList.of(aValue, bValue), + false, false, false, null, false, null, null, false); + + f.afterDeserialized(); + + assertThat(f.preprocessed, notNullValue()); + assertThat(f.preprocessed.fallthroughResults, notNullValue()); + EvaluationReason regularReason = EvaluationReason.fallthrough(false); + EvaluationReason inExperimentReason = EvaluationReason.fallthrough(true); + + 
assertThat(f.preprocessed.fallthroughResults.forVariation(0, false), + equalTo(EvalResult.of(aValue, 0, regularReason))); + assertThat(f.preprocessed.fallthroughResults.forVariation(0, true), + equalTo(EvalResult.of(aValue, 0, inExperimentReason))); + + assertThat(f.preprocessed.fallthroughResults.forVariation(1, false), + equalTo(EvalResult.of(bValue, 1, regularReason))); + assertThat(f.preprocessed.fallthroughResults.forVariation(1, true), + equalTo(EvalResult.of(bValue, 1, inExperimentReason))); + } + + @Test + public void preprocessFlagAddsPrecomputedTargetMatchResults() { + FeatureFlag f = new FeatureFlag("key", 0, false, null, null, + ImmutableList.of(new Target(null, ImmutableSet.of(), 1)), + null, ImmutableList.of(), null, 0, + ImmutableList.of(aValue, bValue), + false, false, false, null, false, null, null, false); + + f.afterDeserialized(); + + Target t = f.getTargets().get(0); + assertThat(t.preprocessed, notNullValue()); + assertThat(t.preprocessed.targetMatchResult, + equalTo(EvalResult.of(bValue, 1, EvaluationReason.targetMatch()))); + } + + @Test + public void preprocessFlagAddsPrecomputedPrerequisiteFailedResults() { + FeatureFlag f = new FeatureFlag("key", 0, false, + ImmutableList.of(new Prerequisite("abc", 1)), + null, null, null, + ImmutableList.of(), null, 0, + ImmutableList.of(aValue, bValue), + false, false, false, null, false, null, null, false); + + f.afterDeserialized(); + + Prerequisite p = f.getPrerequisites().get(0); + assertThat(p.preprocessed, notNullValue()); + assertThat(p.preprocessed.prerequisiteFailedResult, + equalTo(EvalResult.of(aValue, 0, EvaluationReason.prerequisiteFailed("abc")))); + } + + @Test + public void preprocessFlagAddsPrecomputedResultsToFlagRulesWithRollout() { + + List variations = new ArrayList<>(); + variations.add(new DataModel.WeightedVariation(0, 50000, false)); + variations.add(new DataModel.WeightedVariation(1, 50000, false)); + DataModel.RolloutKind kind = DataModel.RolloutKind.rollout; + Integer seed = 
123; + DataModel.Rollout rollout = new DataModel.Rollout(null, variations, null, kind, seed); + + FeatureFlag f = new FeatureFlag("key", 0, false, null, null, null, null, + ImmutableList.of(new Rule("ruleid0", ImmutableList.of(), null, rollout, false)), + null, null, + ImmutableList.of(aValue, bValue), + false, false, false, null, false, null, null, false); + + f.afterDeserialized(); + + Rule rule = f.getRules().get(0); + assertThat(rule.preprocessed, notNullValue()); + assertThat(rule.preprocessed.allPossibleResults, notNullValue()); + EvaluationReason regularReason = EvaluationReason.ruleMatch(0, "ruleid0", false); + EvaluationReason inExperimentReason = EvaluationReason.ruleMatch(0, "ruleid0", true); + + assertThat(rule.preprocessed.allPossibleResults.forVariation(0, false), + equalTo(EvalResult.of(aValue, 0, regularReason))); + assertThat(rule.preprocessed.allPossibleResults.forVariation(0, true), + equalTo(EvalResult.of(aValue, 0, inExperimentReason))); + + assertThat(rule.preprocessed.allPossibleResults.forVariation(1, false), + equalTo(EvalResult.of(bValue, 1, regularReason))); + assertThat(rule.preprocessed.allPossibleResults.forVariation(1, true), + equalTo(EvalResult.of(bValue, 1, inExperimentReason))); + } + + @Test + public void preprocessFlagAddsPrecomputedResultsToFlagRulesWithJustVariation() { + FeatureFlag f = new FeatureFlag("key", 0, false, null, null, null, null, + ImmutableList.of(new Rule("ruleid0", ImmutableList.of(), 0, null, false)), + null, null, + ImmutableList.of(aValue, bValue), + false, false, false, null, false, null, null, false); + + f.afterDeserialized(); + + Rule rule = f.getRules().get(0); + assertThat(rule.preprocessed, notNullValue()); + assertThat(rule.preprocessed.allPossibleResults, notNullValue()); + EvaluationReason regularReason = EvaluationReason.ruleMatch(0, "ruleid0", false); + EvaluationReason inExperimentReason = EvaluationReason.ruleMatch(0, "ruleid0", true); + + 
assertThat(rule.preprocessed.allPossibleResults.forVariation(0, false), + equalTo(EvalResult.of(aValue, 0, regularReason))); + assertThat(rule.preprocessed.allPossibleResults.forVariation(0, true), + equalTo(EvalResult.of(aValue, 0, inExperimentReason))); + + assertThat(rule.preprocessed.allPossibleResults.forVariation(1, false), + equalTo(EvalResult.error(EvaluationReason.ErrorKind.EXCEPTION))); + assertThat(rule.preprocessed.allPossibleResults.forVariation(1, true), + equalTo(EvalResult.error(EvaluationReason.ErrorKind.EXCEPTION))); + } + + @Test + public void preprocessFlagCreatesClauseValuesMapForMultiValueEqualityTest() { + Clause c = clause( + "x", + Operator.in, + LDValue.of("a"), LDValue.of(0) + ); + + FeatureFlag f = flagFromClause(c); + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + + f.afterDeserialized(); + + ClausePreprocessed ce = f.getRules().get(0).getClauses().get(0).preprocessed; + assertNotNull(ce); + assertEquals(ImmutableSet.of(LDValue.of("a"), LDValue.of(0)), ce.valuesSet); + } + + @Test + public void preprocessFlagDoesNotCreateClauseValuesMapForSingleValueEqualityTest() { + Clause c = clause( + "x", + Operator.in, + LDValue.of("a") + ); + + FeatureFlag f = flagFromClause(c); + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + + f.afterDeserialized(); + + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + } + + @Test + public void preprocessFlagDoesNotCreateClauseValuesMapForEmptyEqualityTest() { + Clause c = clause( + "x", + Operator.in + ); + + FeatureFlag f = flagFromClause(c); + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + + f.afterDeserialized(); + + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + } + + @Test + public void preprocessFlagDoesNotCreateClauseValuesMapForNonEqualityOperators() { + for (Operator op: Operator.getBuiltins()) { + if (op == Operator.in) { + continue; + } + Clause c = clause( + "x", + op, + LDValue.of("a"), 
LDValue.of("b") + ); + // The values & types aren't very important here because we won't actually evaluate the clause; all that + // matters is that there's more than one of them, so that it *would* build a map if the operator were "in" + + FeatureFlag f = flagFromClause(c); + assertNull(op.toString(), f.getRules().get(0).getClauses().get(0).preprocessed); + + f.afterDeserialized(); + + ClausePreprocessed ce = f.getRules().get(0).getClauses().get(0).preprocessed; + // this might be non-null if we preprocessed the values list, but there should still not be a valuesSet + if (ce != null) { + assertNull(ce.valuesSet); + } + } + } + + @Test + public void preprocessFlagParsesClauseDate() { + String time1Str = "2016-04-16T17:09:12-07:00"; + Instant time1 = ZonedDateTime.parse(time1Str).toInstant(); + int time2Num = 1000000; + Instant time2 = Instant.ofEpochMilli(time2Num); + + for (Operator op: new Operator[] { Operator.after, Operator.before }) { + Clause c = clause( + "x", + op, + LDValue.of(time1Str), LDValue.of(time2Num), LDValue.of("x"), LDValue.of(false) + ); + + FeatureFlag f = flagFromClause(c); + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + + f.afterDeserialized(); + + ClausePreprocessed ce = f.getRules().get(0).getClauses().get(0).preprocessed; + assertNotNull(op.name(), ce); + assertNotNull(op.name(), ce.valuesExtra); + assertEquals(op.name(), 4, ce.valuesExtra.size()); + assertEquals(op.name(), time1, ce.valuesExtra.get(0).parsedDate); + assertEquals(op.name(), time2, ce.valuesExtra.get(1).parsedDate); + assertNull(op.name(), ce.valuesExtra.get(2).parsedDate); + assertNull(op.name(), ce.valuesExtra.get(3).parsedDate); + } + } + + @Test + public void preprocessFlagParsesClauseRegex() { + Clause c = clause( + "x", + Operator.matches, + LDValue.of("x*"), LDValue.of("***not a regex"), LDValue.of(3) + ); + + FeatureFlag f = flagFromClause(c); + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + + f.afterDeserialized(); + + 
ClausePreprocessed ce = f.getRules().get(0).getClauses().get(0).preprocessed; + assertNotNull(ce); + assertNotNull(ce.valuesExtra); + assertEquals(3, ce.valuesExtra.size()); + assertNotNull(ce.valuesExtra.get(0).parsedRegex); + assertEquals("x*", ce.valuesExtra.get(0).parsedRegex.toString()); // Pattern doesn't support equals() + assertNull(ce.valuesExtra.get(1).parsedRegex); + assertNull(ce.valuesExtra.get(2).parsedRegex); + } + + + @Test + public void preprocessFlagParsesClauseSemVer() { + SemanticVersion expected = EvaluatorTypeConversion.valueToSemVer(LDValue.of("1.2.3")); + assertNotNull(expected); + + for (Operator op: new Operator[] { Operator.semVerEqual, Operator.semVerGreaterThan, Operator.semVerLessThan }) { + Clause c = clause( + "x", + op, + LDValue.of("1.2.3"), LDValue.of("x"), LDValue.of(false) + ); + + FeatureFlag f = flagFromClause(c); + assertNull(f.getRules().get(0).getClauses().get(0).preprocessed); + + f.afterDeserialized(); + + ClausePreprocessed ce = f.getRules().get(0).getClauses().get(0).preprocessed; + assertNotNull(op.name(), ce); + assertNotNull(op.name(), ce.valuesExtra); + assertEquals(op.name(), 3, ce.valuesExtra.size()); + assertNotNull(op.name(), ce.valuesExtra.get(0).parsedSemVer); + assertEquals(op.name(), 0, ce.valuesExtra.get(0).parsedSemVer.compareTo(expected)); // SemanticVersion doesn't support equals() + assertNull(op.name(), ce.valuesExtra.get(1).parsedSemVer); + assertNull(op.name(), ce.valuesExtra.get(2).parsedSemVer); + } + } + + @Test + public void preprocessSegmentPreprocessesClausesInRules() { + // We'll just check one kind of clause, and assume that the preprocessing works the same as in flag rules + Clause c = clause( + "x", + Operator.matches, + LDValue.of("x*") + ); + SegmentRule rule = segmentRuleBuilder().clauses(c).build(); + Segment s = segmentBuilder("key").disablePreprocessing(true).rules(rule).build(); + + assertNull(s.getRules().get(0).getClauses().get(0).preprocessed); + + s.afterDeserialized(); + + 
ClausePreprocessed ce = s.getRules().get(0).getClauses().get(0).preprocessed; + assertNotNull(ce.valuesExtra); + assertEquals(1, ce.valuesExtra.size()); + assertNotNull(ce.valuesExtra.get(0).parsedRegex); + assertEquals("x*", ce.valuesExtra.get(0).parsedRegex.toString()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelSerializationTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelSerializationTest.java new file mode 100644 index 0000000..ea5203a --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelSerializationTest.java @@ -0,0 +1,767 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.gson.JsonElement; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.DataModel.Target; +import com.launchdarkly.sdk.server.DataModel.VersionedData; +import com.launchdarkly.sdk.server.DataModel.WeightedVariation; +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import 
org.junit.Test; + +import java.util.Collections; +import java.util.function.Consumer; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.DataModelSerialization.deserializeFromParsedJson; +import static com.launchdarkly.sdk.server.DataModelSerialization.parseFullDataSet; +import static com.launchdarkly.sdk.server.JsonHelpers.serialize; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.TestUtil.assertDataSetEquals; +import static com.launchdarkly.sdk.server.TestUtil.jsonReaderFrom; +import static com.launchdarkly.testhelpers.JsonAssertions.assertJsonEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class DataModelSerializationTest { + + @Test + public void deserializeFlagFromParsedJson() { + String json = "{\"key\":\"flagkey\",\"version\":1}"; + JsonElement element = JsonHelpers.gsonInstance().fromJson(json, JsonElement.class); + VersionedData flag = deserializeFromParsedJson(DataModel.FEATURES, element); + assertEquals(FeatureFlag.class, flag.getClass()); + assertEquals("flagkey", flag.getKey()); + assertEquals(1, flag.getVersion()); + } + + @Test(expected=SerializationException.class) + public void deserializeInvalidFlagFromParsedJson() { + String json = "{\"key\":[3]}"; + JsonElement element = JsonHelpers.gsonInstance().fromJson(json, JsonElement.class); + deserializeFromParsedJson(DataModel.FEATURES, element); + } + + @Test + public void deserializeSegmentFromParsedJson() { + String json = "{\"key\":\"segkey\",\"version\":1}"; + JsonElement element = JsonHelpers.gsonInstance().fromJson(json, 
JsonElement.class); + VersionedData segment = deserializeFromParsedJson(DataModel.SEGMENTS, element); + assertEquals(Segment.class, segment.getClass()); + assertEquals("segkey", segment.getKey()); + assertEquals(1, segment.getVersion()); + } + + @Test(expected=SerializationException.class) + public void deserializeInvalidSegmentFromParsedJson() { + String json = "{\"key\":[3]}"; + JsonElement element = JsonHelpers.gsonInstance().fromJson(json, JsonElement.class); + deserializeFromParsedJson(DataModel.SEGMENTS, element); + } + + @Test(expected=SerializationException.class) + public void deserializeInvalidDataKindFromParsedJson() { + String json = "{\"key\":\"something\",\"version\":1}"; + JsonElement element = JsonHelpers.gsonInstance().fromJson(json, JsonElement.class); + DataKind mysteryKind = new DataKind("incorrect", null, null); + deserializeFromParsedJson(mysteryKind, element); + } + + @Test + public void flagIsDeserializedWithAllProperties() { + assertFlagFromJson( + flagWithAllPropertiesJson(), + flag -> { + assertFlagHasAllProperties(flag); + + String json1 = FEATURES.serialize(new ItemDescriptor(flag.getVersion(), flag)); + assertFlagFromJson(LDValue.parse(json1), flag1 -> assertFlagHasAllProperties(flag1)); + }); + } + + @Test + public void flagIsDeserializedWithMinimalProperties() { + assertFlagFromJson( + LDValue.buildObject().put("key", "flag-key").put("version", 99).build(), + flag -> { + assertEquals("flag-key", flag.getKey()); + assertEquals(99, flag.getVersion()); + assertFalse(flag.isOn()); + assertNull(flag.getSalt()); + assertNotNull(flag.getTargets()); + assertEquals(0, flag.getTargets().size()); + assertNotNull(flag.getRules()); + assertEquals(0, flag.getRules().size()); + assertNull(flag.getFallthrough()); + assertNull(flag.getOffVariation()); + assertNotNull(flag.getVariations()); + assertEquals(0, flag.getVariations().size()); + assertFalse(flag.isClientSide()); + assertFalse(flag.isTrackEvents()); + 
assertFalse(flag.isTrackEventsFallthrough()); + assertNull(flag.getDebugEventsUntilDate()); + }); + } + + @Test + public void flagIsDeserializedWithOptionalExperimentProperties() { + String json = LDValue.buildObject() + .put("key", "flag-key") + .put("version", 157) + .put("rules", LDValue.buildArray() + .add(LDValue.buildObject() + .put("id", "id1") + .put("rollout", LDValue.buildObject() + .put("variations", LDValue.buildArray() + .add(LDValue.buildObject() + .put("variation", 2) + .put("weight", 100000) + .build()) + .build()) + .put("bucketBy", "email") + .build()) + .build()) + .build()) + .put("fallthrough", LDValue.buildObject() + .put("variation", 1) + .build()) + .put("offVariation", 2) + .put("variations", LDValue.buildArray().add("a").add("b").add("c").build()) + .build().toJsonString(); + FeatureFlag flag = (FeatureFlag)FEATURES.deserialize(json).getItem(); + assertEquals("flag-key", flag.getKey()); + assertEquals(157, flag.getVersion()); + assertFalse(flag.isOn()); + assertNull(flag.getSalt()); + assertNotNull(flag.getTargets()); + assertEquals(0, flag.getTargets().size()); + assertNotNull(flag.getRules()); + assertEquals(1, flag.getRules().size()); + assertEquals(RolloutKind.rollout, flag.getRules().get(0).getRollout().getKind()); + assertFalse(flag.getRules().get(0).getRollout().isExperiment()); + assertNull(flag.getRules().get(0).getRollout().getSeed()); + assertEquals(2, flag.getRules().get(0).getRollout().getVariations().get(0).getVariation()); + assertEquals(100000, flag.getRules().get(0).getRollout().getVariations().get(0).getWeight()); + assertFalse(flag.getRules().get(0).getRollout().getVariations().get(0).isUntracked()); + assertNotNull(flag.getVariations()); + assertEquals(3, flag.getVariations().size()); + assertFalse(flag.isClientSide()); + assertFalse(flag.isTrackEvents()); + assertFalse(flag.isTrackEventsFallthrough()); + assertNull(flag.getDebugEventsUntilDate()); + } + + @Test + public void flagRuleBasicProperties() { + LDValue 
ruleJson = LDValue.buildObject() + .put("id", "id0") + .put("variation", 2) + .put("clauses", LDValue.arrayOf()) + .build(); + assertFlagRuleFromJson(ruleJson, r -> { + assertEquals("id0", r.getId()); + assertEquals(Integer.valueOf(2), r.getVariation()); + assertNull(r.getRollout()); + assertFalse(r.isTrackEvents()); + }); + } + + @Test + public void flagRuleTrackEvents() { + LDValue ruleJson = LDValue.buildObject() + .put("id", "id0") + .put("variation", 2) + .put("clauses", LDValue.arrayOf()) + .put("trackEvents", true) + .build(); + assertFlagRuleFromJson(ruleJson, r -> { + assertTrue(r.isTrackEvents()); + }); + } + + @Test + public void flagRuleRollout() { + LDValue ruleJson = LDValue.buildObject() + .put("id", "id0") + .put("rollout", LDValue.buildObject() + .put("variations", LDValue.arrayOf( + LDValue.buildObject() + .put("variation", 2) + .put("weight", 100000) + .build())) + .build()) + .put("clauses", LDValue.arrayOf()) + .build(); + assertFlagRuleFromJson(ruleJson, r -> { + assertNull(r.getVariation()); + assertNotNull(r.getRollout()); + assertEquals(RolloutKind.rollout, r.getRollout().getKind()); + assertNull(r.getRollout().getSeed()); + assertNull(r.getRollout().getContextKind()); + assertNull(r.getRollout().getBucketBy()); + assertEquals(1, r.getRollout().getVariations().size()); + assertEquals(2, r.getRollout().getVariations().get(0).getVariation()); + assertEquals(100000, r.getRollout().getVariations().get(0).getWeight()); + }); + } + + @Test + public void flagRuleRolloutBucketByWithoutContextKind() { + LDValue ruleJson = LDValue.buildObject() + .put("id", "id0") + .put("rollout", LDValue.buildObject() + .put("bucketBy", "/attr1") + .put("variations", LDValue.arrayOf( + LDValue.buildObject() + .put("variation", 2) + .put("weight", 100000) + .build())) + .build()) + .put("clauses", LDValue.arrayOf()) + .build(); + assertFlagRuleFromJson(ruleJson, r -> { + assertNotNull(r.getRollout()); + assertEquals(AttributeRef.fromLiteral("/attr1"), 
r.getRollout().getBucketBy()); + }); + } + + @Test + public void flagRuleRolloutContextKind() { + LDValue ruleJson = LDValue.buildObject() + .put("id", "id0") + .put("rollout", LDValue.buildObject() + .put("contextKind", "org") + .put("bucketBy", "/address/street") + .put("variations", LDValue.arrayOf( + LDValue.buildObject() + .put("variation", 2) + .put("weight", 100000) + .build())) + .build()) + .put("clauses", LDValue.arrayOf()) + .build(); + assertFlagRuleFromJson(ruleJson, r -> { + assertNotNull(r.getRollout()); + assertEquals(ContextKind.of("org"), r.getRollout().getContextKind()); + assertEquals(AttributeRef.fromPath("/address/street"), r.getRollout().getBucketBy()); + }); + } + + @Test + public void flagRuleExperiment() { + LDValue ruleJson = LDValue.buildObject() + .put("id", "id0") + .put("rollout", LDValue.buildObject() + .put("kind", "experiment") + .put("variations", LDValue.arrayOf( + LDValue.buildObject() + .put("variation", 2) + .put("weight", 100000) + .build())) + .put("seed", 123) + .build()) + .put("clauses", LDValue.arrayOf()) + .build(); + assertFlagRuleFromJson(ruleJson, r -> { + assertNotNull(r.getRollout()); + assertEquals(RolloutKind.experiment, r.getRollout().getKind()); + assertEquals(Integer.valueOf(123), r.getRollout().getSeed()); + }); + } + + @Test + public void flagClauseWithContextKind() { + LDValue clauseJson = LDValue.buildObject().put("contextKind", "org") + .put("attribute", "/address/street").put("op", "in").put("values", LDValue.arrayOf()).build(); + assertClauseFromJson(clauseJson, c -> { + assertEquals(ContextKind.of("org"), c.getContextKind()); + assertEquals(AttributeRef.fromPath("/address/street"), c.getAttribute()); + }); + } + + @Test + public void flagClauseWithoutContextKind() { + // When there's no context kind, the attribute is interpreted as a literal name even if it has a slash + LDValue clauseJson = LDValue.buildObject() + .put("attribute", "/attr1").put("op", "in").put("values", LDValue.arrayOf()).build(); + 
assertClauseFromJson(clauseJson, c -> { + assertNull(c.getContextKind()); + assertEquals(AttributeRef.fromLiteral("/attr1"), c.getAttribute()); + }); + } + + @Test + public void flagClauseNegated() { + LDValue clauseJson = LDValue.buildObject().put("negate", true) + .put("attribute", "attr1").put("op", "in").put("values", LDValue.arrayOf()).build(); + assertClauseFromJson(clauseJson, c -> { + assertTrue(c.isNegate()); + }); + } + + @Test + public void deletedFlagIsConvertedToAndFromJsonPlaceholder() { + String json0 = LDValue.buildObject().put("version", 99) + .put("deleted", true).build().toJsonString(); + ItemDescriptor item = FEATURES.deserialize(json0); + assertNotNull(item); + assertNull(item.getItem()); + assertEquals(99, item.getVersion()); + + String json1 = FEATURES.serialize(item); + assertJsonEquals(json0, json1); + } + + @Test + public void segmentIsDeserializedWithAllProperties() { + String json0 = segmentWithAllPropertiesJson().toJsonString(); + Segment segment0 = (Segment)SEGMENTS.deserialize(json0).getItem(); + assertSegmentHasAllProperties(segment0); + + String json1 = SEGMENTS.serialize(new ItemDescriptor(segment0.getVersion(), segment0)); + Segment segment1 = (Segment)SEGMENTS.deserialize(json1).getItem(); + assertSegmentHasAllProperties(segment1); + } + + @Test + public void segmentIsDeserializedWithMinimalProperties() { + String json = LDValue.buildObject().put("key", "segment-key").put("version", 99).build().toJsonString(); + Segment segment = (Segment)SEGMENTS.deserialize(json).getItem(); + assertEquals("segment-key", segment.getKey()); + assertEquals(99, segment.getVersion()); + assertNotNull(segment.getIncluded()); + assertEquals(0, segment.getIncluded().size()); + assertNotNull(segment.getExcluded()); + assertEquals(0, segment.getExcluded().size()); + assertNotNull(segment.getRules()); + assertEquals(0, segment.getRules().size()); + assertFalse(segment.isUnbounded()); + assertNull(segment.getGeneration()); + } + + @Test + public void 
segmentUnboundedWithoutContextKind() { + LDValue segmentJson = LDValue.buildObject().put("key", "segmentkey").put("version", 1) + .put("unbounded", true).put("generation", 10).build(); + assertSegmentFromJson(segmentJson, s -> { + assertTrue(s.isUnbounded()); + assertNull(s.getUnboundedContextKind()); + assertEquals(Integer.valueOf(10), s.getGeneration()); + }); + } + + @Test + public void segmentUnboundedWithContextKind() { + LDValue segmentJson = LDValue.buildObject().put("key", "segmentkey").put("version", 1) + .put("unbounded", true).put("unboundedContextKind", "org").put("generation", 10).build(); + assertSegmentFromJson(segmentJson, s -> { + assertTrue(s.isUnbounded()); + assertEquals(ContextKind.of("org"), s.getUnboundedContextKind()); + assertEquals(Integer.valueOf(10), s.getGeneration()); + }); + } + + @Test + public void segmentRuleByWithoutRollout() { + LDValue ruleJson = LDValue.buildObject() + .put("clauses", LDValue.arrayOf( + LDValue.buildObject().put("attribute", "attr1").put("op", "in").put("values", LDValue.arrayOf(LDValue.of(3))).build() + )) + .build(); + assertSegmentRuleFromJson(ruleJson, r -> { + assertNull(r.getWeight()); + assertNull(r.getRolloutContextKind()); + assertNull(r.getBucketBy()); + assertEquals(1, r.getClauses().size()); + assertEquals(AttributeRef.fromLiteral("attr1"), r.getClauses().get(0).getAttribute()); + assertEquals(Operator.in, r.getClauses().get(0).getOp()); + assertEquals(ImmutableList.of(LDValue.of(3)), r.getClauses().get(0).getValues()); + }); + } + + @Test + public void segmentRuleRolloutBucketByWithoutContextKind() { + LDValue ruleJson = LDValue.buildObject() + .put("weight", 50000) + .put("bucketBy", "/attr1") + .put("clauses", LDValue.arrayOf()) + .build(); + assertSegmentRuleFromJson(ruleJson, r -> { + assertEquals(Integer.valueOf(50000), r.getWeight()); + assertNull(r.getRolloutContextKind()); + assertEquals(AttributeRef.fromLiteral("/attr1"), r.getBucketBy()); + }); + } + + @Test + public void 
segmentRuleRolloutWithContextKind() { + LDValue ruleJson = LDValue.buildObject() + .put("weight", 50000) + .put("rolloutContextKind", "org") + .put("bucketBy", "/address/street") + .put("clauses", LDValue.arrayOf()) + .build(); + assertSegmentRuleFromJson(ruleJson, r -> { + assertEquals(Integer.valueOf(50000), r.getWeight()); + assertEquals(ContextKind.of("org"), r.getRolloutContextKind()); + assertEquals(AttributeRef.fromPath("/address/street"), r.getBucketBy()); + }); + } + + @Test + public void deletedSegmentIsConvertedToAndFromJsonPlaceholder() { + String json0 = LDValue.buildObject().put("version", 99) + .put("deleted", true).build().toJsonString(); + ItemDescriptor item = SEGMENTS.deserialize(json0); + assertNotNull(item); + assertNull(item.getItem()); + assertEquals(99, item.getVersion()); + + String json1 = SEGMENTS.serialize(item); + assertJsonEquals(json0, json1); + } + + @Test + public void explicitNullsAreToleratedForNullableValues() { + // Nulls are not *always* valid-- it is OK to raise a deserialization error if a null appears + // where a non-nullable primitive type like boolean is expected, so for instance "version":null + // is invalid. But for anything that is optional, an explicit null is equivalent to omitting + // the property. Note: it would be nice to use Optional for things like this, but we can't + // do it because Gson does not play well with Optional. 
+ assertFlagFromJson( + baseBuilder("flag-key").put("offVariation", LDValue.ofNull()).build(), + flag -> assertNull(flag.getOffVariation()) + ); + assertFlagFromJson( + baseBuilder("flag-key") + .put("fallthrough", LDValue.buildObject().put("rollout", LDValue.ofNull()).build()) + .build(), + flag -> assertNull(flag.getFallthrough().getRollout()) + ); + assertFlagFromJson( + baseBuilder("flag-key") + .put("fallthrough", LDValue.buildObject().put("variation", LDValue.ofNull()).build()) + .build(), + flag -> assertNull(flag.getFallthrough().getVariation()) + ); + + // Nulls for list values should always be considered equivalent to an empty list, because + // that's how Go would serialize a nil slice + assertFlagFromJson( + baseBuilder("flag-key").put("prerequisites", LDValue.ofNull()).build(), + flag -> assertEquals(Collections.emptyList(), flag.getPrerequisites()) + ); + assertFlagFromJson( + baseBuilder("flag-key").put("rules", LDValue.ofNull()).build(), + flag -> assertEquals(Collections.emptyList(), flag.getRules()) + ); + assertFlagFromJson( + baseBuilder("flag-key").put("targets", LDValue.ofNull()).build(), + flag -> assertEquals(Collections.emptyList(), flag.getTargets()) + ); + assertFlagFromJson( + baseBuilder("flag-key") + .put("rules", LDValue.arrayOf( + LDValue.buildObject().put("clauses", LDValue.ofNull()).build() + )) + .build(), + flag -> assertEquals(Collections.emptyList(), flag.getRules().get(0).getClauses()) + ); + assertFlagFromJson( + baseBuilder("flag-key") + .put("rules", LDValue.arrayOf( + LDValue.buildObject().put("clauses", LDValue.arrayOf( + LDValue.buildObject().put("values", LDValue.ofNull()).build() + )).build() + )) + .build(), + flag -> assertEquals(Collections.emptyList(), + flag.getRules().get(0).getClauses().get(0).getValues()) + ); + assertFlagFromJson( + baseBuilder("flag-key") + .put("targets", LDValue.arrayOf( + LDValue.buildObject().put("values", LDValue.ofNull()).build() + )) + .build(), + flag -> 
assertEquals(Collections.emptySet(), flag.getTargets().get(0).getValues()) + ); + assertFlagFromJson( + baseBuilder("flag-key") + .put("fallthrough", LDValue.buildObject().put("rollout", + LDValue.buildObject().put("variations", LDValue.ofNull()).build() + ).build()) + .build(), + flag -> assertEquals(Collections.emptyList(), + flag.getFallthrough().getRollout().getVariations()) + ); + assertSegmentFromJson( + baseBuilder("segment-key").put("rules", LDValue.ofNull()).build(), + segment -> assertEquals(Collections.emptyList(), segment.getRules()) + ); + assertSegmentFromJson( + baseBuilder("segment-key") + .put("rules", LDValue.arrayOf( + LDValue.buildObject().put("clauses", LDValue.ofNull()).build() + )) + .build(), + segment -> assertEquals(Collections.emptyList(), segment.getRules().get(0).getClauses()) + ); + assertSegmentFromJson( + baseBuilder("segment-key").put("generation", LDValue.ofNull()).build(), + segment -> assertNull(segment.getGeneration()) + ); + + // Nulls in clause values are not useful since the clause can never match, but they're valid JSON; + // we should normalize them to LDValue.ofNull() to avoid potential NPEs down the line + assertFlagFromJson( + baseBuilder("flag-key") + .put("rules", LDValue.arrayOf( + LDValue.buildObject() + .put("clauses", LDValue.arrayOf( + LDValue.buildObject() + .put("values", LDValue.arrayOf(LDValue.ofNull())) + .build() + )) + .build() + )) + .build(), + flag -> assertEquals(LDValue.ofNull(), + flag.getRules().get(0).getClauses().get(0).getValues().get(0)) + ); + assertSegmentFromJson( + baseBuilder("segment-key") + .put("rules", LDValue.arrayOf( + LDValue.buildObject() + .put("clauses", LDValue.arrayOf( + LDValue.buildObject() + .put("values", LDValue.arrayOf(LDValue.ofNull())) + .build() + )) + .build() + )) + .build(), + segment -> assertEquals(LDValue.ofNull(), + segment.getRules().get(0).getClauses().get(0).getValues().get(0)) + ); + + // Similarly, null for a flag variation isn't a useful value but it is 
valid JSON + assertFlagFromJson( + baseBuilder("flagKey").put("variations", LDValue.arrayOf(LDValue.ofNull())).build(), + flag -> assertEquals(LDValue.ofNull(), flag.getVariations().get(0)) + ); + } + + @Test + public void parsingFullDataSetEmptyObject() throws Exception { + String json = "{}"; + FullDataSet allData = parseFullDataSet(jsonReaderFrom(json)); + assertDataSetEquals(DataBuilder.forStandardTypes().build(), allData); + } + + @Test + public void parsingFullDataSetFlagsOnly() throws Exception { + FeatureFlag flag = flagBuilder("flag1").version(1000).build(); + String json = "{\"flags\":{\"flag1\":" + serialize(flag) + "}}"; + FullDataSet allData = parseFullDataSet(jsonReaderFrom(json)); + assertDataSetEquals(DataBuilder.forStandardTypes().addAny(FEATURES, flag).build(), allData); + } + + @Test + public void parsingFullDataSetSegmentsOnly() throws Exception { + Segment segment = segmentBuilder("segment1").version(1000).build(); + String json = "{\"segments\":{\"segment1\":" + serialize(segment) + "}}"; + FullDataSet allData = parseFullDataSet(jsonReaderFrom(json)); + assertDataSetEquals(DataBuilder.forStandardTypes().addAny(SEGMENTS, segment).build(), allData); + } + + @Test + public void parsingFullDataSetFlagsAndSegments() throws Exception { + FeatureFlag flag1 = flagBuilder("flag1").version(1000).build(); + FeatureFlag flag2 = flagBuilder("flag2").version(1001).build(); + Segment segment1 = segmentBuilder("segment1").version(1000).build(); + Segment segment2 = segmentBuilder("segment2").version(1001).build(); + String json = "{\"flags\":{\"flag1\":" + serialize(flag1) + ",\"flag2\":" + serialize(flag2) + "}" + + ",\"segments\":{\"segment1\":" + serialize(segment1) + ",\"segment2\":" + serialize(segment2) + "}}"; + FullDataSet allData = parseFullDataSet(jsonReaderFrom(json)); + assertDataSetEquals(DataBuilder.forStandardTypes() + .addAny(FEATURES, flag1, flag2).addAny(SEGMENTS, segment1, segment2).build(), allData); + } + + private void 
assertFlagFromJson(LDValue flagJson, Consumer action) { + FeatureFlag flag = (FeatureFlag)FEATURES.deserialize(flagJson.toJsonString()).getItem(); + action.accept(flag); + } + + private void assertFlagRuleFromJson(LDValue ruleJson, Consumer action) { + LDValue flagJson = LDValue.buildObject().put("rules", LDValue.arrayOf(ruleJson)).build(); + assertFlagFromJson(flagJson, f -> { + action.accept(f.getRules().get(0)); + }); + } + + private void assertClauseFromJson(LDValue clauseJson, Consumer action) { + LDValue ruleJson = LDValue.buildObject().put("clauses", LDValue.arrayOf(clauseJson)).build(); + assertFlagRuleFromJson(ruleJson, r -> { + action.accept(r.getClauses().get(0)); + }); + } + + private void assertSegmentFromJson(LDValue segmentJson, Consumer action) { + Segment segment = (Segment)SEGMENTS.deserialize(segmentJson.toJsonString()).getItem(); + action.accept(segment); + } + + private void assertSegmentRuleFromJson(LDValue ruleJson, Consumer action) { + LDValue segmentJson = LDValue.buildObject().put("rules", LDValue.arrayOf(ruleJson)).build(); + assertSegmentFromJson(segmentJson, s -> { + action.accept(s.getRules().get(0)); + }); + } + + private ObjectBuilder baseBuilder(String key) { + return LDValue.buildObject().put("key", key).put("version", 99); + } + + private LDValue flagWithAllPropertiesJson() { + return LDValue.buildObject() + .put("key", "flag-key") + .put("version", 99) + .put("on", true) + .put("prerequisites", LDValue.buildArray() + .build()) + .put("salt", "123") + .put("targets", LDValue.buildArray() + .add(LDValue.buildObject() + .put("variation", 1) + .put("values", LDValue.buildArray().add("key1").add("key2").build()) + .build()) + .build()) + .put("contextTargets", LDValue.buildArray() + .add(LDValue.buildObject() + .put("contextKind", "org") + .put("variation", 1) + .put("values", LDValue.buildArray().add("key3").add("key4").build()) + .build()) + .build()) + .put("rules", LDValue.arrayOf()) + .put("fallthrough", LDValue.buildObject() + 
.put("variation", 1) + .build()) + .put("offVariation", 2) + .put("variations", LDValue.buildArray().add("a").add("b").add("c").build()) + .put("clientSide", true) + .put("trackEvents", true) + .put("trackEventsFallthrough", true) + .put("debugEventsUntilDate", 1000) + .put("samplingRatio", 2) + .put("migration", LDValue.buildObject().put("checkRatio", 3).build()) + .put("excludeFromSummaries", true) + .build(); + } + + private void assertFlagHasAllProperties(FeatureFlag flag) { + assertEquals("flag-key", flag.getKey()); + assertEquals(99, flag.getVersion()); + assertTrue(flag.isOn()); + assertEquals("123", flag.getSalt()); + + assertNotNull(flag.getTargets()); + assertEquals(1, flag.getTargets().size()); + Target t0 = flag.getTargets().get(0); + assertNull(t0.getContextKind()); + assertEquals(1, t0.getVariation()); + assertEquals(ImmutableSet.of("key1", "key2"), t0.getValues()); + + assertNotNull(flag.getContextTargets()); + assertEquals(1, flag.getContextTargets().size()); + Target ct0 = flag.getContextTargets().get(0); + assertEquals(ContextKind.of("org"), ct0.getContextKind()); + assertEquals(1, ct0.getVariation()); + assertEquals(ImmutableSet.of("key3", "key4"), ct0.getValues()); + + assertNotNull(flag.getRules()); + assertEquals(0, flag.getRules().size()); + + assertNotNull(flag.getFallthrough()); + assertEquals(Integer.valueOf(1), flag.getFallthrough().getVariation()); + assertNull(flag.getFallthrough().getRollout()); + assertEquals(Integer.valueOf(2), flag.getOffVariation()); + assertEquals(ImmutableList.of(LDValue.of("a"), LDValue.of("b"), LDValue.of("c")), flag.getVariations()); + assertTrue(flag.isClientSide()); + assertTrue(flag.isTrackEvents()); + assertTrue(flag.isTrackEventsFallthrough()); + assertEquals(Long.valueOf(1000), flag.getDebugEventsUntilDate()); + + assertNotNull(flag.getSamplingRatio()); + assertEquals(2l, flag.getSamplingRatio().longValue()); + assertNotNull(flag.getMigration()); + assertEquals(3l, 
flag.getMigration().getCheckRatio().longValue()); + assertTrue(flag.isExcludeFromSummaries()); + } + + private LDValue segmentWithAllPropertiesJson() { + return LDValue.buildObject() + .put("key", "segment-key") + .put("version", 99) + .put("included", LDValue.buildArray().add("key1").add("key2").build()) + .put("excluded", LDValue.buildArray().add("key3").add("key4").build()) + .put("includedContexts", LDValue.arrayOf( + LDValue.buildObject().put("contextKind", "kind1").put("values", LDValue.arrayOf(LDValue.of("key5"))).build())) + .put("excludedContexts", LDValue.arrayOf( + LDValue.buildObject().put("contextKind", "kind2").put("values", LDValue.arrayOf(LDValue.of("key6"))).build())) + .put("salt", "123") + .put("rules", LDValue.arrayOf()) + // Extra fields should be ignored + .put("fallthrough", LDValue.buildObject() + .put("variation", 1) + .build()) + .put("variations", LDValue.buildArray().add("a").add("b").add("c").build()) + .build(); + } + + private void assertSegmentHasAllProperties(Segment segment) { + assertEquals("segment-key", segment.getKey()); + assertEquals(99, segment.getVersion()); + assertEquals("123", segment.getSalt()); + assertEquals(ImmutableSet.of("key1", "key2"), segment.getIncluded()); + assertEquals(ImmutableSet.of("key3", "key4"), segment.getExcluded()); + + assertEquals(1, segment.getIncludedContexts().size()); + assertEquals(ContextKind.of("kind1"), segment.getIncludedContexts().get(0).getContextKind()); + assertEquals(ImmutableSet.of("key5"), segment.getIncludedContexts().get(0).getValues()); + assertEquals(1, segment.getExcludedContexts().size()); + assertEquals(ContextKind.of("kind2"), segment.getExcludedContexts().get(0).getContextKind()); + assertEquals(ImmutableSet.of("key6"), segment.getExcludedContexts().get(0).getValues()); + + assertNotNull(segment.getRules()); + assertEquals(0, segment.getRules().size()); + + assertFalse(segment.isUnbounded()); + assertNull(segment.getUnboundedContextKind()); + 
assertNull(segment.getGeneration()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelTest.java new file mode 100644 index 0000000..9cbbe9c --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataModelTest.java @@ -0,0 +1,111 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.DataModel.Target; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class DataModelTest { + @Test + public void flagPrerequisitesListCanNeverBeNull() { + assertEquals(ImmutableList.of(), flagWithAllZeroValuedFields().getPrerequisites()); + } + + @Test + public void flagTargetsListCanNeverBeNull() { + assertEquals(ImmutableList.of(), flagWithAllZeroValuedFields().getTargets()); + } + + @Test + public void flagContextTargetsListCanNeverBeNull() { + assertEquals(ImmutableList.of(), flagWithAllZeroValuedFields().getContextTargets()); + } + + @Test + public void flagRulesListCanNeverBeNull() { + assertEquals(ImmutableList.of(), flagWithAllZeroValuedFields().getRules()); + } + + @Test + public void flagVariationsListCanNeverBeNull() { + assertEquals(ImmutableList.of(), flagWithAllZeroValuedFields().getVariations()); + } + + @Test + public void targetKeysSetCanNeverBeNull() { + Target t = new Target(null, null, 0); + assertEquals(ImmutableSet.of(), t.getValues()); + } + + @Test + public void 
ruleClausesListCanNeverBeNull() { + Rule r = new Rule("id", null, null, null, false); + assertEquals(ImmutableList.of(), r.getClauses()); + } + + @Test + public void clauseValuesListCanNeverBeNull() { + Clause c = new Clause(null, null, null, null, false); + assertEquals(ImmutableList.of(), c.getValues()); + } + + @Test + public void segmentIncludedCanNeverBeNull() { + assertEquals(ImmutableSet.of(), segmentWithAllZeroValuedFields().getIncluded()); + } + + @Test + public void segmentExcludedCanNeverBeNull() { + assertEquals(ImmutableSet.of(), segmentWithAllZeroValuedFields().getExcluded()); + } + + @Test + public void segmentIncludedContextsCanNeverBeNull() { + assertEquals(ImmutableList.of(), segmentWithAllZeroValuedFields().getIncludedContexts()); + } + + @Test + public void segmentExcludedContextsCanNeverBeNull() { + assertEquals(ImmutableList.of(), segmentWithAllZeroValuedFields().getExcludedContexts()); + } + + @Test + public void segmentRulesListCanNeverBeNull() { + assertEquals(ImmutableList.of(), segmentWithAllZeroValuedFields().getRules()); + } + + @Test + public void segmentRuleClausesListCanNeverBeNull() { + SegmentRule r = new SegmentRule(null, null, null, null); + assertEquals(ImmutableList.of(), r.getClauses()); + } + + @Test + public void rolloutVariationsListCanNeverBeNull() { + Rollout r = new Rollout(null, null, null, RolloutKind.rollout, null); + assertEquals(ImmutableList.of(), r.getVariations()); + } + + private FeatureFlag flagWithAllZeroValuedFields() { + // This calls the empty constructor directly to simulate a condition where Gson did not set any fields + // and no preprocessing has happened. + return new FeatureFlag(); + } + + private Segment segmentWithAllZeroValuedFields() { + // This calls the empty constructor directly to simulate a condition where Gson did not set any fields + // and no preprocessing has happened. 
+ return new Segment(); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImplTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImplTest.java new file mode 100644 index 0000000..d7648a0 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceStatusProviderImplTest.java @@ -0,0 +1,114 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.Status; +import com.launchdarkly.testhelpers.ConcurrentHelpers; + +import org.junit.Test; + +import java.time.Duration; +import java.time.Instant; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.trySleep; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.sameInstance; + +@SuppressWarnings("javadoc") +public class DataSourceStatusProviderImplTest extends BaseTest { + private EventBroadcasterImpl broadcaster = + EventBroadcasterImpl.forDataSourceStatus(sharedExecutor, testLogger); + private DataSourceUpdatesImpl updates = new DataSourceUpdatesImpl( + TestComponents.inMemoryDataStore(), + null, + null, + broadcaster, + sharedExecutor, + null, + testLogger + ); + private DataSourceStatusProviderImpl statusProvider = new 
DataSourceStatusProviderImpl(broadcaster, updates); + + @Test + public void getStatus() throws Exception { + assertThat(statusProvider.getStatus().getState(), equalTo(State.INITIALIZING)); + + Instant timeBefore = Instant.now(); + ErrorInfo errorInfo = ErrorInfo.fromHttpError(500); + + updates.updateStatus(State.VALID, errorInfo); + + Status newStatus = statusProvider.getStatus(); + assertThat(newStatus.getState(), equalTo(State.VALID)); + assertThat(newStatus.getStateSince(), greaterThanOrEqualTo(timeBefore)); + assertThat(newStatus.getLastError(), sameInstance(errorInfo)); + } + + @Test + public void statusListeners() throws Exception { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + statusProvider.addStatusListener(statuses::add); + + BlockingQueue unwantedStatuses = new LinkedBlockingQueue<>(); + DataSourceStatusProvider.StatusListener listener2 = unwantedStatuses::add; + statusProvider.addStatusListener(listener2); + statusProvider.removeStatusListener(listener2); // testing that a listener can be unregistered + + updates.updateStatus(State.VALID, null); + + Status newStatus = ConcurrentHelpers.awaitValue(statuses, 500, TimeUnit.MILLISECONDS); + assertThat(newStatus.getState(), equalTo(State.VALID)); + + assertNoMoreValues(unwantedStatuses, 100, TimeUnit.MILLISECONDS); + } + + @Test + public void waitForStatusWithStatusAlreadyCorrect() throws Exception { + updates.updateStatus(State.VALID, null); + + boolean success = statusProvider.waitFor(State.VALID, Duration.ofMillis(500)); + assertThat(success, equalTo(true)); + } + + @Test + public void waitForStatusSucceeds() throws Exception { + new Thread(() -> { + trySleep(100, TimeUnit.MILLISECONDS); + updates.updateStatus(State.VALID, null); + }).start(); + + boolean success = statusProvider.waitFor(State.VALID, Duration.ZERO); + assertThat(success, equalTo(true)); + } + + @Test + public void waitForStatusTimesOut() throws Exception { + long timeStart = System.currentTimeMillis(); + boolean success = 
statusProvider.waitFor(State.VALID, Duration.ofMillis(300)); + long timeEnd = System.currentTimeMillis(); + assertThat(success, equalTo(false)); + assertThat(timeEnd - timeStart, greaterThanOrEqualTo(270L)); + } + + @Test + public void waitForStatusEndsIfShutDown() throws Exception { + new Thread(() -> { + updates.updateStatus(State.OFF, null); + }).start(); + + long timeStart = System.currentTimeMillis(); + boolean success = statusProvider.waitFor(State.VALID, Duration.ofMillis(500)); + long timeEnd = System.currentTimeMillis(); + assertThat(success, equalTo(false)); + assertThat(timeEnd - timeStart, lessThan(500L)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceUpdatesImplTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceUpdatesImplTest.java new file mode 100644 index 0000000..a4fd47c --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataSourceUpdatesImplTest.java @@ -0,0 +1,444 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.Status; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; + +import org.easymock.Capture; +import org.easymock.EasyMock; +import org.easymock.EasyMockSupport; +import org.junit.Test; + +import java.io.IOException; 
+import java.time.Duration; +import java.time.Instant; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.prerequisite; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.TestComponents.inMemoryDataStore; +import static com.launchdarkly.sdk.server.TestComponents.nullLogger; +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static com.launchdarkly.sdk.server.TestUtil.expectEvents; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static org.easymock.EasyMock.replay; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; + +@SuppressWarnings("javadoc") +public class DataSourceUpdatesImplTest { + // Note that these tests must use the actual data model types for flags and segments, rather than the + // TestItem type from DataStoreTestTypes, because the dependency behavior is based on the real data model. 
+ + private final EventBroadcasterImpl flagChangeBroadcaster = + EventBroadcasterImpl.forFlagChangeEvents(TestComponents.sharedExecutor, nullLogger); + private final EasyMockSupport mocks = new EasyMockSupport(); + + private DataSourceUpdatesImpl makeInstance(DataStore store) { + return makeInstance(store, null); + } + + private DataSourceUpdatesImpl makeInstance( + DataStore store, + EventBroadcasterImpl statusBroadcaster + ) { + return new DataSourceUpdatesImpl(store, null, flagChangeBroadcaster, statusBroadcaster, sharedExecutor, null, nullLogger); + } + + @Test + public void sendsEventsOnInitForNewlyAddedFlags() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + builder.addAny(FEATURES, flagBuilder("flag2").version(1).build()) + .addAny(SEGMENTS, segmentBuilder("segment2").version(1).build()); + // the new segment triggers no events since nothing is using it + + storeUpdates.init(builder.build()); + + expectEvents(eventSink, "flag2"); + } + + @Test + public void sendsEventOnUpdateForNewlyAddedFlag() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + storeUpdates.upsert(FEATURES, "flag2", new ItemDescriptor(1, flagBuilder("flag2").version(1).build())); + + expectEvents(eventSink, 
"flag2"); + } + + @Test + public void sendsEventsOnInitForUpdatedFlags() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build(), + segmentBuilder("segment2").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + builder.addAny(FEATURES, flagBuilder("flag2").version(2).build()) // modified flag + .addAny(SEGMENTS, segmentBuilder("segment2").version(2).build()); // modified segment, but it's irrelevant + storeUpdates.init(builder.build()); + + expectEvents(eventSink, "flag2"); + } + + @Test + public void sendsEventOnUpdateForUpdatedFlag() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + storeUpdates.upsert(FEATURES, "flag2", new ItemDescriptor(2, flagBuilder("flag2").version(2).build())); + + expectEvents(eventSink, "flag2"); + } + + @Test + public void doesNotSendsEventOnUpdateIfItemWasNotReallyUpdated() throws Exception { + DataStore store = inMemoryDataStore(); + DataModel.FeatureFlag flag1 = flagBuilder("flag1").version(1).build(); + DataModel.FeatureFlag flag2 = flagBuilder("flag2").version(1).build(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, flag1, flag2); + + DataSourceUpdatesImpl storeUpdates = 
makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + storeUpdates.upsert(FEATURES, flag2.getKey(), new ItemDescriptor(flag2.getVersion(), flag2)); + + assertNoMoreValues(eventSink, 100, TimeUnit.MILLISECONDS); + } + + @Test + public void sendsEventsOnInitForDeletedFlags() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + builder.remove(FEATURES, "flag2"); + builder.remove(SEGMENTS, "segment1"); // deleted segment isn't being used so it's irrelevant + // note that the full data set for init() will never include deleted item placeholders + + storeUpdates.init(builder.build()); + + expectEvents(eventSink, "flag2"); + } + + @Test + public void sendsEventOnUpdateForDeletedFlag() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue events = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(events::add); + + storeUpdates.upsert(FEATURES, "flag2", ItemDescriptor.deletedItem(2)); + + expectEvents(events, "flag2"); + } + + @Test + public void sendsEventsOnInitForFlagsWhosePrerequisitesChanged() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder 
builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).prerequisites(prerequisite("flag1", 0)).build(), + flagBuilder("flag3").version(1).build(), + flagBuilder("flag4").version(1).prerequisites(prerequisite("flag1", 0)).build(), + flagBuilder("flag5").version(1).prerequisites(prerequisite("flag4", 0)).build(), + flagBuilder("flag6").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + builder.addAny(FEATURES, flagBuilder("flag1").version(2).build()); + storeUpdates.init(builder.build()); + + expectEvents(eventSink, "flag1", "flag2", "flag4", "flag5"); + } + + @Test + public void sendsEventsOnUpdateForFlagsWhosePrerequisitesChanged() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).prerequisites(prerequisite("flag1", 0)).build(), + flagBuilder("flag3").version(1).build(), + flagBuilder("flag4").version(1).prerequisites(prerequisite("flag1", 0)).build(), + flagBuilder("flag5").version(1).prerequisites(prerequisite("flag4", 0)).build(), + flagBuilder("flag6").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + storeUpdates.upsert(FEATURES, "flag1", new ItemDescriptor(2, flagBuilder("flag1").version(2).build())); + + expectEvents(eventSink, "flag1", "flag2", "flag4", "flag5"); + } + + @Test + public void sendsEventsOnInitForFlagsWhoseSegmentsChanged() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + 
flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).rules( + ruleBuilder().clauses( + ModelBuilders.clauseMatchingSegment("segment1") + ).build() + ).build(), + flagBuilder("flag3").version(1).build(), + flagBuilder("flag4").version(1).prerequisites(prerequisite("flag2", 0)).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build(), + segmentBuilder("segment2").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + storeUpdates.upsert(SEGMENTS, "segment1", new ItemDescriptor(2, segmentBuilder("segment1").version(2).build())); + + expectEvents(eventSink, "flag2", "flag4"); + } + + @Test + public void sendsEventsOnUpdateForFlagsWhoseSegmentsChanged() throws Exception { + DataStore store = inMemoryDataStore(); + DataBuilder builder = new DataBuilder() + .addAny(FEATURES, + flagBuilder("flag1").version(1).build(), + flagBuilder("flag2").version(1).rules( + ruleBuilder().clauses( + ModelBuilders.clauseMatchingSegment("segment1") + ).build() + ).build(), + flagBuilder("flag3").version(1).build(), + flagBuilder("flag4").version(1).prerequisites(prerequisite("flag2", 0)).build()) + .addAny(SEGMENTS, + segmentBuilder("segment1").version(1).build(), + segmentBuilder("segment2").version(1).build()); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + + storeUpdates.init(builder.build()); + + BlockingQueue eventSink = new LinkedBlockingQueue<>(); + flagChangeBroadcaster.register(eventSink::add); + + builder.addAny(SEGMENTS, segmentBuilder("segment1").version(2).build()); + storeUpdates.init(builder.build()); + + expectEvents(eventSink, "flag2", "flag4"); + } + + @Test + public void dataSetIsPassedToDataStoreInCorrectOrder() throws Exception { + // The logic for this is already tested in DataModelDependenciesTest, but here we are verifying + // 
that DataSourceUpdatesImpl is actually using DataModelDependencies. + Capture> captureData = Capture.newInstance(); + DataStore store = mocks.createStrictMock(DataStore.class); + store.init(EasyMock.capture(captureData)); + replay(store); + + DataSourceUpdatesImpl storeUpdates = makeInstance(store); + storeUpdates.init(DataModelDependenciesTest.DEPENDENCY_ORDERING_TEST_DATA); + + DataModelDependenciesTest.verifySortedData(captureData.getValue(), + DataModelDependenciesTest.DEPENDENCY_ORDERING_TEST_DATA); + + } + + @Test + public void updateStatusBroadcastsNewStatus() { + EventBroadcasterImpl broadcaster = + EventBroadcasterImpl.forDataSourceStatus(sharedExecutor, nullLogger); + DataSourceUpdatesImpl updates = makeInstance(inMemoryDataStore(), broadcaster); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + broadcaster.register(statuses::add); + + Instant timeBeforeUpdate = Instant.now(); + ErrorInfo errorInfo = ErrorInfo.fromHttpError(401); + updates.updateStatus(State.OFF, errorInfo); + + Status status = awaitValue(statuses, 500, TimeUnit.MILLISECONDS); + + assertThat(status.getState(), is(State.OFF)); + assertThat(status.getStateSince(), greaterThanOrEqualTo(timeBeforeUpdate)); + assertThat(status.getLastError(), is(errorInfo)); + } + + @Test + public void updateStatusKeepsStateUnchangedIfStateWasInitializingAndNewStateIsInterrupted() { + EventBroadcasterImpl broadcaster = + EventBroadcasterImpl.forDataSourceStatus(sharedExecutor, nullLogger); + DataSourceUpdatesImpl updates = makeInstance(inMemoryDataStore(), broadcaster); + + assertThat(updates.getLastStatus().getState(), is(State.INITIALIZING)); + Instant originalTime = updates.getLastStatus().getStateSince(); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + broadcaster.register(statuses::add); + + ErrorInfo errorInfo = ErrorInfo.fromHttpError(401); + updates.updateStatus(State.INTERRUPTED, errorInfo); + + Status status = awaitValue(statuses, 500, TimeUnit.MILLISECONDS); + + 
assertThat(status.getState(), is(State.INITIALIZING)); + assertThat(status.getStateSince(), is(originalTime)); + assertThat(status.getLastError(), is(errorInfo)); + } + + @Test + public void updateStatusDoesNothingIfParametersHaveNoNewData() { + EventBroadcasterImpl broadcaster = + EventBroadcasterImpl.forDataSourceStatus(sharedExecutor, nullLogger); + DataSourceUpdatesImpl updates = makeInstance(inMemoryDataStore(), broadcaster); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + broadcaster.register(statuses::add); + + updates.updateStatus(null, null); + updates.updateStatus(State.INITIALIZING, null); + + assertNoMoreValues(statuses, 100, TimeUnit.MILLISECONDS); + } + + @Test + public void outageTimeoutLogging() throws Exception { + BlockingQueue outageErrors = new LinkedBlockingQueue<>(); + Duration outageTimeout = Duration.ofMillis(100); + + DataSourceUpdatesImpl updates = new DataSourceUpdatesImpl( + inMemoryDataStore(), + null, + flagChangeBroadcaster, + EventBroadcasterImpl.forDataSourceStatus(sharedExecutor, nullLogger), + sharedExecutor, + outageTimeout, + nullLogger + ); + updates.onOutageErrorLog = outageErrors::add; + + // simulate an outage + updates.updateStatus(State.INTERRUPTED, ErrorInfo.fromHttpError(500)); + + // but recover from it immediately + updates.updateStatus(State.VALID, null); + + // wait till the timeout would have elapsed - no special message should be logged + assertNoMoreValues(outageErrors, outageTimeout.plus(Duration.ofMillis(20)).toMillis(), TimeUnit.MILLISECONDS); + + // simulate another outage + updates.updateStatus(State.INTERRUPTED, ErrorInfo.fromHttpError(501)); + updates.updateStatus(State.INTERRUPTED, ErrorInfo.fromHttpError(502)); + updates.updateStatus(State.INTERRUPTED, ErrorInfo.fromException(ErrorKind.NETWORK_ERROR, new IOException("x"))); + updates.updateStatus(State.INTERRUPTED, ErrorInfo.fromHttpError(501)); + + String errorsDesc = awaitValue(outageErrors, 250, TimeUnit.MILLISECONDS); // timing is 
approximate + assertThat(errorsDesc, containsString("NETWORK_ERROR (1 time)")); + assertThat(errorsDesc, containsString("ERROR_RESPONSE(501) (2 times)")); + assertThat(errorsDesc, containsString("ERROR_RESPONSE(502) (1 time)")); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImplTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImplTest.java new file mode 100644 index 0000000..abbc5a4 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreStatusProviderImplTest.java @@ -0,0 +1,124 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.Status; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; + +import org.junit.Test; + +import java.io.IOException; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +@SuppressWarnings("javadoc") +public class DataStoreStatusProviderImplTest extends BaseTest { + private EventBroadcasterImpl broadcaster = + 
EventBroadcasterImpl.forDataStoreStatus(sharedExecutor, testLogger); + private MockDataStore store = new MockDataStore(); + private DataStoreUpdatesImpl updates = new DataStoreUpdatesImpl(broadcaster); + private DataStoreStatusProviderImpl statusProvider = new DataStoreStatusProviderImpl(store, updates); + + @Test + public void getStatus() throws Exception { + assertThat(statusProvider.getStatus(), equalTo(new Status(true, false))); + + updates.updateStatus(new Status(false, false)); + + assertThat(statusProvider.getStatus(), equalTo(new Status(false, false))); + + updates.updateStatus(new Status(false, true)); + + assertThat(statusProvider.getStatus(), equalTo(new Status(false, true))); + } + + @Test + public void statusListeners() throws Exception { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + statusProvider.addStatusListener(statuses::add); + + BlockingQueue unwantedStatuses = new LinkedBlockingQueue<>(); + DataStoreStatusProvider.StatusListener listener2 = unwantedStatuses::add; + statusProvider.addStatusListener(listener2); + statusProvider.removeStatusListener(listener2); // testing that a listener can be unregistered + + updates.updateStatus(new Status(false, false)); + + Status newStatus = awaitValue(statuses, 500, TimeUnit.MILLISECONDS); + assertThat(newStatus, equalTo(new Status(false, false))); + + assertNoMoreValues(unwantedStatuses, 100, TimeUnit.MILLISECONDS); + } + + @Test + public void isStatusMonitoringEnabled() { + assertThat(statusProvider.isStatusMonitoringEnabled(), equalTo(false)); + + store.statusMonitoringEnabled = true; + + assertThat(statusProvider.isStatusMonitoringEnabled(), equalTo(true)); + } + + @Test + public void cacheStats() { + assertThat(statusProvider.getCacheStats(), nullValue()); + + CacheStats stats = new CacheStats(0, 0, 0, 0, 0, 0); + store.cacheStats = stats; + + assertThat(statusProvider.getCacheStats(), equalTo(stats)); + } + + private static final class MockDataStore implements DataStore { + volatile 
boolean statusMonitoringEnabled; + volatile CacheStats cacheStats; + + @Override + public void close() throws IOException {} + + @Override + public void init(FullDataSet allData) {} + + @Override + public ItemDescriptor get(DataKind kind, String key) { + return null; + } + + @Override + public KeyedItems getAll(DataKind kind) { + return null; + } + + @Override + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + return false; + } + + @Override + public boolean isInitialized() { + return false; + } + + @Override + public boolean isStatusMonitoringEnabled() { + return statusMonitoringEnabled; + } + + @Override + public CacheStats getCacheStats() { + return cacheStats; + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestBase.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestBase.java new file mode 100644 index 0000000..1b9df6c --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestBase.java @@ -0,0 +1,168 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.DataStoreTestTypes.TestItem; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.Map; + +import static com.launchdarkly.sdk.server.DataStoreTestTypes.OTHER_TEST_ITEMS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.TEST_ITEMS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toItemsMap; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +/** + * Basic tests for FeatureStore implementations. 
For database implementations, use the more + * comprehensive FeatureStoreDatabaseTestBase. + */ +@SuppressWarnings("javadoc") +public abstract class DataStoreTestBase { + + protected DataStore store; + + protected TestItem item1 = new TestItem("key1", "first", 10); + + protected TestItem item2 = new TestItem("key2", "second", 10); + + protected TestItem otherItem1 = new TestItem("key1", "other-first", 11); + + /** + * Test subclasses must override this method to create an instance of the feature store class. + * @return + */ + protected abstract DataStore makeStore(); + + @Before + public void setup() { + store = makeStore(); + } + + @After + public void teardown() throws Exception { + store.close(); + } + + @Test + public void storeNotInitializedBeforeInit() { + assertFalse(store.isInitialized()); + } + + @Test + public void storeInitializedAfterInit() { + store.init(new DataBuilder().build()); + assertTrue(store.isInitialized()); + } + + @Test + public void initCompletelyReplacesPreviousData() { + FullDataSet allData = + new DataBuilder().add(TEST_ITEMS, item1, item2).add(OTHER_TEST_ITEMS, otherItem1).build(); + store.init(allData); + + TestItem item2v2 = item2.withVersion(item2.version + 1); + allData = new DataBuilder().add(TEST_ITEMS, item2v2).add(OTHER_TEST_ITEMS).build(); + store.init(allData); + + assertNull(store.get(TEST_ITEMS, item1.key)); + assertEquals(item2v2.toItemDescriptor(), store.get(TEST_ITEMS, item2.key)); + assertNull(store.get(OTHER_TEST_ITEMS, otherItem1.key)); + } + + @Test + public void getExistingItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + assertEquals(item1.toItemDescriptor(), store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void getNonexistingItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + assertNull(store.get(TEST_ITEMS, "biz")); + } + + @Test + public void getAll() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).add(OTHER_TEST_ITEMS, 
otherItem1).build()); + Map items = toItemsMap(store.getAll(TEST_ITEMS)); + assertEquals(2, items.size()); + assertEquals(item1.toItemDescriptor(), items.get(item1.key)); + assertEquals(item2.toItemDescriptor(), items.get(item2.key)); + } + + @Test + public void getAllWithDeletedItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + ItemDescriptor deletedItem = ItemDescriptor.deletedItem(item1.getVersion() + 1); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + Map items = toItemsMap(store.getAll(TEST_ITEMS)); + assertEquals(2, items.size()); + assertEquals(deletedItem, items.get(item1.key)); + assertEquals(item2.toItemDescriptor(), items.get(item2.key)); + } + + @Test + public void upsertWithNewerVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + TestItem newVer = item1.withVersion(item1.version + 1); + store.upsert(TEST_ITEMS, item1.key, newVer.toItemDescriptor()); + assertEquals(newVer.toItemDescriptor(), store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void upsertWithOlderVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + TestItem oldVer = item1.withVersion(item1.version - 1); + store.upsert(TEST_ITEMS, item1.key, oldVer.toItemDescriptor()); + assertEquals(item1.toItemDescriptor(), store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void upsertNewItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + TestItem newItem = new TestItem("new-name", "new-key", 99); + store.upsert(TEST_ITEMS, newItem.key, newItem.toItemDescriptor()); + assertEquals(newItem.toItemDescriptor(), store.get(TEST_ITEMS, newItem.key)); + } + + @Test + public void deleteWithNewerVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + ItemDescriptor deletedItem = ItemDescriptor.deletedItem(item1.version + 1); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + assertEquals(deletedItem, store.get(TEST_ITEMS, item1.key)); + } 
+ + @Test + public void deleteWithOlderVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + ItemDescriptor deletedItem = ItemDescriptor.deletedItem(item1.version - 1); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + assertEquals(item1.toItemDescriptor(), store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void deleteUnknownItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + ItemDescriptor deletedItem = ItemDescriptor.deletedItem(item1.version - 1); + store.upsert(TEST_ITEMS, "biz", deletedItem); + assertEquals(deletedItem, store.get(TEST_ITEMS, "biz")); + } + + @Test + public void upsertOlderVersionAfterDelete() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + ItemDescriptor deletedItem = ItemDescriptor.deletedItem(item1.version + 1); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + store.upsert(TEST_ITEMS, item1.key, item1.toItemDescriptor()); + assertEquals(deletedItem, store.get(TEST_ITEMS, item1.key)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestTypes.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestTypes.java new file mode 100644 index 0000000..4e5bd0f --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreTestTypes.java @@ -0,0 +1,211 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.server.DataModel.VersionedData; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import 
com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor; + +import java.util.AbstractMap; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Objects; + +import static com.google.common.collect.Iterables.transform; +import static com.launchdarkly.sdk.server.TestUtil.TEST_GSON_INSTANCE; + +@SuppressWarnings("javadoc") +public class DataStoreTestTypes { + public static Map> toDataMap(FullDataSet data) { + return ImmutableMap.copyOf(transform(data.getData(), e -> new AbstractMap.SimpleEntry<>(e.getKey(), toItemsMap(e.getValue())))); + } + + public static Map toItemsMap(KeyedItems data) { + return ImmutableMap.copyOf(data.getItems()); + } + + public static SerializedItemDescriptor toSerialized(DataKind kind, ItemDescriptor item) { + boolean isDeleted = item.getItem() == null; + return new SerializedItemDescriptor(item.getVersion(), isDeleted, kind.serialize(item)); + } + + public static class TestItem implements VersionedData { + public final String key; + public final String name; + public final int version; + public final boolean deleted; + + public TestItem(String key, String name, int version, boolean deleted) { + this.key = key; + this.name = name; + this.version = version; + this.deleted = deleted; + } + + public TestItem(String key, String name, int version) { + this(key, name, version, false); + } + + public TestItem(String key, int version) { + this(key, "", version); + } + + @Override + public String getKey() { + return key; + } + + @Override + public int getVersion() { + return version; + } + + public boolean isDeleted() { + return deleted; + } + + public TestItem withName(String newName) { + return new TestItem(key, newName, version); + } + + public TestItem withVersion(int newVersion) { + return new TestItem(key, name, newVersion); + } + + public ItemDescriptor toItemDescriptor() { + return new ItemDescriptor(version, this); + } + + public Map.Entry toKeyedItemDescriptor() { + return 
new AbstractMap.SimpleEntry<>(key, toItemDescriptor()); + } + + public SerializedItemDescriptor toSerializedItemDescriptor() { + return toSerialized(TEST_ITEMS, toItemDescriptor()); + } + + @Override + public boolean equals(Object other) { + if (other instanceof TestItem) { + TestItem o = (TestItem)other; + return Objects.equals(name, o.name) && + Objects.equals(key, o.key) && + version == o.version; + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(name, key, version); + } + + @Override + public String toString() { + return "TestItem(" + name + "," + key + "," + version + ")"; + } + } + + public static final DataKind TEST_ITEMS = new DataKind("test-items", + DataStoreTestTypes::serializeTestItem, + DataStoreTestTypes::deserializeTestItem); + + public static final DataKind OTHER_TEST_ITEMS = new DataKind("other-test-items", + DataStoreTestTypes::serializeTestItem, + DataStoreTestTypes::deserializeTestItem); + + private static String serializeTestItem(ItemDescriptor item) { + if (item.getItem() == null) { + return "DELETED:" + item.getVersion(); + } + return TEST_GSON_INSTANCE.toJson(item.getItem()); + } + + private static ItemDescriptor deserializeTestItem(String s) { + if (s.startsWith("DELETED:")) { + return ItemDescriptor.deletedItem(Integer.parseInt(s.substring(8))); + } + TestItem ti = TEST_GSON_INSTANCE.fromJson(s, TestItem.class); + return new ItemDescriptor(ti.version, ti); + } + + public static class DataBuilder { + private Map> data = new HashMap<>(); + + public static DataBuilder forStandardTypes() { + // This just ensures that we use realistic-looking data sets in our tests when simulating + // an LD service response, which will always include "flags" and "segments" even if empty. + DataBuilder ret = new DataBuilder(); + ret.add(DataModel.FEATURES); + ret.add(DataModel.SEGMENTS); + return ret; + } + + public DataBuilder add(DataKind kind, TestItem... 
items) { + return addAny(kind, items); + } + + // This is defined separately because test code that's outside of this package can't see DataModel.VersionedData + public DataBuilder addAny(DataKind kind, VersionedData... items) { + Map itemsMap = data.get(kind); + if (itemsMap == null) { + itemsMap = new LinkedHashMap<>(); // use LinkedHashMap to preserve insertion order + data.put(kind, itemsMap); + } + for (VersionedData item: items) { + itemsMap.put(item.getKey(), new ItemDescriptor(item.getVersion(), item)); + } + return this; + } + + public DataBuilder remove(DataKind kind, String key) { + if (data.get(kind) != null) { + data.get(kind).remove(key); + } + return this; + } + + public FullDataSet build() { + return new FullDataSet<>( + ImmutableMap.copyOf( + Maps.transformValues(data, itemsMap -> + new KeyedItems<>(ImmutableList.copyOf(itemsMap.entrySet())) + )).entrySet() + ); + } + + public FullDataSet buildSerialized() { + return new FullDataSet<>( + ImmutableMap.copyOf( + Maps.transformEntries(data, (kind, itemsMap) -> + new KeyedItems<>( + ImmutableMap.copyOf( + Maps.transformValues(itemsMap, item -> DataStoreTestTypes.toSerialized(kind, item)) + ).entrySet() + ) + ) + ).entrySet()); + } + + public LDValue buildJson() { + FullDataSet allData = buildSerialized(); + ObjectBuilder allBuilder = LDValue.buildObject(); + for (Map.Entry> coll: allData.getData()) { + String namespace = coll.getKey().getName().equals("features") ? 
"flags" : coll.getKey().getName(); + ObjectBuilder itemsBuilder = LDValue.buildObject(); + for (Map.Entry item: coll.getValue().getItems()) { + itemsBuilder.put(item.getKey(), LDValue.parse(item.getValue().getSerializedItem())); + } + allBuilder.put(namespace, itemsBuilder.build()); + } + return allBuilder.build(); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreUpdatesImplTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreUpdatesImplTest.java new file mode 100644 index 0000000..b2176ba --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DataStoreUpdatesImplTest.java @@ -0,0 +1,56 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.Status; + +import org.junit.Test; + +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +@SuppressWarnings("javadoc") +public class DataStoreUpdatesImplTest extends BaseTest { + private EventBroadcasterImpl broadcaster = + EventBroadcasterImpl.forDataStoreStatus(sharedExecutor, testLogger); + private final DataStoreUpdatesImpl updates = new DataStoreUpdatesImpl(broadcaster); + + @Test + public void updateStatusBroadcastsNewStatus() { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + broadcaster.register(statuses::add); + + updates.updateStatus(new Status(false, false)); + + Status newStatus = awaitValue(statuses, 200, TimeUnit.MILLISECONDS); + assertThat(newStatus, equalTo(new Status(false, 
false))); + + assertNoMoreValues(statuses, 100, TimeUnit.MILLISECONDS); + } + + @Test + public void updateStatusDoesNothingIfNewStatusIsSame() { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + broadcaster.register(statuses::add); + + updates.updateStatus(new Status(true, false)); + + assertNoMoreValues(statuses, 100, TimeUnit.MILLISECONDS); + } + + @Test + public void updateStatusDoesNothingIfNewStatusIsNull() { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + broadcaster.register(statuses::add); + + updates.updateStatus(null); + + assertNoMoreValues(statuses, 100, TimeUnit.MILLISECONDS); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DefaultFeatureRequestorTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DefaultFeatureRequestorTest.java new file mode 100644 index 0000000..e35afd1 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/DefaultFeatureRequestorTest.java @@ -0,0 +1,247 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.internal.http.HttpErrors.HttpErrorException; +import com.launchdarkly.sdk.internal.http.HttpProperties; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.sdk.server.subsystems.SerializationException; +import com.launchdarkly.testhelpers.httptest.Handler; +import com.launchdarkly.testhelpers.httptest.Handlers; +import com.launchdarkly.testhelpers.httptest.HttpServer; +import com.launchdarkly.testhelpers.httptest.RequestInfo; +import com.launchdarkly.testhelpers.httptest.SpecialHttpConfigurations; + 
+import org.junit.Test; + +import java.net.URI; +import java.util.Map; + +import static com.launchdarkly.sdk.server.JsonHelpers.serialize; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestUtil.assertDataSetEquals; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class DefaultFeatureRequestorTest extends BaseTest { + private static final String sdkKey = "sdk-key"; + private static final String flag1Key = "flag1"; + private static final FeatureFlag flag1 = flagBuilder(flag1Key).version(1000).build(); + private static final String flag1Json = serialize(flag1); + private static final String flagsJson = "{\"" + flag1Key + "\":" + flag1Json + "}"; + private static final String segment1Key = "segment1"; + private static final Segment segment1 = segmentBuilder(segment1Key).version(2000).build(); + private static final String segment1Json = serialize(segment1); + private static final String segmentsJson = "{\"" + segment1Key + "\":" + segment1Json + "}"; + private static final String allDataJson = "{\"flags\":" + flagsJson + ",\"segments\":" + segmentsJson + "}"; + + private DefaultFeatureRequestor makeRequestor(HttpServer server) { + return makeRequestor(server, LDConfig.DEFAULT); + // We can always use LDConfig.DEFAULT unless we need to modify HTTP properties, since DefaultFeatureRequestor + // no longer uses the deprecated LDConfig.baseUri property. 
+ } + + private DefaultFeatureRequestor makeRequestor(HttpServer server, LDConfig config) { + return new DefaultFeatureRequestor(makeHttpConfig(config), server.getUri(), null, testLogger); + } + + private HttpProperties makeHttpConfig(LDConfig config) { + return ComponentsImpl.toHttpProperties(config.http.build(new ClientContext(sdkKey))); + } + + private void verifyExpectedData(FullDataSet data) { + assertNotNull(data); + assertDataSetEquals(DataBuilder.forStandardTypes() + .addAny(DataModel.FEATURES, flag1).addAny(DataModel.SEGMENTS, segment1).build(), + data); + } + + @Test + public void requestAllData() throws Exception { + Handler resp = Handlers.bodyJson(allDataJson); + + try (HttpServer server = HttpServer.start(resp)) { + try (DefaultFeatureRequestor r = makeRequestor(server)) { + FullDataSet data = r.getAllData(true); + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("/sdk/latest-all", req.getPath()); + verifyHeaders(req); + + verifyExpectedData(data); + } + } + } + + @Test + public void responseIsCached() throws Exception { + Handler cacheableResp = Handlers.all( + Handlers.header("ETag", "aaa"), + Handlers.header("Cache-Control", "max-age=0"), + Handlers.bodyJson(allDataJson) + ); + Handler cachedResp = Handlers.status(304); + Handler cacheableThenCached = Handlers.sequential(cacheableResp, cachedResp); + + try (HttpServer server = HttpServer.start(cacheableThenCached)) { + try (DefaultFeatureRequestor r = makeRequestor(server)) { + FullDataSet data1 = r.getAllData(true); + verifyExpectedData(data1); + + RequestInfo req1 = server.getRecorder().requireRequest(); + assertEquals("/sdk/latest-all", req1.getPath()); + verifyHeaders(req1); + assertNull(req1.getHeader("If-None-Match")); + + FullDataSet data2 = r.getAllData(false); + assertNull(data2); + + RequestInfo req2 = server.getRecorder().requireRequest(); + assertEquals("/sdk/latest-all", req2.getPath()); + verifyHeaders(req2); + assertEquals("aaa", 
req2.getHeader("If-None-Match")); + } + } + } + + @Test + public void responseIsCachedButWeWantDataAnyway() throws Exception { + Handler cacheableResp = Handlers.all( + Handlers.header("ETag", "aaa"), + Handlers.header("Cache-Control", "max-age=0"), + Handlers.bodyJson(allDataJson) + ); + Handler cachedResp = Handlers.status(304); + Handler cacheableThenCached = Handlers.sequential(cacheableResp, cachedResp); + + try (HttpServer server = HttpServer.start(cacheableThenCached)) { + try (DefaultFeatureRequestor r = makeRequestor(server)) { + FullDataSet data1 = r.getAllData(true); + verifyExpectedData(data1); + + RequestInfo req1 = server.getRecorder().requireRequest(); + assertEquals("/sdk/latest-all", req1.getPath()); + verifyHeaders(req1); + assertNull(req1.getHeader("If-None-Match")); + + FullDataSet data2 = r.getAllData(true); + verifyExpectedData(data2); + + RequestInfo req2 = server.getRecorder().requireRequest(); + assertEquals("/sdk/latest-all", req2.getPath()); + verifyHeaders(req2); + assertEquals("aaa", req2.getHeader("If-None-Match")); + } + } + } + + @Test + public void testSpecialHttpConfigurations() throws Exception { + Handler handler = Handlers.bodyJson(allDataJson); + + SpecialHttpConfigurations.testAll(handler, + (URI serverUri, SpecialHttpConfigurations.Params params) -> { + LDConfig config = new LDConfig.Builder().http(TestUtil.makeHttpConfigurationFromTestParams(params)).build(); + try (DefaultFeatureRequestor r = new DefaultFeatureRequestor(makeHttpConfig(config), serverUri, null, testLogger)) { + FullDataSet data = r.getAllData(false); + verifyExpectedData(data); + return true; + } catch (SerializationException e) { + throw new SpecialHttpConfigurations.UnexpectedResponseException(e.toString()); + } catch (HttpErrorException e) { + throw new SpecialHttpConfigurations.UnexpectedResponseException(e.toString()); + } + }); + } + + @Test + public void baseUriDoesNotNeedTrailingSlash() throws Exception { + Handler resp = 
Handlers.bodyJson(allDataJson); + + try (HttpServer server = HttpServer.start(resp)) { + try (DefaultFeatureRequestor r = new DefaultFeatureRequestor(makeHttpConfig(LDConfig.DEFAULT), server.getUri(), null, testLogger)) { + FullDataSet data = r.getAllData(true); + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("/sdk/latest-all", req.getPath()); + verifyHeaders(req); + + verifyExpectedData(data); + } + } + } + + @Test + public void baseUriCanHaveContextPath() throws Exception { + Handler resp = Handlers.bodyJson(allDataJson); + + try (HttpServer server = HttpServer.start(resp)) { + URI uri = server.getUri().resolve("/context/path"); + + try (DefaultFeatureRequestor r = new DefaultFeatureRequestor(makeHttpConfig(LDConfig.DEFAULT), uri, null, testLogger)) { + FullDataSet data = r.getAllData(true); + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("/context/path/sdk/latest-all", req.getPath()); + verifyHeaders(req); + + verifyExpectedData(data); + } + } + } + + @Test + public void pollingUriCanHavePayload() throws Exception { + Handler resp = Handlers.bodyJson(allDataJson); + + try (HttpServer server = HttpServer.start(resp)) { + URI uri = server.getUri().resolve("/context/path"); + + try (DefaultFeatureRequestor r = new DefaultFeatureRequestor(makeHttpConfig(LDConfig.DEFAULT), uri, "myFilter", testLogger)) { + FullDataSet data = r.getAllData(true); + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("?filter=myFilter", req.getQuery()); + verifyHeaders(req); + + verifyExpectedData(data); + } + } + } + + @Test + public void ignoreEmptyFilter() throws Exception { + Handler resp = Handlers.bodyJson(allDataJson); + + try (HttpServer server = HttpServer.start(resp)) { + URI uri = server.getUri().resolve("/context/path"); + + try (DefaultFeatureRequestor r = new DefaultFeatureRequestor(makeHttpConfig(LDConfig.DEFAULT), uri, "", testLogger)) { + FullDataSet data = r.getAllData(true); + + RequestInfo 
req = server.getRecorder().requireRequest(); + assertNull(req.getQuery()); + verifyHeaders(req); + + verifyExpectedData(data); + } + } + } + + private void verifyHeaders(RequestInfo req) { + HttpConfiguration httpConfig = clientContext(sdkKey, LDConfig.DEFAULT).getHttp(); + for (Map.Entry kv: httpConfig.getDefaultHeaders()) { + assertThat(req.getHeader(kv.getKey()), equalTo(kv.getValue())); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvalResultTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvalResultTest.java new file mode 100644 index 0000000..71ef5de --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvalResultTest.java @@ -0,0 +1,158 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDValue; + +import org.junit.Test; + +import java.util.function.Function; + +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; +import static com.launchdarkly.sdk.EvaluationReason.ErrorKind.WRONG_TYPE; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +@SuppressWarnings("javadoc") +public class EvalResultTest { + private static final LDValue SOME_VALUE = LDValue.of("value"); + private static final LDValue ARRAY_VALUE = LDValue.arrayOf(); + private static final LDValue OBJECT_VALUE = LDValue.buildObject().build(); + private static final int SOME_VARIATION = 11; + private static final EvaluationReason SOME_REASON = EvaluationReason.fallthrough(); + + @Test + public void getValue() { + assertThat(EvalResult.of(EvaluationDetail.fromValue(SOME_VALUE, SOME_VARIATION, SOME_REASON)).getValue(), + equalTo(SOME_VALUE)); + assertThat(EvalResult.of(SOME_VALUE, SOME_VARIATION, SOME_REASON).getValue(), + equalTo(SOME_VALUE)); + } + + @Test + 
public void getVariationIndex() { + assertThat(EvalResult.of(EvaluationDetail.fromValue(SOME_VALUE, SOME_VARIATION, SOME_REASON)).getVariationIndex(), + equalTo(SOME_VARIATION)); + assertThat(EvalResult.of(SOME_VALUE, SOME_VARIATION, SOME_REASON).getVariationIndex(), + equalTo(SOME_VARIATION)); + } + + @Test + public void getReason() { + assertThat(EvalResult.of(EvaluationDetail.fromValue(SOME_VALUE, SOME_VARIATION, SOME_REASON)).getReason(), + equalTo(SOME_REASON)); + assertThat(EvalResult.of(SOME_VALUE, SOME_VARIATION, SOME_REASON).getReason(), + equalTo(SOME_REASON)); + } + + @Test + public void isNoVariation() { + assertThat(EvalResult.of(EvaluationDetail.fromValue(SOME_VALUE, SOME_VARIATION, SOME_REASON)).isNoVariation(), + is(false)); + assertThat(EvalResult.of(SOME_VALUE, SOME_VARIATION, SOME_REASON).isNoVariation(), + is(false)); + + assertThat(EvalResult.of(EvaluationDetail.fromValue(SOME_VALUE, NO_VARIATION, SOME_REASON)).isNoVariation(), + is(true)); + assertThat(EvalResult.of(SOME_VALUE, NO_VARIATION, SOME_REASON).isNoVariation(), + is(true)); + } + + @Test + public void getAnyType() { + testForType(SOME_VALUE, SOME_VALUE, r -> r.getAnyType()); + } + + @Test + public void getAsBoolean() { + testForType(true, LDValue.of(true), r -> r.getAsBoolean()); + + testWrongType(false, LDValue.ofNull(), r -> r.getAsBoolean()); + testWrongType(false, LDValue.of(1), r -> r.getAsBoolean()); + testWrongType(false, LDValue.of("a"), r -> r.getAsBoolean()); + testWrongType(false, ARRAY_VALUE, r -> r.getAsBoolean()); + testWrongType(false, OBJECT_VALUE, r -> r.getAsBoolean()); + } + + @Test + public void getAsInteger() { + testForType(99, LDValue.of(99), r -> r.getAsInteger()); + testForType(99, LDValue.of(99.25), r -> r.getAsInteger()); + + testWrongType(0, LDValue.ofNull(), r -> r.getAsInteger()); + testWrongType(0, LDValue.of(true), r -> r.getAsInteger()); + testWrongType(0, LDValue.of("a"), r -> r.getAsInteger()); + testWrongType(0, ARRAY_VALUE, r -> r.getAsInteger()); 
+ testWrongType(0, OBJECT_VALUE, r -> r.getAsInteger()); + } + + @Test + public void getAsDouble() { + testForType((double)99, LDValue.of(99), r -> r.getAsDouble()); + testForType((double)99.25, LDValue.of(99.25), r -> r.getAsDouble()); + + testWrongType((double)0, LDValue.ofNull(), r -> r.getAsDouble()); + testWrongType((double)0, LDValue.of(true), r -> r.getAsDouble()); + testWrongType((double)0, LDValue.of("a"), r -> r.getAsDouble()); + testWrongType((double)0, ARRAY_VALUE, r -> r.getAsDouble()); + testWrongType((double)0, OBJECT_VALUE, r -> r.getAsDouble()); + } + + @Test + public void getAsString() { + testForType("a", LDValue.of("a"), r -> r.getAsString()); + testForType((String)null, LDValue.ofNull(), r -> r.getAsString()); + + testWrongType((String)null, LDValue.of(true), r -> r.getAsString()); + testWrongType((String)null, LDValue.of(1), r -> r.getAsString()); + testWrongType((String)null, ARRAY_VALUE, r -> r.getAsString()); + testWrongType((String)null, OBJECT_VALUE, r -> r.getAsString()); + } + + @Test + public void withReason() { + EvalResult r = EvalResult.of(LDValue.of(true), SOME_VARIATION, EvaluationReason.fallthrough()); + + EvalResult r1 = r.withReason(EvaluationReason.off()); + assertThat(r1.getReason(), equalTo(EvaluationReason.off())); + assertThat(r1.getValue(), equalTo(r.getValue())); + assertThat(r1.getVariationIndex(), equalTo(r.getVariationIndex())); + } + + @Test + public void withForceReasonTracking() { + EvalResult r = EvalResult.of(SOME_VALUE, SOME_VARIATION, SOME_REASON); + assertThat(r.isForceReasonTracking(), is(false)); + + EvalResult r0 = r.withForceReasonTracking(false); + assertThat(r0, sameInstance(r)); + + EvalResult r1 = r.withForceReasonTracking(true); + assertThat(r1.isForceReasonTracking(), is(true)); + assertThat(r1.getAnyType(), sameInstance(r.getAnyType())); + } + + private void testForType(T value, LDValue ldValue, Function getter) { + assertThat( + getter.apply(EvalResult.of(EvaluationDetail.fromValue(ldValue, 
SOME_VARIATION, SOME_REASON))), + equalTo(EvaluationDetail.fromValue(value, SOME_VARIATION, SOME_REASON)) + ); + assertThat( + getter.apply(EvalResult.of(EvaluationDetail.fromValue(ldValue, SOME_VARIATION, SOME_REASON))), + equalTo(EvaluationDetail.fromValue(value, SOME_VARIATION, SOME_REASON)) + ); + } + + private void testWrongType(T value, LDValue ldValue, Function getter) { + assertThat( + getter.apply(EvalResult.of(EvaluationDetail.fromValue(ldValue, SOME_VARIATION, SOME_REASON))), + equalTo(EvaluationDetail.fromValue(value, EvaluationDetail.NO_VARIATION, EvaluationReason.error(WRONG_TYPE))) + ); + assertThat( + getter.apply(EvalResult.of(EvaluationDetail.fromValue(ldValue, SOME_VARIATION, SOME_REASON))), + equalTo(EvaluationDetail.fromValue(value, EvaluationDetail.NO_VARIATION, EvaluationReason.error(WRONG_TYPE))) + ); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBigSegmentTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBigSegmentTest.java new file mode 100644 index 0000000..28d0564 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBigSegmentTest.java @@ -0,0 +1,195 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationReason.BigSegmentsStatus; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.Evaluator.makeBigSegmentRef; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static com.launchdarkly.sdk.server.ModelBuilders.booleanFlagWithClauses; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingContext; 
+import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingSegment; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentRuleBuilder; +import static com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.createMembershipFromSegmentRefs; +import static java.util.Arrays.asList; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.strictMock; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class EvaluatorBigSegmentTest extends EvaluatorTestBase { + private static final LDContext testUser = LDContext.create("userkey"); + + @Test + public void bigSegmentWithNoProviderIsNotMatched() { + Segment segment = segmentBuilder("segmentkey").unbounded(true).generation(1) + .included(testUser.getKey()) // Included should be ignored for a big segment + .build(); + FeatureFlag flag = booleanFlagWithClauses("key", clauseMatchingSegment(segment)); + Evaluator evaluator = evaluatorBuilder().withStoredSegments(segment).withBigSegmentQueryResult(testUser.getKey(), null).build(); + + EvalResult result = evaluator.evaluate(flag, testUser, expectNoPrerequisiteEvals()); + assertEquals(LDValue.of(false), result.getValue()); + assertEquals(BigSegmentsStatus.NOT_CONFIGURED, result.getReason().getBigSegmentsStatus()); + } + + @Test + public void bigSegmentWithNoGenerationIsNotMatched() { + // Segment without generation + Segment segment = segmentBuilder("segmentkey").unbounded(true).build(); + FeatureFlag flag = booleanFlagWithClauses("key", clauseMatchingSegment(segment)); + Evaluator evaluator = evaluatorBuilder().withStoredSegments(segment).build(); + + EvalResult result = evaluator.evaluate(flag, testUser, expectNoPrerequisiteEvals()); + 
assertEquals(LDValue.of(false), result.getValue()); + assertEquals(BigSegmentsStatus.NOT_CONFIGURED, result.getReason().getBigSegmentsStatus()); + } + + @Test + public void matchedWithIncludeForDefaultKind() { + testMatchedWithInclude(false, false); + testMatchedWithInclude(false, true); + } + + @Test + public void matchedWithIncludeForNonDefaultKind() { + testMatchedWithInclude(true, false); + testMatchedWithInclude(true, true); + } + + private void testMatchedWithInclude(boolean nonDefaultKind, boolean multiKindContext) { + String targetKey = "contextkey"; + ContextKind kind1 = ContextKind.of("kind1"); + LDContext singleKindContext = nonDefaultKind ? LDContext.create(kind1, targetKey) : LDContext.create(targetKey); + LDContext evalContext = multiKindContext ? + LDContext.createMulti(singleKindContext, LDContext.create(ContextKind.of("kind2"), "key2")) : + singleKindContext; + + Segment segment = segmentBuilder("segmentkey") + .unbounded(true) + .unboundedContextKind(nonDefaultKind ? kind1 : null) + .generation(2) + .build(); + FeatureFlag flag = booleanFlagWithClauses("flagkey", clauseMatchingSegment(segment)); + + BigSegmentStoreWrapper.BigSegmentsQueryResult queryResult = new BigSegmentStoreWrapper.BigSegmentsQueryResult(); + queryResult.status = BigSegmentsStatus.HEALTHY; + queryResult.membership = createMembershipFromSegmentRefs(asList(makeBigSegmentRef(segment)), null); + Evaluator evaluator = evaluatorBuilder().withStoredSegments(segment) + .withBigSegmentQueryResult(targetKey, queryResult).build(); + + EvalResult result = evaluator.evaluate(flag, evalContext, expectNoPrerequisiteEvals()); + assertEquals(LDValue.of(true), result.getValue()); + assertEquals(BigSegmentsStatus.HEALTHY, result.getReason().getBigSegmentsStatus()); + } + + @Test + public void matchedWithRule() { + Clause clause = clauseMatchingContext(testUser); + SegmentRule segmentRule = segmentRuleBuilder().clauses(clause).build(); + Segment segment = 
segmentBuilder("segmentkey").unbounded(true).generation(2) + .rules(segmentRule) + .build(); + FeatureFlag flag = booleanFlagWithClauses("key", clauseMatchingSegment(segment)); + BigSegmentStoreWrapper.BigSegmentsQueryResult queryResult = new BigSegmentStoreWrapper.BigSegmentsQueryResult(); + queryResult.status = BigSegmentsStatus.HEALTHY; + queryResult.membership = createMembershipFromSegmentRefs(null, null); + Evaluator evaluator = evaluatorBuilder().withStoredSegments(segment).withBigSegmentQueryResult(testUser.getKey(), queryResult).build(); + + EvalResult result = evaluator.evaluate(flag, testUser, expectNoPrerequisiteEvals()); + assertEquals(LDValue.of(true), result.getValue()); + assertEquals(BigSegmentsStatus.HEALTHY, result.getReason().getBigSegmentsStatus()); + } + + @Test + public void unmatchedByExcludeRegardlessOfRule() { + Clause clause = clauseMatchingContext(testUser); + SegmentRule segmentRule = segmentRuleBuilder().clauses(clause).build(); + Segment segment = segmentBuilder("segmentkey").unbounded(true).generation(2) + .rules(segmentRule) + .build(); + FeatureFlag flag = booleanFlagWithClauses("key", clauseMatchingSegment(segment)); + BigSegmentStoreWrapper.BigSegmentsQueryResult queryResult = new BigSegmentStoreWrapper.BigSegmentsQueryResult(); + queryResult.status = BigSegmentsStatus.HEALTHY; + queryResult.membership = createMembershipFromSegmentRefs(null, asList(makeBigSegmentRef(segment))); + Evaluator evaluator = evaluatorBuilder().withStoredSegments(segment).withBigSegmentQueryResult(testUser.getKey(), queryResult).build(); + + EvalResult result = evaluator.evaluate(flag, testUser, expectNoPrerequisiteEvals()); + assertEquals(LDValue.of(false), result.getValue()); + assertEquals(BigSegmentsStatus.HEALTHY, result.getReason().getBigSegmentsStatus()); + } + + @Test + public void bigSegmentStatusIsReturnedFromProvider() { + Segment segment = segmentBuilder("segmentkey").unbounded(true).generation(2).build(); + FeatureFlag flag = 
booleanFlagWithClauses("key", clauseMatchingSegment(segment)); + BigSegmentStoreWrapper.BigSegmentsQueryResult queryResult = new BigSegmentStoreWrapper.BigSegmentsQueryResult(); + queryResult.status = BigSegmentsStatus.STALE; + queryResult.membership = createMembershipFromSegmentRefs(asList(makeBigSegmentRef(segment)), null); + Evaluator evaluator = evaluatorBuilder().withStoredSegments(segment).withBigSegmentQueryResult(testUser.getKey(), queryResult).build(); + + EvalResult result = evaluator.evaluate(flag, testUser, expectNoPrerequisiteEvals()); + assertEquals(LDValue.of(true), result.getValue()); + assertEquals(BigSegmentsStatus.STALE, result.getReason().getBigSegmentsStatus()); + } + + @Test + public void bigSegmentStateIsQueriedOnlyOncePerKeyEvenIfFlagReferencesMultipleSegments() { + ContextKind kind1 = ContextKind.of("kind1"), kind2 = ContextKind.of("kind2"), kind3 = ContextKind.of("kind3"); + String key1 = "contextkey1", key2 = "contextkey2"; + LDContext context = LDContext.createMulti( + LDContext.create(kind1, key1), + LDContext.create(kind2, key2), + LDContext.create(kind3, key2) // deliberately using same key for kind2 and kind3 + ); + + Segment segment1 = segmentBuilder("segmentkey1").unbounded(true).unboundedContextKind(kind1).generation(2).build(); + Segment segment2 = segmentBuilder("segmentkey2").unbounded(true).unboundedContextKind(kind2).generation(3).build(); + Segment segment3 = segmentBuilder("segmentkey3").unbounded(true).unboundedContextKind(kind3).generation(4).build(); + + // Set up the flag with a rule for each segment + FeatureFlag flag = flagBuilder("key") + .on(true) + .fallthroughVariation(0) + .variations(false, true) + .rules( + ruleBuilder().variation(1).clauses(clauseMatchingSegment(segment1)).build(), + ruleBuilder().variation(1).clauses(clauseMatchingSegment(segment2)).build(), + ruleBuilder().variation(1).clauses(clauseMatchingSegment(segment3)).build() + ) + .build(); + + // Set up the fake big segment store so that it will 
report a match only for segment3 with key2. + // Since segment1 and segment2 won't match, all three rules will be evaluated, and since each + // segment uses a different ContextKind, we will be testing keys from all three of the individual + // contexts. But two of those are the same key, and since big segment queries are cached per key, + // we should only see a single query for that one. + BigSegmentStoreWrapper.BigSegmentsQueryResult queryResultForKey2 = new BigSegmentStoreWrapper.BigSegmentsQueryResult(); + queryResultForKey2.status = BigSegmentsStatus.HEALTHY; + queryResultForKey2.membership = createMembershipFromSegmentRefs(asList(makeBigSegmentRef(segment3)), null); + + Evaluator.Getters mockGetters = strictMock(Evaluator.Getters.class); + expect(mockGetters.getSegment(segment1.getKey())).andReturn(segment1); + expect(mockGetters.getBigSegments(key1)).andReturn(null).times(1); + expect(mockGetters.getSegment(segment2.getKey())).andReturn(segment2); + expect(mockGetters.getBigSegments(key2)).andReturn(queryResultForKey2).times(1); + expect(mockGetters.getSegment(segment3.getKey())).andReturn(segment3); + replay(mockGetters); + + Evaluator evaluator = new Evaluator(mockGetters, testLogger); + EvalResult result = evaluator.evaluate(flag, context, expectNoPrerequisiteEvals()); + assertEquals(LDValue.of(true), result.getValue()); + assertEquals(BigSegmentsStatus.HEALTHY, result.getReason().getBigSegmentsStatus()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBucketingTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBucketingTest.java new file mode 100644 index 0000000..6e0a164 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorBucketingTest.java @@ -0,0 +1,166 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import 
com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import com.launchdarkly.sdk.server.DataModel.WeightedVariation; + +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; + +import static com.launchdarkly.sdk.server.EvaluatorBucketing.computeBucketValue; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.BASE_EVALUATOR; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingContext; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class EvaluatorBucketingTest { + private Integer noSeed = null; + + @Test + public void variationIndexIsReturnedForBucket() { + LDContext context = LDContext.create("userkey"); + String flagKey = "flagkey"; + String salt = "salt"; + + // First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, + // so we can construct a rollout whose second bucket just barely contains that value + int bucketValue = (int)(computeBucketValue(false, noSeed, context, null, flagKey, null, salt) * 100000); + assertThat(bucketValue, greaterThanOrEqualTo(1)); + assertThat(bucketValue, lessThan(100000)); + + int badVariationA = 0, matchedVariation = 1, badVariationB = 2; + List variations = Arrays.asList( + new WeightedVariation(badVariationA, bucketValue, true), // end of bucket range is not inclusive, so it will *not* match the target value + new WeightedVariation(matchedVariation, 1, true), // size of this bucket is 1, so it only matches that specific value + new WeightedVariation(badVariationB, 100000 - (bucketValue + 1), 
true)); + Rollout rollout = new Rollout(null, variations, null, RolloutKind.rollout, null); + + assertVariationIndexFromRollout(matchedVariation, rollout, context, flagKey, salt); + } + + @Test + public void usingSeedIsDifferentThanSalt() { + LDContext context = LDContext.create("userkey"); + String flagKey = "flagkey"; + String salt = "salt"; + Integer seed = 123; + + float bucketValue1 = computeBucketValue(false, noSeed, context, null, flagKey, null, salt); + float bucketValue2 = computeBucketValue(true, seed, context, null, flagKey, null, salt); + assert(bucketValue1 != bucketValue2); + } + + @Test + public void differentSeedsProduceDifferentAssignment() { + LDContext context = LDContext.create("userkey"); + String flagKey = "flagkey"; + String salt = "salt"; + Integer seed1 = 123; + Integer seed2 = 456; + + float bucketValue1 = computeBucketValue(true, seed1, context, null, flagKey, null, salt); + float bucketValue2 = computeBucketValue(true, seed2, context, null, flagKey, null, salt); + assert(bucketValue1 != bucketValue2); + } + + @Test + public void flagKeyAndSaltDoNotMatterWhenSeedIsUsed() { + LDContext context = LDContext.create("userkey"); + String flagKey1 = "flagkey"; + String flagKey2 = "flagkey2"; + String salt1 = "salt"; + String salt2 = "salt2"; + Integer seed = 123; + + float bucketValue1 = computeBucketValue(true, seed, context, null, flagKey1, null, salt1); + float bucketValue2 = computeBucketValue(true, seed, context, null, flagKey2, null, salt2); + assert(bucketValue1 == bucketValue2); + } + + @Test + public void lastBucketIsUsedIfBucketValueEqualsTotalWeight() { + LDContext context = LDContext.create("userkey"); + String flagKey = "flagkey"; + String salt = "salt"; + + // We'll construct a list of variations that stops right at the target bucket value + int bucketValue = (int)(computeBucketValue(false, noSeed, context, null, flagKey, null, salt) * 100000); + + List variations = Arrays.asList(new WeightedVariation(0, bucketValue, true)); + 
Rollout rollout = new Rollout(null, variations, null, RolloutKind.rollout, null); + + assertVariationIndexFromRollout(0, rollout, context, flagKey, salt); + } + + @Test + public void canBucketByIntAttributeSameAsString() { + LDContext context = LDContext.builder("key") + .set("stringattr", "33333") + .set("intattr", 33333) + .build(); + float resultForString = computeBucketValue(false, noSeed, context, null, "key", AttributeRef.fromLiteral("stringattr"), "salt"); + float resultForInt = computeBucketValue(false, noSeed, context, null, "key", AttributeRef.fromLiteral("intattr"), "salt"); + assertEquals(resultForString, resultForInt, Float.MIN_VALUE); + } + + @Test + public void cannotBucketByFloatAttribute() { + LDContext context = LDContext.builder("key") + .set("floatattr", 33.5f) + .build(); + float result = computeBucketValue(false, noSeed, context, null, "key", AttributeRef.fromLiteral("floatattr"), "salt"); + assertEquals(0f, result, Float.MIN_VALUE); + } + + @Test + public void cannotBucketByBooleanAttribute() { + LDContext context = LDContext.builder("key") + .set("boolattr", true) + .build(); + float result = computeBucketValue(false, noSeed, context, null, "key", AttributeRef.fromLiteral("boolattr"), "salt"); + assertEquals(0f, result, Float.MIN_VALUE); + } + + private static void assertVariationIndexFromRollout( + int expectedVariation, + Rollout rollout, + LDContext context, + String flagKey, + String salt + ) { + FeatureFlag flag1 = ModelBuilders.flagBuilder(flagKey) + .on(true) + .generatedVariations(3) + .fallthrough(rollout) + .salt(salt) + .build(); + EvalResult result1 = BASE_EVALUATOR.evaluate(flag1, context, expectNoPrerequisiteEvals()); + assertThat(result1.getReason(), equalTo(EvaluationReason.fallthrough())); + assertThat(result1.getVariationIndex(), equalTo(expectedVariation)); + + // Make sure we consistently apply the rollout regardless of whether it's in a rule or a fallthrough + FeatureFlag flag2 = ModelBuilders.flagBuilder(flagKey) + 
.on(true) + .generatedVariations(3) + .rules(ModelBuilders.ruleBuilder() + .rollout(rollout) + .clauses(clauseMatchingContext(context)) + .build()) + .salt(salt) + .build(); + EvalResult result2 = BASE_EVALUATOR.evaluate(flag2, context, expectNoPrerequisiteEvals()); + assertThat(result2.getReason().getKind(), equalTo(EvaluationReason.Kind.RULE_MATCH)); + assertThat(result2.getVariationIndex(), equalTo(expectedVariation)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorClauseTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorClauseTest.java new file mode 100644 index 0000000..4c1317f --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorClauseTest.java @@ -0,0 +1,304 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Segment; + +import org.junit.Test; + +import static com.launchdarkly.sdk.EvaluationDetail.fromValue; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.BASE_EVALUATOR; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static com.launchdarkly.sdk.server.ModelBuilders.booleanFlagWithClauses; +import static com.launchdarkly.sdk.server.ModelBuilders.clause; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingSegment; +import static com.launchdarkly.sdk.server.ModelBuilders.fallthroughVariation; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static 
com.launchdarkly.sdk.server.ModelBuilders.negateClause; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.TestUtil.TEST_GSON_INSTANCE; +import static com.launchdarkly.testhelpers.JsonAssertions.assertJsonEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +@SuppressWarnings("javadoc") +public class EvaluatorClauseTest extends EvaluatorTestBase { + private static void assertMatch(Evaluator eval, FeatureFlag flag, LDContext context, boolean expectMatch) { + assertEquals(LDValue.of(expectMatch), eval.evaluate(flag, context, expectNoPrerequisiteEvals()).getValue()); + } + + private static Segment makeSegmentThatMatchesUser(String segmentKey, String userKey) { + return segmentBuilder(segmentKey).included(userKey).build(); + } + + @Test + public void clauseCanMatchBuiltInAttribute() throws Exception { + Clause clause = clause("name", Operator.in, LDValue.of("Bob")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").name("Bob").build(); + + assertMatch(BASE_EVALUATOR, f, context, true); + } + + @Test + public void clauseCanMatchCustomAttribute() throws Exception { + Clause clause = clause("legs", Operator.in, LDValue.of(4)); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").set("legs", 4).build(); + + assertMatch(BASE_EVALUATOR, f, context, true); + } + + @Test + public void clauseReturnsFalseForMissingAttribute() throws Exception { + Clause clause = clause("legs", Operator.in, LDValue.of(4)); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").name("Bob").build(); + + assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void clauseMatchesContextValueToAnyOfMultipleValues() throws Exception { + Clause clause = 
clause("name", Operator.in, LDValue.of("Bob"), LDValue.of("Carol")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").name("Carol").build(); + + assertMatch(BASE_EVALUATOR, f, context, true); + } + + @Test + public void clauseMatchesContextValueToAnyOfMultipleValuesWithNonEqualityOperator() throws Exception { + // We check this separately because of the special preprocessing logic for equality matches. + Clause clause = clause("name", Operator.contains, LDValue.of("Bob"), LDValue.of("Carol")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").name("Caroline").build(); + + assertMatch(BASE_EVALUATOR, f, context, true); + } + + @Test + public void clauseMatchesArrayOfContextValuesToClauseValue() throws Exception { + Clause clause = clause("alias", Operator.in, LDValue.of("Maurice")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").set("alias", + LDValue.buildArray().add("Space Cowboy").add("Maurice").build()).build(); + + assertMatch(BASE_EVALUATOR, f, context, true); + } + + @Test + public void clauseFindsNoMatchInArrayOfContextValues() throws Exception { + Clause clause = clause("alias", Operator.in, LDValue.of("Ma")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").set("alias", + LDValue.buildArray().add("Mary").add("May").build()).build(); + + assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void matchFailsIfContextValueIsAnArrayOfArrays() throws Exception { + LDValue arrayValue = LDValue.buildArray().add("thing").build(); + LDValue arrayOfArrays = LDValue.buildArray().add(arrayValue).build(); + Clause clause = clause("data", Operator.in, arrayOfArrays); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").set("data", arrayOfArrays).build(); + + 
assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void matchFailsIfContextValueIsAnObject() throws Exception { + LDValue objectValue = LDValue.buildObject().put("thing", LDValue.of(true)).build(); + Clause clause = clause("data", Operator.in, objectValue); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").set("data", objectValue).build(); + + assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void matchFailsIfContextValueIsAnArrayOfObjects() throws Exception { + LDValue objectValue = LDValue.buildObject().put("thing", LDValue.of(true)).build(); + LDValue arrayOfObjects = LDValue.buildArray().add(objectValue).build(); + Clause clause = clause("data", Operator.in, arrayOfObjects); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").set("data", arrayOfObjects).build(); + + assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void clauseReturnsFalseForNullOperator() throws Exception { + Clause clause = clause("key", null, LDValue.of("key")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.create("key"); + + assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void clauseCanBeNegatedToReturnFalse() throws Exception { + Clause clause = negateClause(clause("key", Operator.in, LDValue.of("key"))); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").name("Bob").build(); + + assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void clauseCanBeNegatedToReturnTrue() throws Exception { + Clause clause = negateClause(clause("key", Operator.in, LDValue.of("other"))); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key").name("Bob").build(); + + assertMatch(BASE_EVALUATOR, f, context, true); + } + + @Test + public void 
clauseWithUnsupportedOperatorStringIsUnmarshalledWithNullOperator() throws Exception { + // This just verifies that GSON will give us a null in this case instead of throwing an exception, + // so we fail as gracefully as possible if a new operator type has been added in the application + // and the SDK hasn't been upgraded yet. + String badClauseJson = "{\"attribute\":\"name\",\"operator\":\"doesSomethingUnsupported\",\"values\":[\"x\"]}"; + Clause clause = TEST_GSON_INSTANCE.fromJson(badClauseJson, DataModel.Clause.class); + assertNotNull(clause); + + String json = TEST_GSON_INSTANCE.toJson(clause); + String expectedJson = "{\"attribute\":\"name\",\"values\":[\"x\"],\"negate\":false}"; + assertJsonEquals(expectedJson, json); + } + + @Test + public void clauseWithNullOperatorDoesNotMatch() throws Exception { + Clause badClause = clause("name", null, LDValue.of("Bob")); + FeatureFlag f = booleanFlagWithClauses("flag", badClause); + LDContext context = LDContext.builder("key").name("Bob").build(); + + assertMatch(BASE_EVALUATOR, f, context, false); + } + + @Test + public void clauseWithNullOperatorDoesNotStopSubsequentRuleFromMatching() throws Exception { + Clause badClause = clause("name", null, LDValue.of("Bob")); + Rule badRule = ruleBuilder().id("rule1").clauses(badClause).variation(1).build(); + Clause goodClause = clause("name", Operator.in, LDValue.of("Bob")); + Rule goodRule = ruleBuilder().id("rule2").clauses(goodClause).variation(1).build(); + FeatureFlag f = flagBuilder("feature") + .on(true) + .rules(badRule, goodRule) + .fallthrough(fallthroughVariation(0)) + .offVariation(0) + .variations(LDValue.of(false), LDValue.of(true)) + .build(); + LDContext context = LDContext.builder("key").name("Bob").build(); + + EvaluationDetail details = BASE_EVALUATOR.evaluate(f, context, expectNoPrerequisiteEvals()).getAnyType(); + assertEquals(fromValue(LDValue.of(true), 1, EvaluationReason.ruleMatch(1, "rule2")), details); + } + + @Test + public void 
clauseCanGetValueWithAttributeReference() throws Exception { + Clause clause = clause(null, AttributeRef.fromPath("/address/city"), Operator.in, LDValue.of("Oakland")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.builder("key") + .set("address", LDValue.parse("{\"city\":\"Oakland\",\"state\":\"CA\"}")) + .build(); + + assertMatch(BASE_EVALUATOR, f, context, true); + } + + @Test + public void clauseMatchUsesContextKind() throws Exception { + Clause clause = clause(ContextKind.of("company"), "name", Operator.in, LDValue.of("Catco")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context1 = LDContext.builder("cc").kind("company").name("Catco").build(); + LDContext context2 = LDContext.builder("l").name("Lucy").build(); + LDContext context3 = LDContext.createMulti(context1, context2); + + assertMatch(BASE_EVALUATOR, f, context1, true); + assertMatch(BASE_EVALUATOR, f, context2, false); + assertMatch(BASE_EVALUATOR, f, context3, true); + } + + @Test + public void clauseMatchByKindAttribute() throws Exception { + Clause clause = clause(null, "kind", Operator.startsWith, LDValue.of("a")); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context1 = LDContext.create("key"); + LDContext context2 = LDContext.create(ContextKind.of("ab"), "key"); + LDContext context3 = LDContext.createMulti( + LDContext.create(ContextKind.of("cd"), "key"), + LDContext.create(ContextKind.of("ab"), "key")); + + assertMatch(BASE_EVALUATOR, f, context1, false); + assertMatch(BASE_EVALUATOR, f, context2, true); + assertMatch(BASE_EVALUATOR, f, context3, true); + } + + @Test + public void clauseReturnsMalformedFlagErrorForAttributeNotSpecified() { + Clause clause = clause(null, (AttributeRef)null, Operator.in, LDValue.of(4)); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.create("key"); + + EvalResult result = BASE_EVALUATOR.evaluate(f, context, 
expectNoPrerequisiteEvals()); + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void clauseReturnsMalformedFlagErrorForMalformedAttributeReference() { + Clause clause = clause(null, AttributeRef.fromPath("///"), Operator.in, LDValue.of(4)); + FeatureFlag f = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.create("key"); + + EvalResult result = BASE_EVALUATOR.evaluate(f, context, expectNoPrerequisiteEvals()); + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void testSegmentMatchClauseRetrievesSegmentFromStore() throws Exception { + String segmentKey = "segkey"; + Clause clause = clauseMatchingSegment(segmentKey); + FeatureFlag flag = booleanFlagWithClauses("flag", clause); + Segment segment = makeSegmentThatMatchesUser(segmentKey, "foo"); + LDContext context = LDContext.create("foo"); + + Evaluator e = evaluatorBuilder().withStoredSegments(segment).build(); + assertMatch(e, flag, context, true); + } + + @Test + public void testSegmentMatchClauseFallsThroughIfSegmentNotFound() throws Exception { + String segmentKey = "segkey"; + Clause clause = clauseMatchingSegment(segmentKey); + FeatureFlag flag = booleanFlagWithClauses("flag", clause); + LDContext context = LDContext.create("foo"); + + Evaluator e = evaluatorBuilder().withNonexistentSegment(segmentKey).build(); + assertMatch(e, flag, context, false); + } + + @Test + public void testSegmentMatchClauseIgnoresNonStringValues() throws Exception { + String segmentKey = "segkey"; + Clause clause = clause(null, (AttributeRef)null, Operator.segmentMatch, + LDValue.of(123), LDValue.of(segmentKey)); + FeatureFlag flag = booleanFlagWithClauses("flag", clause); + Segment segment = makeSegmentThatMatchesUser(segmentKey, "foo"); + LDContext context = LDContext.create("foo"); + + Evaluator e = evaluatorBuilder().withStoredSegments(segment).build(); + assertMatch(e, flag, context, true); + } 
+} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorOperatorsParameterizedTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorOperatorsParameterizedTest.java new file mode 100644 index 0000000..a37f834 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorOperatorsParameterizedTest.java @@ -0,0 +1,187 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.Operator; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static com.launchdarkly.sdk.server.EvaluatorHelpers.matchClauseWithoutSegments; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +@RunWith(Parameterized.class) +public class EvaluatorOperatorsParameterizedTest { + private static final LDValue invalidVer = LDValue.of("xbad%ver"); + + private static final AttributeRef userAttr = AttributeRef.fromLiteral("attr"); + + private final Operator op; + private final LDValue userValue; + private final LDValue clauseValue; + private final LDValue[] extraClauseValues; + private final boolean shouldBe; + + public EvaluatorOperatorsParameterizedTest( + Operator op, + LDValue userValue, + LDValue clauseValue, + LDValue[] extraClauseValues, + boolean shouldBe + ) { + this.op = op; + this.userValue = userValue; + this.clauseValue = clauseValue; + this.extraClauseValues = extraClauseValues; + this.shouldBe = shouldBe; + } + + @Parameterized.Parameters(name = "{1} {0} {2}+{3} should be {4}") + public static Iterable data() { + ImmutableList.Builder tests = ImmutableList.builder(); + + tests.add(new Object[][] { + // numeric comparisons + { Operator.in, 
LDValue.of(99), LDValue.of(99), null, true }, + { Operator.in, LDValue.of(99), LDValue.of(99), new LDValue[] { LDValue.of(98), LDValue.of(97), LDValue.of(96) }, true }, + { Operator.in, LDValue.of(99.0001), LDValue.of(99.0001), new LDValue[] { LDValue.of(98), LDValue.of(97), LDValue.of(96) }, true }, + { Operator.in, LDValue.of(99.0001), LDValue.of(99.0001), null, true }, + { Operator.in, LDValue.of(99), LDValue.of(99.0001), null, false }, + { Operator.in, LDValue.of(99.0001), LDValue.of(99), null, false }, + { Operator.lessThan, LDValue.of(99), LDValue.of(99.0001), null, true }, + { Operator.lessThan, LDValue.of(99.0001), LDValue.of(99), null, false }, + { Operator.lessThan, LDValue.of(99), LDValue.of(99), null, false }, + { Operator.lessThanOrEqual, LDValue.of(99), LDValue.of(99.0001), null, true }, + { Operator.lessThanOrEqual, LDValue.of(99.0001), LDValue.of(99), null, false }, + { Operator.lessThanOrEqual, LDValue.of(99), LDValue.of(99), null, true }, + { Operator.greaterThan, LDValue.of(99.0001), LDValue.of(99), null, true }, + { Operator.greaterThan, LDValue.of(99), LDValue.of(99.0001), null, false }, + { Operator.greaterThan, LDValue.of(99), LDValue.of(99), null, false }, + { Operator.greaterThanOrEqual, LDValue.of(99.0001), LDValue.of(99), null, true }, + { Operator.greaterThanOrEqual, LDValue.of(99), LDValue.of(99.0001), null, false }, + { Operator.greaterThanOrEqual, LDValue.of(99), LDValue.of(99), null, true }, + + // string comparisons + { Operator.in, LDValue.of("x"), LDValue.of("x"), null, true }, + { Operator.in, LDValue.of("x"), LDValue.of("xyz"), null, false }, + { Operator.in, LDValue.of("x"), LDValue.of("x"), new LDValue[] { LDValue.of("a"), LDValue.of("b"), LDValue.of("c") }, true }, + { Operator.startsWith, LDValue.of("xyz"), LDValue.of("x"), null, true }, + { Operator.startsWith, LDValue.of("x"), LDValue.of("xyz"), null, false }, + { Operator.startsWith, LDValue.of(1), LDValue.of("xyz"), null, false }, + { Operator.startsWith, 
LDValue.of("1xyz"), LDValue.of(1), null, false }, + { Operator.endsWith, LDValue.of("xyz"), LDValue.of("z"), null, true }, + { Operator.endsWith, LDValue.of("z"), LDValue.of("xyz"), null, false }, + { Operator.endsWith, LDValue.of(1), LDValue.of("xyz"), null, false }, + { Operator.endsWith, LDValue.of("xyz1"), LDValue.of(1), null, false }, + { Operator.contains, LDValue.of("xyz"), LDValue.of("y"), null, true }, + { Operator.contains, LDValue.of("y"), LDValue.of("xyz"), null, false }, + { Operator.contains, LDValue.of(2), LDValue.of("xyz"), null, false }, + { Operator.contains, LDValue.of("that 2 is not a string"), LDValue.of(2), null, false }, + + // mixed strings and numbers + { Operator.in, LDValue.of("99"), LDValue.of(99), null, false }, + { Operator.in, LDValue.of(99), LDValue.of("99"), null, false }, + { Operator.contains, LDValue.of("99"), LDValue.of(99), null, false }, + { Operator.startsWith, LDValue.of("99"), LDValue.of(99), null, false }, + { Operator.endsWith, LDValue.of("99"), LDValue.of(99), null, false }, + { Operator.lessThanOrEqual, LDValue.of("99"), LDValue.of(99), null, false }, + { Operator.lessThanOrEqual, LDValue.of(99), LDValue.of("99"), null, false }, + { Operator.greaterThanOrEqual, LDValue.of("99"), LDValue.of(99), null, false }, + { Operator.greaterThanOrEqual, LDValue.of(99), LDValue.of("99"), null, false }, + + // boolean values + { Operator.in, LDValue.of(true), LDValue.of(true), null, true }, + { Operator.in, LDValue.of(false), LDValue.of(false), null, true }, + { Operator.in, LDValue.of(true), LDValue.of(false), null, false }, + { Operator.in, LDValue.of(false), LDValue.of(true), null, false }, + { Operator.in, LDValue.of(true), LDValue.of(false), new LDValue[] { LDValue.of(true) }, true }, + + // regex + { Operator.matches, LDValue.of("hello world"), LDValue.of("hello.*rld"), null, true }, + { Operator.matches, LDValue.of("hello world"), LDValue.of("hello.*orl"), null, true }, + { Operator.matches, LDValue.of("hello world"), 
LDValue.of("l+"), null, true }, + { Operator.matches, LDValue.of("hello world"), LDValue.of("(world|planet)"), null, true }, + { Operator.matches, LDValue.of("hello world"), LDValue.of("aloha"), null, false }, + // note that an invalid regex in a clause should *not* cause an exception, just a non-match + { Operator.matches, LDValue.of("hello world"), LDValue.of("***not a regex"), null, false }, + { Operator.matches, LDValue.of(2), LDValue.of("that 2 is not a string"), null, false }, + + // semver + { Operator.semVerEqual, LDValue.of("2.0.1"), LDValue.of("2.0.1"), null, true }, + { Operator.semVerEqual, LDValue.of("2.0.2"), LDValue.of("2.0.1"), null, false }, + { Operator.semVerEqual, LDValue.of("2.0"), LDValue.of("2.0.0"), null, true }, + { Operator.semVerEqual, LDValue.of("2"), LDValue.of("2.0.0"), null, true }, + { Operator.semVerEqual, LDValue.of("2-rc1"), LDValue.of("2.0.0-rc1"), null, true }, + { Operator.semVerEqual, LDValue.of("2+build2"), LDValue.of("2.0.0+build2"), null, true }, + { Operator.semVerEqual, LDValue.of("xxx"), LDValue.of("2.0.1"), null, false }, + { Operator.semVerEqual, LDValue.of(2), LDValue.of("2.0.1"), null, false }, + { Operator.semVerEqual, LDValue.of("2.0.1"), LDValue.of("xxx"), null, false }, + { Operator.semVerEqual, LDValue.of("2.0.1"), LDValue.of(2), null, false }, + { Operator.semVerLessThan, LDValue.of("2.0.0"), LDValue.of("2.0.1"), null, true }, + { Operator.semVerLessThan, LDValue.of("2.0"), LDValue.of("2.0.1"), null, true }, + { Operator.semVerLessThan, LDValue.of("2.0.1"), LDValue.of("2.0.0"), null, false }, + { Operator.semVerLessThan, LDValue.of("2.0.1"), LDValue.of("2.0"), null, false }, + { Operator.semVerLessThan, LDValue.of("2.0.0-rc"), LDValue.of("2.0.0"), null, true }, + { Operator.semVerLessThan, LDValue.of("2.0.0-rc"), LDValue.of("2.0.0-rc.beta"), null, true }, + { Operator.semVerGreaterThan, LDValue.of("2.0.1"), LDValue.of("2.0.0"), null, true }, + { Operator.semVerGreaterThan, LDValue.of("2.0.1"), 
LDValue.of("2.0"), null, true }, + { Operator.semVerGreaterThan, LDValue.of("2.0.0"), LDValue.of("2.0.1"), null, false }, + { Operator.semVerGreaterThan, LDValue.of("2.0"), LDValue.of("2.0.1"), null, false }, + { Operator.semVerGreaterThan, LDValue.of("2.0.0-rc.1"), LDValue.of("2.0.0-rc.0"), null, true }, + { Operator.semVerLessThan, LDValue.of("2.0.1"), invalidVer, null, false }, + { Operator.semVerGreaterThan, LDValue.of("2.0.1"), invalidVer, null, false }, + + // miscellaneous invalid conditions + { null, LDValue.of("x"), LDValue.of("y"), null, false }, // no operator + { Operator.segmentMatch, LDValue.of("x"), LDValue.of("y"), null, false } // segmentMatch is handled elsewhere + }); + + // add permutations of date values for before & after operators + // dateStr1, dateStrUtc1, and dateMs1 are the same timestamp in different formats; etc. + LDValue dateStr1 = LDValue.of("2017-12-06T00:00:00.000-07:00"); + LDValue dateStrUtc1 = LDValue.of("2017-12-06T07:00:00.000Z"); + LDValue dateMs1 = LDValue.of(1512543600000L); + LDValue dateStr2 = LDValue.of("2017-12-06T00:00:01.000-07:00"); + LDValue dateStrUtc2 = LDValue.of("2017-12-06T07:00:01.000Z"); + LDValue dateMs2 = LDValue.of(1512543601000L); + LDValue invalidDate = LDValue.of("hey what's this?"); + for (LDValue lowerValue: new LDValue[] { dateStr1, dateStrUtc1, dateMs1 }) { + for (LDValue higherValue: new LDValue[] { dateStr2, dateStrUtc2, dateMs2 }) { + tests.add(new Object[] { Operator.before, lowerValue, higherValue, null, true }); + tests.add(new Object[] { Operator.before, lowerValue, lowerValue, null, false }); + tests.add(new Object[] { Operator.before, higherValue, lowerValue, null, false }); + tests.add(new Object[] { Operator.before, lowerValue, invalidDate, null, false }); + tests.add(new Object[] { Operator.after, higherValue, lowerValue, null, true }); + tests.add(new Object[] { Operator.after, lowerValue, lowerValue, null, false }); + tests.add(new Object[] { Operator.after, lowerValue, higherValue, 
null, false}); + tests.add(new Object[] { Operator.after, lowerValue, invalidDate, null, false}); + } + } + + return tests.build(); + } + + @Test + public void parameterizedTestComparison() { + List values = new ArrayList<>(5); + if (extraClauseValues != null) { + values.addAll(Arrays.asList(extraClauseValues)); + } + values.add(clauseValue); + + Clause clause1 = new Clause(null, userAttr, op, values, false); + assertEquals("without preprocessing", shouldBe, matchClauseWithoutSegments(clause1, userValue)); + + Clause clause2 = new Clause(null, userAttr, op, values, false); + DataModelPreprocessing.preprocessClause(clause2); + assertEquals("without preprocessing", shouldBe, matchClauseWithoutSegments(clause2, userValue)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorPrerequisiteTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorPrerequisiteTest.java new file mode 100644 index 0000000..38a716f --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorPrerequisiteTest.java @@ -0,0 +1,226 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.Iterables; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.EvaluatorTestUtil.PrereqEval; +import com.launchdarkly.sdk.server.EvaluatorTestUtil.PrereqRecorder; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.BASE_USER; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.FALLTHROUGH_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.FALLTHROUGH_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.GREEN_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.GREEN_VARIATION; 
+import static com.launchdarkly.sdk.server.EvaluatorTestUtil.OFF_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.OFF_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.RED_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.RED_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.buildRedGreenFlag; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.buildThreeWayFlag; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.evaluatorBuilder; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.prerequisite; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +@SuppressWarnings("javadoc") +public class EvaluatorPrerequisiteTest { + @Test + public void flagReturnsOffVariationIfPrerequisiteIsNotFound() throws Exception { + FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", 1)) + .build(); + Evaluator e = evaluatorBuilder().withNonexistentFlag("feature1").build(); + EvalResult result = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(EvalResult.of(OFF_VALUE, OFF_VARIATION, expectedReason), result); + } + + @Test + public void flagReturnsOffVariationAndEventIfPrerequisiteIsOff() throws Exception { + FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(false) + .offVariation(GREEN_VARIATION) + // note that even though it returns the desired variation, it is still off and therefore not a match + .build(); + 
Evaluator e = evaluatorBuilder().withStoredFlags(f1).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(EvalResult.of(OFF_VALUE, OFF_VARIATION, expectedReason), result); + + assertEquals(1, Iterables.size(recordPrereqs.evals)); + PrereqEval eval = recordPrereqs.evals.get(0); + assertEquals(f1, eval.flag); + assertEquals(f0, eval.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval.result.getValue()); + } + + @Test + public void flagReturnsOffVariationAndEventIfPrerequisiteIsNotMet() throws Exception { + FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(true) + .fallthroughVariation(RED_VARIATION) + .build(); + Evaluator e = evaluatorBuilder().withStoredFlags(f1).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(EvalResult.of(OFF_VALUE, OFF_VARIATION, expectedReason), result); + + assertEquals(1, Iterables.size(recordPrereqs.evals)); + PrereqEval eval = recordPrereqs.evals.get(0); + assertEquals(f1, eval.flag); + assertEquals(f0, eval.prereqOfFlag); + assertEquals(RED_VARIATION, eval.result.getVariationIndex()); + assertEquals(RED_VALUE, eval.result.getValue()); + } + + @Test + public void prerequisiteFailedResultInstanceIsReusedForSamePrerequisite() throws Exception { + FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + Evaluator e = evaluatorBuilder().withNonexistentFlag("feature1").build(); + EvalResult result0 = e.evaluate(f0, BASE_USER, 
expectNoPrerequisiteEvals()); + EvalResult result1 = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(expectedReason, result0.getReason()); + assertSame(result0, result1); + } + + @Test + public void prerequisiteFailedReasonInstanceCanBeCreatedFromScratch() throws Exception { + // Normally we will always do the preprocessing step that creates the reason instances ahead of time, + // but if somehow we didn't, it should create them as needed + FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .disablePreprocessing(true) + .build(); + assertNull(f0.getPrerequisites().get(0).preprocessed); + + Evaluator e = evaluatorBuilder().withNonexistentFlag("feature1").build(); + EvalResult result0 = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + EvalResult result1 = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(expectedReason, result0.getReason()); + assertNotSame(result0.getReason(), result1.getReason()); // they were created individually + assertEquals(result0.getReason(), result1.getReason()); // but they're equal + } + + @Test + public void flagReturnsFallthroughVariationAndEventIfPrerequisiteIsMetAndThereAreNoRules() throws Exception { + FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(true) + .fallthroughVariation(GREEN_VARIATION) + .version(2) + .build(); + Evaluator e = evaluatorBuilder().withStoredFlags(f1).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + assertEquals(EvalResult.of(FALLTHROUGH_VALUE, FALLTHROUGH_VARIATION, EvaluationReason.fallthrough()), 
result); + + assertEquals(1, Iterables.size(recordPrereqs.evals)); + PrereqEval eval = recordPrereqs.evals.get(0); + assertEquals(f1, eval.flag); + assertEquals(f0, eval.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval.result.getValue()); + } + + @Test + public void multipleLevelsOfPrerequisitesProduceMultipleEvents() throws Exception { + FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(true) + .prerequisites(prerequisite("feature2", GREEN_VARIATION)) + .fallthroughVariation(GREEN_VARIATION) + .build(); + FeatureFlag f2 = buildRedGreenFlag("feature2") + .on(true) + .fallthroughVariation(GREEN_VARIATION) + .build(); + Evaluator e = evaluatorBuilder().withStoredFlags(f1, f2).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + assertEquals(EvalResult.of(FALLTHROUGH_VALUE, FALLTHROUGH_VARIATION, EvaluationReason.fallthrough()), result); + + assertEquals(2, Iterables.size(recordPrereqs.evals)); + + PrereqEval eval0 = recordPrereqs.evals.get(0); + assertEquals(f2, eval0.flag); + assertEquals(f1, eval0.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval0.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval0.result.getValue()); + + PrereqEval eval1 = recordPrereqs.evals.get(1); + assertEquals(f1, eval1.flag); + assertEquals(f0, eval1.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval1.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval1.result.getValue()); + } + + @Test + public void prerequisiteCycleDetection() { + for (int depth = 1; depth <= 4; depth++) { + String[] flagKeys = new String[depth]; + for (int i = 0; i < depth; i++) { + flagKeys[i] = "flagkey" + i; + } + FeatureFlag[] flags = new FeatureFlag[depth]; + for (int i = 0; i < depth; i++) { + flags[i] = 
flagBuilder(flagKeys[i]) + .on(true) + .variations(false, true) + .offVariation(0) + .prerequisites( + new Prerequisite(flagKeys[(i + 1) % depth], 0) + ) + .build(); + } + + Evaluator e = evaluatorBuilder().withStoredFlags(flags).build(); + + LDContext context = LDContext.create("foo"); + EvalResult result = e.evaluate(flags[0], context, expectNoPrerequisiteEvals()); + assertEquals(EvalResult.error(ErrorKind.MALFORMED_FLAG), result); + // Note, we specified expectNoPrerequisiteEvals() above because we do not expect the evaluator + // to *finish* evaluating any of these prerequisites (it can't, because of the cycle), and so + // it won't get as far as emitting any prereq evaluation results. + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorRuleTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorRuleTest.java new file mode 100644 index 0000000..9712174 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorRuleTest.java @@ -0,0 +1,198 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.WeightedVariation; +import com.launchdarkly.sdk.server.ModelBuilders.FlagBuilder; +import com.launchdarkly.sdk.server.ModelBuilders.RuleBuilder; + +import org.junit.Test; + +import java.util.Arrays; + +import static com.launchdarkly.sdk.server.EvaluatorBucketing.computeBucketValue; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.BASE_EVALUATOR; +import static 
com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static com.launchdarkly.sdk.server.ModelBuilders.clause; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingContext; +import static com.launchdarkly.sdk.server.ModelBuilders.emptyRollout; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +@SuppressWarnings("javadoc") +public class EvaluatorRuleTest { + private static final LDContext BASE_USER = LDContext.create("userkey"); + private static final LDContext OTHER_USER = LDContext.create("otherkey"); + private static final int FALLTHROUGH_VARIATION = 0; + private static final int MATCH_VARIATION = 1; + + private FlagBuilder buildBooleanFlagWithRules(String flagKey, DataModel.Rule... rules) { + return flagBuilder(flagKey) + .on(true) + .rules(rules) + .fallthroughVariation(FALLTHROUGH_VARIATION) + .offVariation(FALLTHROUGH_VARIATION) + .variations(LDValue.of(false), LDValue.of(true)); + } + + private RuleBuilder buildTestRule(String id, DataModel.Clause... 
clauses) { + return ruleBuilder().id(id).clauses(clauses).variation(MATCH_VARIATION); + } + + @Test + public void ruleMatchResultInstanceIsReusedForSameRule() { + Clause clause0 = clauseMatchingContext(OTHER_USER); + Clause clause1 = clauseMatchingContext(BASE_USER); + DataModel.Rule rule0 = buildTestRule("ruleid0", clause0).build(); + DataModel.Rule rule1 = buildTestRule("ruleid1", clause1).build(); + + DataModel.FeatureFlag f = buildBooleanFlagWithRules("feature", rule0, rule1).build(); + + EvalResult sameResult0 = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + EvalResult sameResult1 = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + EvalResult otherResult = BASE_EVALUATOR.evaluate(f, OTHER_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvaluationReason.ruleMatch(1, "ruleid1"), sameResult0.getReason()); + assertSame(sameResult0, sameResult1); + + assertEquals(EvaluationReason.ruleMatch(0, "ruleid0"), otherResult.getReason()); + } + + @Test + public void ruleMatchResultInstanceCanBeCreatedFromScratch() { + // Normally we will always do the preprocessing step that creates the result instances ahead of time, + // but if somehow we didn't, it should create them as needed + DataModel.Clause clause = clause("key", DataModel.Operator.in, LDValue.of("userkey")); + DataModel.Rule rule = buildTestRule("ruleid", clause).build(); + LDContext user = LDContext.create("userkey"); + + DataModel.FeatureFlag f = buildBooleanFlagWithRules("feature", rule) + .disablePreprocessing(true) + .build(); + assertNull(f.getRules().get(0).preprocessed); + + EvalResult result1 = BASE_EVALUATOR.evaluate(f, user, expectNoPrerequisiteEvals()); + EvalResult result2 = BASE_EVALUATOR.evaluate(f, user, expectNoPrerequisiteEvals()); + + assertEquals(EvaluationReason.ruleMatch(0, "ruleid"), result1.getReason()); + assertNotSame(result1, result2); // they were created individually + assertEquals(result1, result2); // but they're equal + } + + @Test + 
public void ruleWithTooHighVariationReturnsMalformedFlagError() { + Clause clause = clauseMatchingContext(BASE_USER); + Rule rule = buildTestRule("ruleid", clause).variation(999).build(); + FeatureFlag f = buildBooleanFlagWithRules("feature", rule).build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void ruleWithNegativeVariationReturnsMalformedFlagError() { + Clause clause = clauseMatchingContext(BASE_USER); + Rule rule = buildTestRule("ruleid", clause).variation(-1).build(); + FeatureFlag f = buildBooleanFlagWithRules("feature", rule).build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void ruleWithNoVariationOrRolloutReturnsMalformedFlagError() { + Clause clause = clauseMatchingContext(BASE_USER); + Rule rule = buildTestRule("ruleid", clause).variation(null).build(); + FeatureFlag f = buildBooleanFlagWithRules("feature", rule).build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void ruleWithRolloutWithEmptyVariationsListReturnsMalformedFlagError() { + Clause clause = clauseMatchingContext(BASE_USER); + Rule rule = buildTestRule("ruleid", clause).variation(null).rollout(emptyRollout()).build(); + FeatureFlag f = buildBooleanFlagWithRules("feature", rule).build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void rolloutUsesCorrectBucketValue() { + LDContext c = LDContext.create("foo"); + testRolloutBucketing("foo", c, null, null, 
RolloutKind.rollout); + } + + @Test + public void rolloutUsesContextKind() { + LDContext c1 = LDContext.create(ContextKind.of("kind1"), "foo"); + LDContext c2 = LDContext.create(ContextKind.of("kind2"), "bar"); + LDContext multi = LDContext.createMulti(c1, c2); + testRolloutBucketing("foo", multi, ContextKind.of("kind1"), null, RolloutKind.rollout); + } + + @Test + public void rolloutUsesBucketBy() { + LDContext c = LDContext.builder("xxx").set("attr1", LDValue.parse("{\"prop1\":\"foo\"}")).build(); + testRolloutBucketing("foo", c, null, AttributeRef.fromPath("/attr1/prop1"), RolloutKind.rollout); + } + + @Test + public void experimentIgnoresBucketBy() { + LDContext c = LDContext.builder("xxx").set("attr1", LDValue.parse("{\"prop1\":\"foo\"}")).build(); + testRolloutBucketing("xxx", c, null, AttributeRef.fromPath("/attr1/prop1"), RolloutKind.experiment); + } + + private static void testRolloutBucketing( + String bucketByValue, + LDContext context, + ContextKind contextKind, + AttributeRef bucketBy, + RolloutKind rolloutKind + ) { + String flagKey = "feature"; + String salt = "abc"; + float expectedBucketValue = computeBucketValue(false, null, LDContext.create(bucketByValue), null, + flagKey, null, salt); + int bucketValueAsInt = (int)(expectedBucketValue * 100000); + Clause clause = clauseMatchingContext(context); + + // To roughly verify that the right bucket value is being used, we'll construct a rollout + // where the target bucket is in a very small range around that value. 
+ Rollout rollout = new Rollout( + contextKind, + Arrays.asList( + new WeightedVariation(0, bucketValueAsInt - 1, false), + new WeightedVariation(1, 2, false), + new WeightedVariation(2, 100000 - (bucketValueAsInt + 1), false) + ), + bucketBy, + rolloutKind, + null); + FeatureFlag flag = flagBuilder(flagKey) + .on(true) + .variations(LDValue.of("no"), LDValue.of("yes"), LDValue.of("no")) + .rules(ruleBuilder().id("rule").clauses(clause).rollout(rollout).build()) + .salt(salt) + .build(); + + EvalResult result = BASE_EVALUATOR.evaluate(flag, context, expectNoPrerequisiteEvals()); + assertEquals(LDValue.of("yes"), result.getValue()); + assertEquals(1, result.getVariationIndex()); + assertEquals(EvaluationReason.Kind.RULE_MATCH, result.getReason().getKind()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorSegmentMatchTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorSegmentMatchTest.java new file mode 100644 index 0000000..e47448a --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorSegmentMatchTest.java @@ -0,0 +1,279 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.ModelBuilders.SegmentBuilder; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.EvaluatorBucketing.computeBucketValue; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static 
com.launchdarkly.sdk.server.ModelBuilders.booleanFlagWithClauses; +import static com.launchdarkly.sdk.server.ModelBuilders.clause; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingContext; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingSegment; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.negateClause; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentRuleBuilder; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class EvaluatorSegmentMatchTest extends EvaluatorTestBase { + private static final String SEGMENT_KEY = "segmentkey"; + private static final String ARBITRARY_SALT = "abcdef"; + private static final int maxWeight = 100000; + + @Test + public void explicitIncludeUser() { + LDContext c = LDContext.create("foo"); + Segment s = baseSegmentBuilder() + .included(c.getKey()) + .build(); + + assertTrue(segmentMatchesContext(s, c)); + } + + @Test + public void explicitExcludeUser() { + LDContext c = LDContext.create("foo"); + Segment s = baseSegmentBuilder() + .excluded(c.getKey()) + .rules(segmentRuleBuilder().clauses(clauseMatchingContext(c)).build()) + .build(); + + assertFalse(segmentMatchesContext(s, c)); + } + + @Test + public void explicitIncludeHasPrecedence() { + LDContext c = LDContext.create("foo"); + Segment s = baseSegmentBuilder() + .included(c.getKey()) + .excluded(c.getKey()) + .build(); + + assertTrue(segmentMatchesContext(s, c)); + } + + @Test + public void includedKeyForContextKind() { + ContextKind kind1 = ContextKind.of("kind1"); + String key = "foo"; + LDContext c1 = LDContext.create(key); + LDContext c2 = LDContext.create(kind1, key); + 
LDContext c3 = LDContext.createMulti(c1, c2); + + Segment s = baseSegmentBuilder() + .includedContexts(kind1, key) + .build(); + + assertFalse(segmentMatchesContext(s, c1)); + assertTrue(segmentMatchesContext(s, c2)); + assertTrue(segmentMatchesContext(s, c3)); + } + + @Test + public void excludedKeyForContextKind() { + ContextKind kind1 = ContextKind.of("kind1"); + String key = "foo"; + LDContext c1 = LDContext.create(key); + LDContext c2 = LDContext.create(kind1, key); + LDContext c3 = LDContext.createMulti(c1, c2); + + Segment s = baseSegmentBuilder() + .excludedContexts(kind1, key) + .rules( + segmentRuleBuilder().clauses(clauseMatchingContext(c1)).build(), + segmentRuleBuilder().clauses(clauseMatchingContext(c2)).build(), + segmentRuleBuilder().clauses(clauseMatchingContext(c3)).build() + ) + .build(); + + assertTrue(segmentMatchesContext(s, c1)); // rule matched, wasn't excluded + assertFalse(segmentMatchesContext(s, c2)); // rule matched but was excluded + assertFalse(segmentMatchesContext(s, c3)); // rule matched but was excluded + } + + @Test + public void matchingRuleWithFullRollout() { + LDContext c = LDContext.create("foo"); + Clause clause = clauseMatchingContext(c); + SegmentRule rule = segmentRuleBuilder().clauses(clause).weight(maxWeight).build(); + Segment s = baseSegmentBuilder() + .rules(rule) + .build(); + + assertTrue(segmentMatchesContext(s, c)); + } + + @Test + public void matchingRuleWithZeroRollout() { + LDContext c = LDContext.create("foo"); + Clause clause = clauseMatchingContext(c); + SegmentRule rule = segmentRuleBuilder().clauses(clause).weight(0).build(); + Segment s = baseSegmentBuilder() + .rules(rule) + .build(); + + assertFalse(segmentMatchesContext(s, c)); + } + + @Test + public void matchingRuleWithMultipleClauses() { + Clause clause1 = clause("email", DataModel.Operator.in, LDValue.of("test@example.com")); + Clause clause2 = clause("name", DataModel.Operator.in, LDValue.of("bob")); + SegmentRule rule = 
segmentRuleBuilder().clauses(clause1, clause2).build(); + Segment s = segmentBuilder("test") + .salt("abcdef") + .rules(rule) + .build(); + LDContext c = LDContext.builder("foo").set("email", "test@example.com").name("bob").build(); + + assertTrue(segmentMatchesContext(s, c)); + } + + @Test + public void nonMatchingRuleWithMultipleClauses() { + Clause clause1 = clause("email", DataModel.Operator.in, LDValue.of("test@example.com")); + Clause clause2 = clause("name", DataModel.Operator.in, LDValue.of("bill")); + SegmentRule rule = segmentRuleBuilder().clauses(clause1, clause2).build(); + Segment s = segmentBuilder("test") + .salt("abcdef") + .rules(rule) + .build(); + LDContext c = LDContext.builder("foo").set("email", "test@example.com").name("bob").build(); + + assertFalse(segmentMatchesContext(s, c)); + } + + @Test + public void rolloutUsesCorrectBucketValue() { + LDContext c = LDContext.create("foo"); + testRolloutBucketing("foo", c, null, null); + } + + @Test + public void rolloutUsesContextKind() { + LDContext c1 = LDContext.create(ContextKind.of("kind1"), "foo"); + LDContext c2 = LDContext.create(ContextKind.of("kind2"), "bar"); + LDContext multi = LDContext.createMulti(c1, c2); + testRolloutBucketing("foo", multi, ContextKind.of("kind1"), null); + } + + @Test + public void rolloutUsesBucketBy() { + LDContext c = LDContext.builder("xxx").set("attr1", LDValue.parse("{\"prop1\":\"foo\"}")).build(); + testRolloutBucketing("foo", c, null, AttributeRef.fromPath("/attr1/prop1")); + } + + private void testRolloutBucketing(String bucketByValue, LDContext context, ContextKind contextKind, AttributeRef bucketBy) { + float expectedBucketValue = computeBucketValue(false, null, LDContext.create(bucketByValue), null, + SEGMENT_KEY, null, ARBITRARY_SALT); + int bucketValueAsInt = (int)(expectedBucketValue * 100000); + Clause clause = clauseMatchingContext(context); + + // When a segment rule has a weight, it matches only if the bucket value for the context (as an int + // 
from 0 to 100000) is *less than* that weight. So, to roughly verify that the right bucket value + // is being used, first we check that a rule with that value plus 1 is a match... + Segment s1 = baseSegmentBuilder() + .rules(segmentRuleBuilder().clauses(clause).weight(bucketValueAsInt + 1) + .rolloutContextKind(contextKind).bucketBy(bucketBy).build()) + .build(); + assertTrue(segmentMatchesContext(s1, context)); + + // ...and then, that a rule with that value minus 1 is not a match. + Segment s2 = baseSegmentBuilder() + .rules(segmentRuleBuilder().clauses(clause).weight(bucketValueAsInt - 1) + .rolloutContextKind(contextKind).bucketBy(bucketBy).build()) + .build(); + assertFalse(segmentMatchesContext(s2, context)); + } + + @Test + public void segmentReferencingSegment() { + LDContext context = LDContext.create("foo"); + Segment segment0 = segmentBuilder("segmentkey0") + .rules(segmentRuleBuilder().clauses(clauseMatchingSegment("segmentkey1")).build()) + .build(); + Segment segment1 = segmentBuilder("segmentkey1") + .included(context.getKey()) + .build(); + FeatureFlag flag = booleanFlagWithClauses("flag", clauseMatchingSegment(segment0)); + + Evaluator e = evaluatorBuilder().withStoredSegments(segment0, segment1).build(); + EvalResult result = e.evaluate(flag, context, expectNoPrerequisiteEvals()); + assertTrue(result.getValue().booleanValue()); + } + + @Test + public void segmentCycleDetection() { + for (int depth = 1; depth <= 4; depth++) { + String[] segmentKeys = new String[depth]; + for (int i = 0; i < depth; i++) { + segmentKeys[i] = "segmentkey" + i; + } + Segment[] segments = new Segment[depth]; + for (int i = 0; i < depth; i++) { + segments[i] = segmentBuilder(segmentKeys[i]) + .rules( + segmentRuleBuilder().clauses( + clauseMatchingSegment(segmentKeys[(i + 1) % depth]) + ).build() + ) + .build(); + } + + FeatureFlag flag = booleanFlagWithClauses("flag", clauseMatchingSegment(segments[0])); + Evaluator e = 
evaluatorBuilder().withStoredSegments(segments).build(); + + LDContext context = LDContext.create("foo"); + EvalResult result = e.evaluate(flag, context, expectNoPrerequisiteEvals()); + assertEquals(EvalResult.error(ErrorKind.MALFORMED_FLAG), result); + } + } + + @Test + public void sameSegmentInMultipleSegmentRules() { + LDContext context = LDContext.create("foo"); + Segment reusedSegment = baseSegmentBuilder() + .rules( + segmentRuleBuilder().clauses(clauseMatchingContext(context)).build() + ) + .build(); + DataModel.Rule rule0 = ruleBuilder().id("ruleid0").clauses(negateClause(clauseMatchingSegment(reusedSegment))).variation(0).build(); + DataModel.Rule rule1 = ruleBuilder().id("ruleid1").clauses(clauseMatchingSegment(reusedSegment)).variation(1).build(); + + DataModel.FeatureFlag flag = flagBuilder("flag") + .on(true) + .rules(rule0,rule1) + .fallthroughVariation(0) + .offVariation(0) + .variations(LDValue.of(false), LDValue.of(true)).build(); + + Evaluator e = evaluatorBuilder().withStoredSegments(reusedSegment).build(); + EvalResult result = e.evaluate(flag, context, expectNoPrerequisiteEvals()); + assertEquals(EvaluationReason.ruleMatch(1, "ruleid1"), result.getReason()); + assertTrue(result.getValue().booleanValue()); + } + private static SegmentBuilder baseSegmentBuilder() { + return segmentBuilder(SEGMENT_KEY).version(1).salt(ARBITRARY_SALT); + } + + private boolean segmentMatchesContext(Segment segment, LDContext context) { + FeatureFlag flag = booleanFlagWithClauses("flag", clauseMatchingSegment(segment)); + Evaluator e = evaluatorBuilder().withStoredSegments(segment).build(); + return e.evaluate(flag, context, expectNoPrerequisiteEvals()).getValue().booleanValue(); + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTargetTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTargetTest.java new file mode 100644 index 0000000..8d30096 --- /dev/null +++ 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTargetTest.java @@ -0,0 +1,104 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.ModelBuilders.FlagBuilder; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.BASE_EVALUATOR; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.target; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +public class EvaluatorTargetTest { + private static final int FALLTHROUGH_VAR = 0, MATCH_VAR_1 = 1, MATCH_VAR_2 = 2; + private static final LDValue[] VARIATIONS = new LDValue[] { + LDValue.of("fallthrough"), LDValue.of("match1"), LDValue.of("match2") + }; + private static final ContextKind CAT_KIND = ContextKind.of("cat"); + private static final ContextKind DOG_KIND = ContextKind.of("dog"); + + @Test + public void userTargetsOnly() throws Exception { + FeatureFlag f = baseFlagBuilder() + .targets( + target(MATCH_VAR_1, "c"), + target(MATCH_VAR_2, "b", "a") + ) + .build(); + + expectMatch(f, user("a"), MATCH_VAR_2); + expectMatch(f, user("b"), MATCH_VAR_2); + expectMatch(f, user("c"), MATCH_VAR_1); + expectFallthrough(f, user("z")); + + // in a multi-kind context, these targets match only the key for the user kind + expectMatch(f, LDContext.createMulti(dog("b"), user("a")), MATCH_VAR_2); + expectMatch(f, LDContext.createMulti(dog("a"), user("c")), MATCH_VAR_1); + expectFallthrough(f, LDContext.createMulti(dog("b"), user("z"))); + expectFallthrough(f, LDContext.createMulti(dog("a"), cat("b"))); + } + + @Test + public void 
userTargetsAndContextTargets() throws Exception { + FeatureFlag f = baseFlagBuilder() + .targets( + target(MATCH_VAR_1, "c"), + target(MATCH_VAR_2, "b", "a") + ) + .contextTargets( + target(DOG_KIND, MATCH_VAR_1, "a", "b"), + target(DOG_KIND, MATCH_VAR_2, "c"), + target(ContextKind.DEFAULT, MATCH_VAR_1), + target(ContextKind.DEFAULT, MATCH_VAR_2) + ) + .build(); + + expectMatch(f, user("a"), MATCH_VAR_2); + expectMatch(f, user("b"), MATCH_VAR_2); + expectMatch(f, user("c"), MATCH_VAR_1); + expectFallthrough(f, user("z")); + + expectMatch(f, LDContext.createMulti(dog("b"), user("a")), MATCH_VAR_1); // the "dog" target takes precedence due to ordering + expectMatch(f, LDContext.createMulti(dog("z"), user("a")), MATCH_VAR_2); // "dog" targets don't match, continue to "user" targets + expectFallthrough(f, LDContext.createMulti(dog("x"), user("z"))); // nothing matches + expectMatch(f, LDContext.createMulti(dog("a"), cat("b")), MATCH_VAR_1); + } + + private static FlagBuilder baseFlagBuilder() { + return flagBuilder("feature").on(true).variations(VARIATIONS) + .fallthroughVariation(FALLTHROUGH_VAR).offVariation(FALLTHROUGH_VAR); + } + + private static void expectMatch(FeatureFlag f, LDContext c, int v) { + EvalResult result = BASE_EVALUATOR.evaluate(f, c, expectNoPrerequisiteEvals()); + assertThat(result.getVariationIndex(), equalTo(v)); + assertThat(result.getValue(), equalTo(VARIATIONS[v])); + assertThat(result.getReason(), equalTo(EvaluationReason.targetMatch())); + } + + private static void expectFallthrough(FeatureFlag f, LDContext c) { + EvalResult result = BASE_EVALUATOR.evaluate(f, c, expectNoPrerequisiteEvals()); + assertThat(result.getVariationIndex(), equalTo(FALLTHROUGH_VAR)); + assertThat(result.getValue(), equalTo(VARIATIONS[FALLTHROUGH_VAR])); + assertThat(result.getReason(), equalTo(EvaluationReason.fallthrough())); + } + + private static LDContext user(String key) { + return LDContext.create(key); + } + + private static LDContext cat(String key) { + 
return LDContext.create(CAT_KIND, key); + } + + private static LDContext dog(String key) { + return LDContext.create(DOG_KIND, key); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTest.java new file mode 100644 index 0000000..0ecafe1 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTest.java @@ -0,0 +1,483 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.Iterables; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import com.launchdarkly.sdk.server.DataModel.VariationOrRollout; +import com.launchdarkly.sdk.server.DataModel.WeightedVariation; +import com.launchdarkly.sdk.server.EvaluatorTestUtil.PrereqEval; +import com.launchdarkly.sdk.server.EvaluatorTestUtil.PrereqRecorder; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.BASE_EVALUATOR; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.FALLTHROUGH_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.FALLTHROUGH_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.GREEN_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.GREEN_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.MATCH_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.MATCH_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.OFF_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.OFF_VARIATION; +import static 
com.launchdarkly.sdk.server.EvaluatorTestUtil.RED_VALUE; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.RED_VARIATION; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.buildRedGreenFlag; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.buildThreeWayFlag; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static com.launchdarkly.sdk.server.ModelBuilders.clause; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingContext; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.prerequisite; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +@SuppressWarnings("javadoc") +public class EvaluatorTest extends EvaluatorTestBase { + private static final LDContext BASE_USER = LDContext.create("x"); + + private static Rollout buildRollout(boolean isExperiment, boolean untrackedVariations) { + List variations = new ArrayList<>(); + variations.add(new WeightedVariation(1, 50000, untrackedVariations)); + variations.add(new WeightedVariation(2, 50000, untrackedVariations)); + RolloutKind kind = isExperiment ? 
RolloutKind.experiment : RolloutKind.rollout; + Integer seed = 123; + Rollout rollout = new Rollout(null, variations, null, kind, seed); + return rollout; + } + + @Test + public void flagReturnsOffVariationIfFlagIsOff() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(false) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.of(OFF_VALUE, OFF_VARIATION, EvaluationReason.off()), result); + } + + @Test + public void flagReturnsNullIfFlagIsOffAndOffVariationIsUnspecified() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(false) + .offVariation(null) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.of(LDValue.ofNull(), NO_VARIATION, EvaluationReason.off()), result); + } + + @Test + public void flagReturnsErrorIfFlagIsOffAndOffVariationIsTooHigh() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(false) + .offVariation(999) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void flagReturnsErrorIfFlagIsOffAndOffVariationIsNegative() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(false) + .offVariation(-1) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void flagReturnsInExperimentForFallthroughWhenInExperimentVariation() throws Exception { + Rollout rollout = buildRollout(true, false); + VariationOrRollout vr = new VariationOrRollout(null, rollout); + + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .fallthrough(vr) + .build(); + EvalResult 
result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assert(result.getReason().isInExperiment()); + } + + @Test + public void flagReturnsNotInExperimentForFallthroughWhenNotInExperimentVariation() throws Exception { + Rollout rollout = buildRollout(true, true); + VariationOrRollout vr = new VariationOrRollout(null, rollout); + + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .fallthrough(vr) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assert(!result.getReason().isInExperiment()); + } + + @Test + public void flagReturnsNotInExperimentForFallthrougWhenInExperimentVariationButNonExperimentRollout() throws Exception { + Rollout rollout = buildRollout(false, false); + VariationOrRollout vr = new VariationOrRollout(null, rollout); + + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .fallthrough(vr) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assert(!result.getReason().isInExperiment()); + } + + @Test + public void flagReturnsInExperimentForRuleMatchWhenInExperimentVariation() throws Exception { + Rollout rollout = buildRollout(true, false); + + DataModel.Rule rule = ruleBuilder().id("ruleid0").clauses(clauseMatchingContext(BASE_USER)) + .rollout(rollout).build(); + + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .rules(rule) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assert(result.getReason().isInExperiment()); + } + + @Test + public void flagReturnsNotInExperimentForRuleMatchWhenNotInExperimentVariation() throws Exception { + Rollout rollout = buildRollout(true, true); + + DataModel.Rule rule = ruleBuilder().id("ruleid0").clauses(clauseMatchingContext(BASE_USER)) + .rollout(rollout).build(); + + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .rules(rule) + .build(); + 
EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assert(!result.getReason().isInExperiment()); + } + + @Test + public void flagReturnsNotInExperimentWhenContextKindIsNotFound() throws Exception { + Rollout rollout = new Rollout( + ContextKind.of("nonexistent"), + Arrays.asList( + new WeightedVariation(0, 1, false), + new WeightedVariation(1, 99999, false) + ), + null, + RolloutKind.experiment, + null); + + DataModel.Rule rule = ruleBuilder().id("ruleid0").clauses(clauseMatchingContext(BASE_USER)) + .rollout(rollout).build(); + DataModel.FeatureFlag flagWithRule = buildThreeWayFlag("feature") + .on(true) + .rules(rule) + .build(); + EvalResult result1 = BASE_EVALUATOR.evaluate(flagWithRule, BASE_USER, expectNoPrerequisiteEvals()); + assert(!result1.getReason().isInExperiment()); + + DataModel.FeatureFlag flagWithFallthrough = buildThreeWayFlag("feature") + .on(true) + .fallthrough(rollout) + .rules(rule) + .build(); + EvalResult result2 = BASE_EVALUATOR.evaluate(flagWithFallthrough, BASE_USER, expectNoPrerequisiteEvals()); + assert(!result2.getReason().isInExperiment()); + + } + + @Test + public void flagReturnsFallthroughIfFlagIsOnAndThereAreNoRules() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.of(FALLTHROUGH_VALUE, FALLTHROUGH_VARIATION, EvaluationReason.fallthrough()), result); + } + + @Test + public void fallthroughResultHasForceReasonTrackingTrueIfTrackEventsFallthroughIstrue() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .trackEventsFallthrough(true) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals( + EvalResult.of(FALLTHROUGH_VALUE, FALLTHROUGH_VARIATION, EvaluationReason.fallthrough()) + .withForceReasonTracking(true), + result); + 
} + + @Test + public void flagReturnsErrorIfFallthroughHasTooHighVariation() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .fallthroughVariation(999) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void flagReturnsErrorIfFallthroughHasNegativeVariation() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .fallthroughVariation(-1) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void flagReturnsErrorIfFallthroughHasNeitherVariationNorRollout() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .fallthrough(new DataModel.VariationOrRollout(null, null)) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void flagReturnsErrorIfFallthroughHasEmptyRolloutVariationList() throws Exception { + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .fallthrough(new DataModel.VariationOrRollout(null, ModelBuilders.emptyRollout())) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(f, BASE_USER, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.error(EvaluationReason.ErrorKind.MALFORMED_FLAG), result); + } + + @Test + public void flagReturnsOffVariationIfPrerequisiteIsNotFound() throws Exception { + DataModel.FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", 1)) + .build(); + Evaluator e = evaluatorBuilder().withNonexistentFlag("feature1").build(); + EvalResult result = e.evaluate(f0, BASE_USER, 
expectNoPrerequisiteEvals()); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(EvalResult.of(OFF_VALUE, OFF_VARIATION, expectedReason), result); + } + + @Test + public void flagReturnsOffVariationAndEventIfPrerequisiteIsOff() throws Exception { + DataModel.FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + DataModel.FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(false) + .offVariation(GREEN_VARIATION) + // note that even though it returns the desired variation, it is still off and therefore not a match + .build(); + Evaluator e = evaluatorBuilder().withStoredFlags(f1).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(EvalResult.of(OFF_VALUE, OFF_VARIATION, expectedReason), result); + + assertEquals(1, Iterables.size(recordPrereqs.evals)); + PrereqEval eval = recordPrereqs.evals.get(0); + assertEquals(f1, eval.flag); + assertEquals(f0, eval.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval.result.getValue()); + } + + @Test + public void flagReturnsOffVariationAndEventIfPrerequisiteIsNotMet() throws Exception { + DataModel.FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + DataModel.FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(true) + .fallthroughVariation(RED_VARIATION) + .build(); + Evaluator e = evaluatorBuilder().withStoredFlags(f1).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(EvalResult.of(OFF_VALUE, OFF_VARIATION, 
expectedReason), result); + + assertEquals(1, Iterables.size(recordPrereqs.evals)); + PrereqEval eval = recordPrereqs.evals.get(0); + assertEquals(f1, eval.flag); + assertEquals(f0, eval.prereqOfFlag); + assertEquals(RED_VARIATION, eval.result.getVariationIndex()); + assertEquals(RED_VALUE, eval.result.getValue()); + } + + @Test + public void prerequisiteFailedResultInstanceIsReusedForSamePrerequisite() throws Exception { + DataModel.FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + Evaluator e = evaluatorBuilder().withNonexistentFlag("feature1").build(); + EvalResult result0 = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + EvalResult result1 = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(expectedReason, result0.getReason()); + assertSame(result0, result1); + } + + @Test + public void prerequisiteFailedReasonInstanceCanBeCreatedFromScratch() throws Exception { + // Normally we will always do the preprocessing step that creates the reason instances ahead of time, + // but if somehow we didn't, it should create them as needed + DataModel.FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .disablePreprocessing(true) + .build(); + assertNull(f0.getPrerequisites().get(0).preprocessed); + + Evaluator e = evaluatorBuilder().withNonexistentFlag("feature1").build(); + EvalResult result0 = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + EvalResult result1 = e.evaluate(f0, BASE_USER, expectNoPrerequisiteEvals()); + + EvaluationReason expectedReason = EvaluationReason.prerequisiteFailed("feature1"); + assertEquals(expectedReason, result0.getReason()); + assertNotSame(result0.getReason(), result1.getReason()); // they were created individually + assertEquals(result0.getReason(), 
result1.getReason()); // but they're equal + } + + @Test + public void flagReturnsFallthroughVariationAndEventIfPrerequisiteIsMetAndThereAreNoRules() throws Exception { + DataModel.FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + DataModel.FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(true) + .fallthroughVariation(GREEN_VARIATION) + .version(2) + .build(); + Evaluator e = evaluatorBuilder().withStoredFlags(f1).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + assertEquals(EvalResult.of(FALLTHROUGH_VALUE, FALLTHROUGH_VARIATION, EvaluationReason.fallthrough()), result); + + assertEquals(1, Iterables.size(recordPrereqs.evals)); + PrereqEval eval = recordPrereqs.evals.get(0); + assertEquals(f1, eval.flag); + assertEquals(f0, eval.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval.result.getValue()); + } + + @Test + public void multipleLevelsOfPrerequisitesProduceMultipleEvents() throws Exception { + DataModel.FeatureFlag f0 = buildThreeWayFlag("feature") + .on(true) + .prerequisites(prerequisite("feature1", GREEN_VARIATION)) + .build(); + DataModel.FeatureFlag f1 = buildRedGreenFlag("feature1") + .on(true) + .prerequisites(prerequisite("feature2", GREEN_VARIATION)) + .fallthroughVariation(GREEN_VARIATION) + .build(); + DataModel.FeatureFlag f2 = buildRedGreenFlag("feature2") + .on(true) + .fallthroughVariation(GREEN_VARIATION) + .build(); + Evaluator e = evaluatorBuilder().withStoredFlags(f1, f2).build(); + PrereqRecorder recordPrereqs = new PrereqRecorder(); + EvalResult result = e.evaluate(f0, BASE_USER, recordPrereqs); + + assertEquals(EvalResult.of(FALLTHROUGH_VALUE, FALLTHROUGH_VARIATION, EvaluationReason.fallthrough()), result); + + assertEquals(2, Iterables.size(recordPrereqs.evals)); + + PrereqEval eval0 = 
recordPrereqs.evals.get(0); + assertEquals(f2, eval0.flag); + assertEquals(f1, eval0.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval0.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval0.result.getValue()); + + PrereqEval eval1 = recordPrereqs.evals.get(1); + assertEquals(f1, eval1.flag); + assertEquals(f0, eval1.prereqOfFlag); + assertEquals(GREEN_VARIATION, eval1.result.getVariationIndex()); + assertEquals(GREEN_VALUE, eval1.result.getValue()); + } + + @Test + public void flagMatchesUserFromRules() { + DataModel.Clause clause0 = clause("key", DataModel.Operator.in, LDValue.of("wrongkey")); + DataModel.Clause clause1 = clause("key", DataModel.Operator.in, LDValue.of("userkey")); + DataModel.Rule rule0 = ruleBuilder().id("ruleid0").clauses(clause0).variation(2).build(); + DataModel.Rule rule1 = ruleBuilder().id("ruleid1").clauses(clause1).variation(2).build(); + + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .rules(rule0, rule1) + .build(); + + LDContext user = LDContext.create("userkey"); + EvalResult result = BASE_EVALUATOR.evaluate(f, user, expectNoPrerequisiteEvals()); + + assertEquals(EvalResult.of(MATCH_VALUE, MATCH_VARIATION, EvaluationReason.ruleMatch(1, "ruleid1")), result); + } + + @Test + public void ruleMatchReasonHasTrackReasonTrueIfRuleLevelTrackEventsIsTrue() { + DataModel.Clause clause0 = clause("key", DataModel.Operator.in, LDValue.of("wrongkey")); + DataModel.Clause clause1 = clause("key", DataModel.Operator.in, LDValue.of("userkey")); + DataModel.Rule rule0 = ruleBuilder().id("ruleid0").clauses(clause0).variation(2).build(); + DataModel.Rule rule1 = ruleBuilder().id("ruleid1").clauses(clause1).variation(2) + .trackEvents(true).build(); + + DataModel.FeatureFlag f = buildThreeWayFlag("feature") + .on(true) + .rules(rule0, rule1) + .build(); + + LDContext user = LDContext.create("userkey"); + EvalResult result = BASE_EVALUATOR.evaluate(f, user, expectNoPrerequisiteEvals()); + + assertEquals( + 
EvalResult.of(MATCH_VALUE, MATCH_VARIATION, EvaluationReason.ruleMatch(1, "ruleid1")) + .withForceReasonTracking(true), + result); + } + + @Test(expected=RuntimeException.class) + public void canSimulateErrorUsingTestInstrumentationFlagKey() { + // Other tests rely on the ability to simulate an exception in this way + DataModel.FeatureFlag badFlag = flagBuilder(Evaluator.INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION).build(); + BASE_EVALUATOR.evaluate(badFlag, BASE_USER, expectNoPrerequisiteEvals()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestBase.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestBase.java new file mode 100644 index 0000000..5d2a702 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestBase.java @@ -0,0 +1,10 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.EvaluatorTestUtil.EvaluatorBuilder; + +@SuppressWarnings("javadoc") +public class EvaluatorTestBase extends BaseTest { + public EvaluatorBuilder evaluatorBuilder() { + return new EvaluatorBuilder(testLogger); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestUtil.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestUtil.java new file mode 100644 index 0000000..33849be --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorTestUtil.java @@ -0,0 +1,164 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.Logs; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.BigSegmentStoreWrapper.BigSegmentsQueryResult; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.ModelBuilders.FlagBuilder; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import static 
com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; + +@SuppressWarnings("javadoc") +public abstract class EvaluatorTestUtil { + public static final LDContext BASE_USER = LDContext.create("x"); + + // These constants and flag builders define two kinds of flag: one with three variations-- allowing us to + // distinguish between match, fallthrough, and off results-- and one with two. + public static final int OFF_VARIATION = 0; + public static final LDValue OFF_VALUE = LDValue.of("off"); + public static final int FALLTHROUGH_VARIATION = 1; + public static final LDValue FALLTHROUGH_VALUE = LDValue.of("fall"); + public static final int MATCH_VARIATION = 2; + public static final LDValue MATCH_VALUE = LDValue.of("match"); + public static final LDValue[] THREE_VARIATIONS = new LDValue[] { OFF_VALUE, FALLTHROUGH_VALUE, MATCH_VALUE }; + + public static final int RED_VARIATION = 0; + public static final LDValue RED_VALUE = LDValue.of("red"); + public static final int GREEN_VARIATION = 1; + public static final LDValue GREEN_VALUE = LDValue.of("green"); + public static final LDValue[] RED_GREEN_VARIATIONS = new LDValue[] { RED_VALUE, GREEN_VALUE }; + + public static FlagBuilder buildThreeWayFlag(String flagKey) { + return flagBuilder(flagKey) + .fallthroughVariation(FALLTHROUGH_VARIATION) + .offVariation(OFF_VARIATION) + .variations(THREE_VARIATIONS) + .version(versionFromKey(flagKey)); + } + + public static FlagBuilder buildRedGreenFlag(String flagKey) { + return flagBuilder(flagKey) + .fallthroughVariation(GREEN_VARIATION) + .offVariation(RED_VARIATION) + .variations(RED_GREEN_VARIATIONS) + .version(versionFromKey(flagKey)); + } + + public static int versionFromKey(String flagKey) { + return Math.abs(flagKey.hashCode()); + } + + public static EvaluatorBuilder evaluatorBuilder() { + return new EvaluatorBuilder(); + } + + public static Evaluator BASE_EVALUATOR = new EvaluatorBuilder().build(); + + public static class EvaluatorBuilder { + HashMap flagMap = new HashMap<>(); + 
HashMap segmentMap = new HashMap<>(); + HashMap bigSegmentMap = new HashMap<>(); + private final LDLogger logger; + + EvaluatorBuilder() { + this(LDLogger.withAdapter(Logs.none(), "")); + } + + EvaluatorBuilder(LDLogger logger) { + this.logger = logger; + } + + public Evaluator build() { + return new Evaluator(new Evaluator.Getters() { + public DataModel.FeatureFlag getFlag(String key) { + if (!flagMap.containsKey(key)) { + throw new IllegalStateException("Evaluator unexpectedly tried to query flag: " + key); + } + return flagMap.get(key); + } + + public DataModel.Segment getSegment(String key) { + if (!segmentMap.containsKey(key)) { + throw new IllegalStateException("Evaluator unexpectedly tried to query segment: " + key); + } + return segmentMap.get(key); + } + + public BigSegmentsQueryResult getBigSegments(String key) { + if (!bigSegmentMap.containsKey(key)) { + throw new IllegalStateException("Evaluator unexpectedly tried to query Big Segment: " + key); + } + return bigSegmentMap.get(key); + } + }, logger); + } + + public EvaluatorBuilder withStoredFlags(final DataModel.FeatureFlag... flags) { + for (DataModel.FeatureFlag f: flags) { + flagMap.put(f.getKey(), f); + } + return this; + } + + public EvaluatorBuilder withNonexistentFlag(final String nonexistentFlagKey) { + flagMap.put(nonexistentFlagKey, null); + return this; + } + + public EvaluatorBuilder withStoredSegments(final DataModel.Segment... 
segments) { + for (DataModel.Segment s: segments) { + segmentMap.put(s.getKey(), s); + } + return this; + } + + public EvaluatorBuilder withNonexistentSegment(final String nonexistentSegmentKey) { + segmentMap.put(nonexistentSegmentKey, null); + return this; + } + + public EvaluatorBuilder withBigSegmentQueryResult(final String userKey, BigSegmentsQueryResult queryResult) { + bigSegmentMap.put(userKey, queryResult); + return this; + } + } + + public static EvaluationRecorder expectNoPrerequisiteEvals() { + return new EvaluationRecorder() { + @Override + public void recordPrerequisiteEvaluation(FeatureFlag flag, FeatureFlag prereqOfFlag, LDContext context, EvalResult result) { + throw new AssertionError("did not expect any prerequisite evaluations, but got one"); + } + }; + } + + public static final class PrereqEval { + public final FeatureFlag flag; + public final FeatureFlag prereqOfFlag; + public final LDContext context; + public final EvalResult result; + + public PrereqEval(FeatureFlag flag, FeatureFlag prereqOfFlag, LDContext context, EvalResult result) { + this.flag = flag; + this.prereqOfFlag = prereqOfFlag; + this.context = context; + this.result = result; + } + } + + public static final class PrereqRecorder implements EvaluationRecorder { + public final List evals = new ArrayList<>(); + + @Override + public void recordPrerequisiteEvaluation(FeatureFlag flag, FeatureFlag prereqOfFlag, LDContext context, + EvalResult result) { + evals.add(new PrereqEval(flag, prereqOfFlag, context, result)); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorWithHookTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorWithHookTest.java new file mode 100644 index 0000000..e88625f --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EvaluatorWithHookTest.java @@ -0,0 +1,196 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogger; +import 
com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; +import com.launchdarkly.sdk.server.integrations.Hook; +import com.launchdarkly.sdk.server.integrations.HookMetadata; +import org.junit.Test; +import org.mockito.stubbing.Answer; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class EvaluatorWithHookTest { + + @Test + public void beforeIsExecutedBeforeAfter() { + EvalResultAndFlag evalResult = new EvalResultAndFlag(EvalResult.of(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()), null); + EvaluatorInterface mockEvaluator = mock(EvaluatorInterface.class); + when(mockEvaluator.evalAndFlag(any(), any(), any(), any(), any(), any())).thenReturn(evalResult); + + Hook mockHook = mock(Hook.class); + AtomicBoolean beforeCalled = new AtomicBoolean(false); + when(mockHook.beforeEvaluation(any(), any())).thenAnswer((Answer>) invocation -> { + beforeCalled.set(true); + return Collections.emptyMap(); + }); + + when(mockHook.afterEvaluation(any(), any(), any())).thenAnswer((Answer>) invocation -> { + assertTrue(beforeCalled.get()); + return Collections.emptyMap(); + }); + EvaluatorWithHooks evaluatorUnderTest = new EvaluatorWithHooks(mockEvaluator, Collections.singletonList(mockHook), LDLogger.none()); + evaluatorUnderTest.evalAndFlag("aMethod", "aKey", LDContext.create("aKey"), 
LDValue.of("aDefault"), LDValueType.STRING, EvaluationOptions.NO_EVENTS); + } + + @Test + public void evaluationResultIsPassedToAfter() { + EvalResultAndFlag evalResult = new EvalResultAndFlag(EvalResult.of(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()), null); + EvaluatorInterface mockEvaluator = mock(EvaluatorInterface.class); + when(mockEvaluator.evalAndFlag(any(), any(), any(), any(), any(), any())).thenReturn(evalResult); + + Hook mockHook = mock(Hook.class); + when(mockHook.beforeEvaluation(any(), any())).thenReturn(Collections.emptyMap()); + when(mockHook.afterEvaluation(any(), any(), any())).thenReturn(Collections.emptyMap()); + + EvaluatorWithHooks evaluatorUnderTest = new EvaluatorWithHooks(mockEvaluator, Collections.singletonList(mockHook), LDLogger.none()); + evaluatorUnderTest.evalAndFlag("aMethod", "aKey", LDContext.create("aKey"), LDValue.of("aDefault"), LDValueType.STRING, EvaluationOptions.NO_EVENTS); + + verify(mockHook).afterEvaluation(any(), any(), eq(EvaluationDetail.fromValue(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()))); + } + + @Test + public void afterExecutesInReverseOrder() { + EvalResultAndFlag evalResult = new EvalResultAndFlag(EvalResult.of(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()), null); + EvaluatorInterface mockEvaluator = mock(EvaluatorInterface.class); + when(mockEvaluator.evalAndFlag(any(), any(), any(), any(), any(), any())).thenReturn(evalResult); + + List calls = new ArrayList<>(); + + Hook mockHookA = mock(Hook.class); + when(mockHookA.beforeEvaluation(any(), any())).thenAnswer(invocation -> { + calls.add("hookABefore"); + return Collections.emptyMap(); + }); + when(mockHookA.afterEvaluation(any(), any(), any())).thenAnswer(invocation -> { + calls.add("hookAAfter"); + return Collections.emptyMap(); + }); + + Hook mockHookB = mock(Hook.class); + when(mockHookB.beforeEvaluation(any(), any())).thenAnswer(invocation -> { + calls.add("hookBBefore"); + return Collections.emptyMap(); + }); + 
when(mockHookB.afterEvaluation(any(), any(), any())).thenAnswer(invocation -> { + calls.add("hookBAfter"); + return Collections.emptyMap(); + }); + + EvaluatorWithHooks evaluatorUnderTest = new EvaluatorWithHooks(mockEvaluator, Arrays.asList(mockHookA, mockHookB), LDLogger.none()); + evaluatorUnderTest.evalAndFlag("aMethod", "aKey", LDContext.create("aKey"), LDValue.of("aDefault"), LDValueType.STRING, EvaluationOptions.NO_EVENTS); + assertEquals(calls, Arrays.asList("hookABefore", "hookBBefore", "hookBAfter", "hookAAfter")); + } + + @Test + public void beforeIsGivenEmptySeriesData() { + EvalResultAndFlag evalResult = new EvalResultAndFlag(EvalResult.of(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()), null); + EvaluatorInterface mockEvaluator = mock(EvaluatorInterface.class); + when(mockEvaluator.evalAndFlag(any(), any(), any(), any(), any(), any())).thenReturn(evalResult); + + Hook mockHook = mock(Hook.class); + when(mockHook.beforeEvaluation(any(), any())).thenReturn(Collections.emptyMap()); + when(mockHook.afterEvaluation(any(), any(), eq(evalResult.getResult().getAnyType()))).thenReturn(Collections.emptyMap()); + + EvaluatorWithHooks evaluatorUnderTest = new EvaluatorWithHooks(mockEvaluator, Collections.singletonList(mockHook), LDLogger.none()); + evaluatorUnderTest.evalAndFlag("aMethod", "aKey", LDContext.create("aKey"), LDValue.of("aDefault"), LDValueType.STRING, EvaluationOptions.NO_EVENTS); + + verify(mockHook).beforeEvaluation(any(), eq(Collections.emptyMap())); + } + + @Test + public void seriesDataFromBeforeIsPassedToAfter() { + EvalResultAndFlag evalResult = new EvalResultAndFlag(EvalResult.of(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()), null); + EvaluatorInterface mockEvaluator = mock(EvaluatorInterface.class); + when(mockEvaluator.evalAndFlag(any(), any(), any(), any(), any(), any())).thenReturn(evalResult); + + Hook mockHook = mock(Hook.class); + Map mockData = new HashMap<>(); + mockData.put("dataKey", "dataValue"); + 
when(mockHook.beforeEvaluation(any(), any())).thenReturn(mockData); + when(mockHook.afterEvaluation(any(), any(), any())).thenReturn(Collections.emptyMap()); + + EvaluatorWithHooks evaluatorUnderTest = new EvaluatorWithHooks(mockEvaluator, Collections.singletonList(mockHook), LDLogger.none()); + evaluatorUnderTest.evalAndFlag("aMethod", "aKey", LDContext.create("aKey"), LDValue.of("aDefault"), LDValueType.STRING, EvaluationOptions.NO_EVENTS); + + verify(mockHook).afterEvaluation(any(), eq(mockData), any()); + } + + @Test + public void beforeThrowingErrorLeadsToEmptySeriesDataPassedToAfter() { + EvalResultAndFlag evalResult = new EvalResultAndFlag(EvalResult.of(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()), null); + EvaluatorInterface mockEvaluator = mock(EvaluatorInterface.class); + when(mockEvaluator.evalAndFlag(any(), any(), any(), any(), any(), any())).thenReturn(evalResult); + + Hook mockHook = mock(Hook.class); + when(mockHook.beforeEvaluation(any(), any())).thenAnswer(invocation -> { + throw new Exception("Exceptions for everyone!"); + }); + when(mockHook.getMetadata()).thenReturn(new HookMetadata("mockHookName") {}); + when(mockHook.afterEvaluation(any(), any(), any())).thenReturn(Collections.emptyMap()); + + EvaluatorWithHooks evaluatorUnderTest = new EvaluatorWithHooks(mockEvaluator, Collections.singletonList(mockHook), LDLogger.none()); + evaluatorUnderTest.evalAndFlag("aMethod", "aKey", LDContext.create("aKey"), LDValue.of("aDefault"), LDValueType.STRING, EvaluationOptions.NO_EVENTS); + + verify(mockHook, times(1)).getMetadata(); + verify(mockHook).afterEvaluation(any(), eq(Collections.emptyMap()), any()); + } + + @Test + public void oneHookThrowingErrorDoesNotAffectOtherHooks() { + EvalResultAndFlag evalResult = new EvalResultAndFlag(EvalResult.of(LDValue.of("aValue"), 0, EvaluationReason.fallthrough()), null); + EvaluatorInterface mockEvaluator = mock(EvaluatorInterface.class); + when(mockEvaluator.evalAndFlag(any(), any(), any(), any(), 
any(), any())).thenReturn(evalResult); + + List calls = new ArrayList<>(); + + Hook mockHookA = mock(Hook.class); + when(mockHookA.beforeEvaluation(any(), any())).thenAnswer(invocation -> { + calls.add("hookABefore"); + throw new Exception("Exceptions for everyone!"); + }); + // after will get an empty map which is the default series data when an exception occurs in before + when(mockHookA.afterEvaluation(any(), any(), any())).thenAnswer(invocation -> { + calls.add("hookAAfter"); + return Collections.emptyMap(); + }); + when(mockHookA.getMetadata()).thenReturn(new HookMetadata("mockHookA") {}); + + Hook mockHookB = mock(Hook.class); + Map mockData = new HashMap<>(); + mockData.put("dataKey", "dataValue"); + when(mockHookB.beforeEvaluation(any(), any())).thenAnswer(invocation -> { + calls.add("hookBBefore"); + return mockData; + }); + when(mockHookB.afterEvaluation(any(), any(), any())).thenAnswer(invocation -> { + calls.add("hookBAfter"); + return Collections.emptyMap(); + }); + + EvaluatorWithHooks evaluatorUnderTest = new EvaluatorWithHooks(mockEvaluator, Arrays.asList(mockHookA, mockHookB), LDLogger.none()); + evaluatorUnderTest.evalAndFlag("aMethod", "aKey", LDContext.create("aKey"), LDValue.of("aDefault"), LDValueType.STRING, EvaluationOptions.NO_EVENTS); + assertEquals(calls, Arrays.asList("hookABefore", "hookBBefore", "hookBAfter", "hookAAfter")); + + verify(mockHookA).afterEvaluation(any(), eq(Collections.emptyMap()), any()); + verify(mockHookB).afterEvaluation(any(), eq(mockData), any()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EventBroadcasterImplTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EventBroadcasterImplTest.java new file mode 100644 index 0000000..bfe25b1 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/EventBroadcasterImplTest.java @@ -0,0 +1,124 @@ +package com.launchdarkly.sdk.server; + +import org.junit.Test; + +import java.util.concurrent.BlockingQueue; 
+import java.util.concurrent.LinkedBlockingQueue; + +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +@SuppressWarnings("javadoc") +public class EventBroadcasterImplTest extends BaseTest { + private EventBroadcasterImpl broadcaster = + new EventBroadcasterImpl<>(FakeListener::sendEvent, sharedExecutor, testLogger); + + @Test + public void sendingEventWithNoListenersDoesNotCauseError() { + broadcaster.broadcast(new FakeEvent()); + } + + @Test + public void sendingEventWithNoExecutorDoesNotCauseError() { + new EventBroadcasterImpl<>(FakeListener::sendEvent, null, testLogger).broadcast(new FakeEvent()); + } + + @Test + public void hasListeners() { + assertThat(broadcaster.hasListeners(), is(false)); + + FakeListener listener1 = e -> {}; + FakeListener listener2 = e -> {}; + broadcaster.register(listener1); + broadcaster.register(listener2); + + assertThat(broadcaster.hasListeners(), is(true)); + + broadcaster.unregister(listener1); + + assertThat(broadcaster.hasListeners(), is(true)); + + broadcaster.unregister(listener2); + + assertThat(broadcaster.hasListeners(), is(false)); + } + + @Test + public void allListenersReceiveEvent() throws Exception { + BlockingQueue events1 = new LinkedBlockingQueue<>(); + BlockingQueue events2 = new LinkedBlockingQueue<>(); + FakeListener listener1 = events1::add; + FakeListener listener2 = events2::add; + broadcaster.register(listener1); + broadcaster.register(listener2); + + FakeEvent e1 = new FakeEvent(); + FakeEvent e2 = new FakeEvent(); + + broadcaster.broadcast(e1); + broadcaster.broadcast(e2); + + assertThat(events1.take(), is(e1)); + assertThat(events1.take(), is(e2)); + assertThat(events1.isEmpty(), is(true)); + + assertThat(events2.take(), is(e1)); + assertThat(events2.take(), is(e2)); + assertThat(events2.isEmpty(), is(true)); + } + + @Test + public void canUnregisterListener() throws Exception { + 
BlockingQueue events1 = new LinkedBlockingQueue<>(); + BlockingQueue events2 = new LinkedBlockingQueue<>(); + FakeListener listener1 = events1::add; + FakeListener listener2 = events2::add; + broadcaster.register(listener1); + broadcaster.register(listener2); + + FakeEvent e1 = new FakeEvent(); + FakeEvent e2 = new FakeEvent(); + FakeEvent e3 = new FakeEvent(); + + broadcaster.broadcast(e1); + + broadcaster.unregister(listener2); + broadcaster.broadcast(e2); + + broadcaster.register(listener2); + broadcaster.broadcast(e3); + + assertThat(events1.take(), is(e1)); + assertThat(events1.take(), is(e2)); + assertThat(events1.take(), is(e3)); + assertThat(events1.isEmpty(), is(true)); + + assertThat(events2.take(), is(e1)); + assertThat(events2.take(), is(e3)); // did not get e2 + assertThat(events2.isEmpty(), is(true)); + } + + @Test + public void exceptionFromEarlierListenerDoesNotInterfereWithLaterListener() throws Exception { + FakeListener listener1 = e -> { + throw new RuntimeException("sorry"); + }; + broadcaster.register(listener1); + + BlockingQueue events2 = new LinkedBlockingQueue<>(); + FakeListener listener2 = events2::add; + broadcaster.register(listener2); + + FakeEvent e = new FakeEvent(); + broadcaster.broadcast(e); + + assertThat(events2.take(), is(e)); + } + + static class FakeEvent {} + + static interface FakeListener { + void sendEvent(FakeEvent e); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FeatureFlagsStateTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FeatureFlagsStateTest.java new file mode 100644 index 0000000..dc67b32 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FeatureFlagsStateTest.java @@ -0,0 +1,225 @@ +package com.launchdarkly.sdk.server; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDValue; +import 
com.launchdarkly.sdk.json.JsonSerialization; +import com.launchdarkly.sdk.json.LDJackson; +import com.launchdarkly.sdk.json.SerializationException; +import com.launchdarkly.testhelpers.TypeBehavior; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; +import static com.launchdarkly.sdk.EvaluationReason.ErrorKind.MALFORMED_FLAG; +import static com.launchdarkly.sdk.server.FlagsStateOption.DETAILS_ONLY_FOR_TRACKED_FLAGS; +import static com.launchdarkly.sdk.server.FlagsStateOption.WITH_REASONS; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.testhelpers.JsonAssertions.assertJsonEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class FeatureFlagsStateTest { + @Test + public void canGetFlagValue() { + FeatureFlagsState state = FeatureFlagsState.builder() + .add("key", LDValue.of("value"), 1, null, 10, false, null) + .build(); + + assertEquals(LDValue.of("value"), state.getFlagValue("key")); + } + + @Test + public void unknownFlagReturnsNullValue() { + FeatureFlagsState state = FeatureFlagsState.builder().build(); + + assertNull(state.getFlagValue("key")); + } + + @Test + public void canGetFlagReason() { + FeatureFlagsState state = FeatureFlagsState.builder(WITH_REASONS) + .add("key", LDValue.of("value"), 1, EvaluationReason.off(), 10, false, null) + .build(); + + assertEquals(EvaluationReason.off(), state.getFlagReason("key")); + } + + @Test + public void unknownFlagReturnsNullReason() { + FeatureFlagsState state = FeatureFlagsState.builder().build(); + + assertNull(state.getFlagReason("key")); + } + + @Test + public void reasonIsNullIfReasonsWereNotRecorded() { + FeatureFlagsState state = FeatureFlagsState.builder() + .add("key", 
LDValue.of("value"), 1, EvaluationReason.off(), 10, false, null) + .build(); + + assertNull(state.getFlagReason("key")); + } + + @Test + public void flagIsTreatedAsTrackedIfDebugEventsUntilDateIsInFuture() { + FeatureFlagsState state = FeatureFlagsState.builder(WITH_REASONS, DETAILS_ONLY_FOR_TRACKED_FLAGS) + .add("key", LDValue.of("value"), 1, EvaluationReason.off(), 10, false, System.currentTimeMillis() + 1000000) + .build(); + + assertNotNull(state.getFlagReason("key")); + } + + @Test + public void flagIsNotTreatedAsTrackedIfDebugEventsUntilDateIsInPast() { + FeatureFlagsState state = FeatureFlagsState.builder(WITH_REASONS, DETAILS_ONLY_FOR_TRACKED_FLAGS) + .add("key", LDValue.of("value"), 1, EvaluationReason.off(), 10, false, System.currentTimeMillis() - 1000000) + .build(); + + assertNull(state.getFlagReason("key")); + } + + @Test + public void flagCanHaveNullValue() { + FeatureFlagsState state = FeatureFlagsState.builder() + .add("key", LDValue.ofNull(), 1, null, 10, false, null) + .build(); + + assertEquals(LDValue.ofNull(), state.getFlagValue("key")); + } + + @Test + public void canConvertToValuesMap() { + FeatureFlagsState state = FeatureFlagsState.builder() + .add("key1", LDValue.of("value1"), 0, null, 10, false, null) + .add("key2", LDValue.of("value2"), 1, null, 10, false, null) + .build(); + + ImmutableMap expected = ImmutableMap.of("key1", LDValue.of("value1"), "key2", LDValue.of("value2")); + assertEquals(expected, state.toValuesMap()); + } + + @Test + public void equalInstancesAreEqual() { + FeatureFlagsState justOneFlag = FeatureFlagsState.builder(WITH_REASONS) + .add("key1", LDValue.of("value1"), 0, EvaluationReason.off(), 10, false, null) + .build(); + FeatureFlagsState sameFlagsDifferentInstances1 = FeatureFlagsState.builder(WITH_REASONS) + .add("key1", LDValue.of("value1"), 0, EvaluationReason.off(), 10, false, null) + .add("key2", LDValue.of("value2"), 1, EvaluationReason.fallthrough(), 10, false, null) + .build(); + FeatureFlagsState 
sameFlagsDifferentInstances2 = FeatureFlagsState.builder(WITH_REASONS) + .add("key1", LDValue.of("value1"), 0, EvaluationReason.off(), 10, false, null) + .add("key2", LDValue.of("value2"), 1, EvaluationReason.fallthrough(), 10, false, null) + .build(); + FeatureFlagsState sameFlagsDifferentMetadata = FeatureFlagsState.builder(WITH_REASONS) + .add("key1", LDValue.of("value1"), 1, EvaluationReason.off(), 10, false, null) + .add("key2", LDValue.of("value2"), 1, EvaluationReason.fallthrough(), 10, false, null) + .build(); + FeatureFlagsState noFlagsButValid = FeatureFlagsState.builder(WITH_REASONS).build(); + FeatureFlagsState noFlagsAndNotValid = FeatureFlagsState.builder(WITH_REASONS).valid(false).build(); + + assertEquals(sameFlagsDifferentInstances1, sameFlagsDifferentInstances2); + assertEquals(sameFlagsDifferentInstances1.hashCode(), sameFlagsDifferentInstances2.hashCode()); + assertNotEquals(justOneFlag, sameFlagsDifferentInstances1); + assertNotEquals(sameFlagsDifferentInstances1, sameFlagsDifferentMetadata); + + assertNotEquals(noFlagsButValid, noFlagsAndNotValid); + assertNotEquals(noFlagsButValid, ""); + } + + @Test + public void equalMetadataInstancesAreEqual() { + // Testing this various cases is easier at a low level - equalInstancesAreEqual() above already + // verifies that we test for metadata equality in general + List> allPermutations = new ArrayList<>(); + for (LDValue value: new LDValue[] { LDValue.of(1), LDValue.of(2) }) { + for (Integer variation: new Integer[] { null, 0, 1 }) { + for (EvaluationReason reason: new EvaluationReason[] { null, EvaluationReason.off(), EvaluationReason.fallthrough() }) { + for (Integer version: new Integer[] { null, 10, 11 }) { + for (boolean trackEvents: new boolean[] { false, true }) { + for (boolean trackReason: new boolean[] { false, true }) { + for (Long debugEventsUntilDate: new Long[] { null, 1000L, 1001L }) { + allPermutations.add(() -> new FeatureFlagsState.FlagMetadata( + value, variation, reason, version, 
trackEvents, trackReason, debugEventsUntilDate)); + } + } + } + } + } + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void optionsHaveHumanReadableNames() { + assertEquals("CLIENT_SIDE_ONLY", FlagsStateOption.CLIENT_SIDE_ONLY.toString()); + assertEquals("WITH_REASONS", FlagsStateOption.WITH_REASONS.toString()); + assertEquals("DETAILS_ONLY_FOR_TRACKED_FLAGS", FlagsStateOption.DETAILS_ONLY_FOR_TRACKED_FLAGS.toString()); + } + + @Test + public void canConvertToJson() { + String actualJsonString = JsonSerialization.serialize(makeInstanceForSerialization()); + assertJsonEquals(makeExpectedJsonSerialization(), actualJsonString); + } + + @Test + public void canConvertFromJson() throws SerializationException { + FeatureFlagsState state = JsonSerialization.deserialize(makeExpectedJsonSerialization(), FeatureFlagsState.class); + assertEquals(makeInstanceForSerialization(), state); + } + + private static FeatureFlagsState makeInstanceForSerialization() { + EvalResult eval1 = EvalResult.of(LDValue.of("value1"), 0, EvaluationReason.off()); + DataModel.FeatureFlag flag1 = flagBuilder("key1").version(100).trackEvents(false).build(); + EvalResult eval2 = EvalResult.of(LDValue.of("value2"), 1, EvaluationReason.fallthrough()); + DataModel.FeatureFlag flag2 = flagBuilder("key2").version(200).trackEvents(true).debugEventsUntilDate(1000L).build(); + EvalResult eval3 = EvalResult.of(LDValue.ofNull(), NO_VARIATION, EvaluationReason.error(MALFORMED_FLAG)); + DataModel.FeatureFlag flag3 = flagBuilder("key3").version(300).build(); + return FeatureFlagsState.builder(FlagsStateOption.WITH_REASONS) + .addFlag(flag1, eval1).addFlag(flag2, eval2).addFlag(flag3, eval3).build(); + } + + private static String makeExpectedJsonSerialization() { + return "{\"key1\":\"value1\",\"key2\":\"value2\",\"key3\":null," + + "\"$flagsState\":{" + + "\"key1\":{" + + "\"variation\":0,\"version\":100,\"reason\":{\"kind\":\"OFF\"}" + // note, "trackEvents: false" is omitted + 
"},\"key2\":{" + + "\"variation\":1,\"version\":200,\"reason\":{\"kind\":\"FALLTHROUGH\"},\"trackEvents\":true,\"debugEventsUntilDate\":1000" + + "},\"key3\":{" + + "\"version\":300,\"reason\":{\"kind\":\"ERROR\",\"errorKind\":\"MALFORMED_FLAG\"}" + + "}" + + "}," + + "\"$valid\":true" + + "}"; + } + + @Test + public void canSerializeAndDeserializeWithJackson() throws Exception { + // FeatureFlagsState, being a JsonSerializable, should get the same custom serialization/deserialization + // support that is provided by java-sdk-common for Gson and Jackson. Our Gson interoperability just relies + // on the same Gson annotations that we use internally, but the Jackson adapter will only work if the + // java-server-sdk and java-sdk-common packages are configured together correctly. So we'll test that here. + // If it fails, the symptom will be something like Jackson complaining that it doesn't know how to + // instantiate the FeatureFlagsState class. + + ObjectMapper jacksonMapper = new ObjectMapper(); + jacksonMapper.registerModule(LDJackson.module()); + + String actualJsonString = jacksonMapper.writeValueAsString(makeInstanceForSerialization()); + assertJsonEquals(makeExpectedJsonSerialization(), actualJsonString); + + FeatureFlagsState state = jacksonMapper.readValue(makeExpectedJsonSerialization(), FeatureFlagsState.class); + assertEquals(makeInstanceForSerialization(), state); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagModelDeserializationTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagModelDeserializationTest.java new file mode 100644 index 0000000..c69df41 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagModelDeserializationTest.java @@ -0,0 +1,55 @@ +package com.launchdarkly.sdk.server; + +import com.google.gson.Gson; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; 
+import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Target; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.ModelBuilders.clause; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.target; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertNotNull; + +@SuppressWarnings("javadoc") +public class FlagModelDeserializationTest { + private static final Gson gson = new Gson(); + + // The details of the preprocessed data are verified by DataModelPreprocessingTest; here we're + // just verifying that the preprocessing is actually being done whenever we deserialize a flag. + @Test + public void preprocessingIsDoneOnDeserialization() { + FeatureFlag originalFlag = flagBuilder("flagkey") + .variations("a", "b") + .prerequisites(new Prerequisite("abc", 0)) + .targets(target(0, "x")) + .rules(ruleBuilder().clauses( + clause("key", Operator.in, LDValue.of("x"), LDValue.of("y")) + ).build()) + .build(); + String flagJson = JsonHelpers.serialize(originalFlag); + + FeatureFlag flag = gson.fromJson(flagJson, FeatureFlag.class); + assertNotNull(flag.preprocessed); + for (Prerequisite p: flag.getPrerequisites()) { + assertNotNull(p.preprocessed); + } + for (Target t: flag.getTargets()) { + assertNotNull(t.preprocessed); + } + for (Rule r: flag.getRules()) { + assertThat(r.preprocessed, notNullValue()); + for (Clause c: r.getClauses()) { + assertThat(c.preprocessed, notNullValue()); + } + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagTrackerImplTest.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagTrackerImplTest.java new file mode 100644 index 0000000..ebf93d6 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/FlagTrackerImplTest.java @@ -0,0 +1,107 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; +import com.launchdarkly.sdk.server.interfaces.FlagValueChangeEvent; + +import org.junit.Test; + +import java.util.AbstractMap; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +@SuppressWarnings("javadoc") +public class FlagTrackerImplTest extends BaseTest { + + @Test + public void flagChangeListeners() throws Exception { + String flagKey = "flagkey"; + EventBroadcasterImpl broadcaster = + EventBroadcasterImpl.forFlagChangeEvents(TestComponents.sharedExecutor, testLogger); + + FlagTrackerImpl tracker = new FlagTrackerImpl(broadcaster, null); + + BlockingQueue eventSink1 = new LinkedBlockingQueue<>(); + BlockingQueue eventSink2 = new LinkedBlockingQueue<>(); + FlagChangeListener listener1 = eventSink1::add; + FlagChangeListener listener2 = eventSink2::add; // need to capture the method reference in a variable so it's the same instance when we unregister it + tracker.addFlagChangeListener(listener1); + tracker.addFlagChangeListener(listener2); + + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + + 
broadcaster.broadcast(new FlagChangeEvent(flagKey)); + + FlagChangeEvent event1 = awaitValue(eventSink1, 1, TimeUnit.SECONDS); + FlagChangeEvent event2 = awaitValue(eventSink2, 1, TimeUnit.SECONDS); + assertThat(event1.getKey(), equalTo("flagkey")); + assertThat(event2.getKey(), equalTo("flagkey")); + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + + tracker.removeFlagChangeListener(listener1); + + broadcaster.broadcast(new FlagChangeEvent(flagKey)); + + FlagChangeEvent event3 = awaitValue(eventSink2, 1, TimeUnit.SECONDS); + assertThat(event3.getKey(), equalTo(flagKey)); + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + } + + @Test + public void flagValueChangeListener() throws Exception { + String flagKey = "important-flag"; + LDContext user = LDContext.create("important-user"); + LDContext otherUser = LDContext.create("unimportant-user"); + EventBroadcasterImpl broadcaster = + EventBroadcasterImpl.forFlagChangeEvents(TestComponents.sharedExecutor, testLogger); + Map, LDValue> resultMap = new HashMap<>(); + + FlagTrackerImpl tracker = new FlagTrackerImpl(broadcaster, + (k, u) -> LDValue.normalize(resultMap.get(new AbstractMap.SimpleEntry<>(k, u)))); + + resultMap.put(new AbstractMap.SimpleEntry<>(flagKey, user), LDValue.of(false)); + resultMap.put(new AbstractMap.SimpleEntry<>(flagKey, otherUser), LDValue.of(false)); + + BlockingQueue eventSink1 = new LinkedBlockingQueue<>(); + BlockingQueue eventSink2 = new LinkedBlockingQueue<>(); + BlockingQueue eventSink3 = new LinkedBlockingQueue<>(); + tracker.addFlagValueChangeListener(flagKey, user, eventSink1::add); + FlagChangeListener listener2 = tracker.addFlagValueChangeListener(flagKey, user, eventSink2::add); + tracker.removeFlagChangeListener(listener2); // just verifying that the remove method works + tracker.addFlagValueChangeListener(flagKey, otherUser, 
eventSink3::add); + + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink3, 100, TimeUnit.MILLISECONDS); + + // make the flag true for the first user only, and broadcast a flag change event + resultMap.put(new AbstractMap.SimpleEntry<>(flagKey, user), LDValue.of(true)); + broadcaster.broadcast(new FlagChangeEvent(flagKey)); + + // eventSink1 receives a value change event + FlagValueChangeEvent event1 = awaitValue(eventSink1, 1, TimeUnit.SECONDS); + assertThat(event1.getKey(), equalTo(flagKey)); + assertThat(event1.getOldValue(), equalTo(LDValue.of(false))); + assertThat(event1.getNewValue(), equalTo(LDValue.of(true))); + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + + // eventSink2 doesn't receive one, because it was unregistered + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + + // eventSink3 doesn't receive one, because the flag's value hasn't changed for otherUser + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/InMemoryDataStoreTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/InMemoryDataStoreTest.java new file mode 100644 index 0000000..e507dfb --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/InMemoryDataStoreTest.java @@ -0,0 +1,21 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.subsystems.DataStore; + +import org.junit.Test; + +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class InMemoryDataStoreTest extends DataStoreTestBase { + + @Override + protected DataStore makeStore() { + return new InMemoryDataStore(); + } + + @Test + public void cacheStatsAreNull() { + assertNull(makeStore().getCacheStats()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/JsonHelpersTest.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/JsonHelpersTest.java new file mode 100644 index 0000000..2ca157c --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/JsonHelpersTest.java @@ -0,0 +1,73 @@ +package com.launchdarkly.sdk.server; + +import com.google.gson.annotations.JsonAdapter; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class JsonHelpersTest { + @Test + public void serialize() { + MySerializableClass instance = new MySerializableClass(); + instance.value = 3; + assertEquals("{\"value\":3}", JsonHelpers.serialize(instance)); + } + + @Test + public void deserialize() { + MySerializableClass instance = JsonHelpers.deserialize("{\"value\":3}", MySerializableClass.class); + assertNotNull(instance); + assertEquals(3, instance.value); + } + + @Test(expected=SerializationException.class) + public void deserializeInvalidJson() { + JsonHelpers.deserialize("{\"value", MySerializableClass.class); + } + + @Test + public void postProcessingTypeAdapterFactoryCallsAfterDeserializedIfApplicable() { + // This tests the mechanism that ensures afterDeserialize() is called on every FeatureFlag or + // Segment that we deserialize. + MyClassWithAnAfterDeserializeMethod instance = + JsonHelpers.gsonInstance().fromJson("{}", MyClassWithAnAfterDeserializeMethod.class); + assertNotNull(instance); + assertTrue(instance.wasCalled); + } + + @Test + public void postProcessingTypeAdapterFactoryDoesNothingIfClassDoesNotImplementInterface() { + // If we accidentally apply this type adapter to something inapplicable, it's a no-op. 
+ SomeOtherClass instance = JsonHelpers.gsonInstance().fromJson("{}", SomeOtherClass.class); + assertNotNull(instance); + } + + @Test + public void postProcessingTypeAdapterFactoryDoesNotAffectSerialization() { + MyClassWithAnAfterDeserializeMethod instance = new MyClassWithAnAfterDeserializeMethod(); + String json = JsonHelpers.gsonInstance().toJson(instance); + assertEquals("{\"wasCalled\":false}", json); + } + + static class MySerializableClass { + int value; + } + + @JsonAdapter(JsonHelpers.PostProcessingDeserializableTypeAdapterFactory.class) + static class MyClassWithAnAfterDeserializeMethod implements JsonHelpers.PostProcessingDeserializable { + boolean wasCalled = false; + + @Override + public void afterDeserialized() { + wasCalled = true; + } + } + + @JsonAdapter(JsonHelpers.PostProcessingDeserializableTypeAdapterFactory.class) + static class SomeOtherClass {} +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientBigSegmentsTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientBigSegmentsTest.java new file mode 100644 index 0000000..11c8933 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientBigSegmentsTest.java @@ -0,0 +1,113 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason.BigSegmentsStatus; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.Membership; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.StoreMetadata; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataStore; + +import 
org.easymock.EasyMockSupport; +import org.junit.Before; +import org.junit.Test; + +import java.util.Collections; + +import static com.launchdarkly.sdk.server.BigSegmentStoreWrapper.hashForUserKey; +import static com.launchdarkly.sdk.server.Evaluator.makeBigSegmentRef; +import static com.launchdarkly.sdk.server.ModelBuilders.booleanFlagWithClauses; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingSegment; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.TestComponents.initedDataStore; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.sdk.server.TestUtil.upsertFlag; +import static com.launchdarkly.sdk.server.TestUtil.upsertSegment; +import static com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.createMembershipFromSegmentRefs; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.isA; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class LDClientBigSegmentsTest extends BaseTest { + private final LDContext user = LDContext.create("userkey"); + private final Segment bigSegment = segmentBuilder("segmentkey").unbounded(true).generation(1).build(); + private final FeatureFlag flag = booleanFlagWithClauses("flagkey", clauseMatchingSegment(bigSegment)); + + private LDConfig.Builder configBuilder; + private BigSegmentStore storeMock; + private ComponentConfigurer storeFactoryMock; + private final EasyMockSupport mocks = new EasyMockSupport(); + + @SuppressWarnings("unchecked") + @Before + public void setup() { + DataStore dataStore = initedDataStore(); + upsertFlag(dataStore, flag); + upsertSegment(dataStore, bigSegment); + + storeMock = mocks.niceMock(BigSegmentStore.class); + storeFactoryMock = mocks.strictMock(ComponentConfigurer.class); + 
expect(storeFactoryMock.build(isA(ClientContext.class))).andReturn(storeMock); + + configBuilder = baseConfig().dataStore(specificComponent(dataStore)); + } + + @Test + public void userNotFound() throws Exception { + expect(storeMock.getMetadata()).andAnswer(() -> new StoreMetadata(System.currentTimeMillis())).anyTimes(); + expect(storeMock.getMembership(hashForUserKey(user.getKey()))).andReturn(null); + mocks.replayAll(); + + LDConfig config = configBuilder.bigSegments(Components.bigSegments(storeFactoryMock)).build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + EvaluationDetail result = client.boolVariationDetail("flagkey", user, false); + assertFalse(result.getValue()); + assertEquals(BigSegmentsStatus.HEALTHY, result.getReason().getBigSegmentsStatus()); + } + } + + @Test + public void userFound() throws Exception { + Membership membership = createMembershipFromSegmentRefs(Collections.singleton(makeBigSegmentRef(bigSegment)), null); + expect(storeMock.getMetadata()).andAnswer(() -> new StoreMetadata(System.currentTimeMillis())).anyTimes(); + expect(storeMock.getMembership(hashForUserKey(user.getKey()))).andReturn(membership); + mocks.replayAll(); + + LDConfig config = configBuilder.bigSegments(Components.bigSegments(storeFactoryMock)).build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + EvaluationDetail result = client.boolVariationDetail("flagkey", user, false); + assertTrue(result.getValue()); + assertEquals(BigSegmentsStatus.HEALTHY, result.getReason().getBigSegmentsStatus()); + } + } + + @Test + public void storeError() throws Exception { + expect(storeMock.getMetadata()).andAnswer(() -> new StoreMetadata(System.currentTimeMillis())).anyTimes(); + expect(storeMock.getMembership(hashForUserKey(user.getKey()))).andThrow(new RuntimeException("sorry")); + mocks.replayAll(); + + LDConfig config = configBuilder.bigSegments(Components.bigSegments(storeFactoryMock)).build(); + try (LDClient client = new LDClient("SDK_KEY", 
config)) { + EvaluationDetail result = client.boolVariationDetail("flagkey", user, false); + assertFalse(result.getValue()); + assertEquals(BigSegmentsStatus.STORE_ERROR, result.getReason().getBigSegmentsStatus()); + } + } + + @Test + public void storeNotConfigured() throws Exception { + try (LDClient client = new LDClient("SDK_KEY", configBuilder.build())) { + EvaluationDetail result = client.boolVariationDetail("flagkey", user, false); + assertFalse(result.getValue()); + assertEquals(BigSegmentsStatus.NOT_CONFIGURED, result.getReason().getBigSegmentsStatus()); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEndToEndTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEndToEndTest.java new file mode 100644 index 0000000..4ea95f9 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEndToEndTest.java @@ -0,0 +1,292 @@ +package com.launchdarkly.sdk.server; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.testhelpers.httptest.Handler; +import com.launchdarkly.testhelpers.httptest.Handlers; +import com.launchdarkly.testhelpers.httptest.HttpServer; +import com.launchdarkly.testhelpers.httptest.RequestInfo; +import com.launchdarkly.testhelpers.httptest.SpecialHttpConfigurations; + +import org.junit.Test; + +import java.io.IOException; +import java.net.URI; +import java.time.Duration; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; + +import static com.launchdarkly.sdk.server.Components.externalUpdatesOnly; +import static 
com.launchdarkly.sdk.server.Components.noEvents; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.testhelpers.httptest.Handlers.bodyJson; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class LDClientEndToEndTest extends BaseTest { + private static final Gson gson = new Gson(); + private static final String sdkKey = "sdk-key"; + private static final String flagKey = "flag1"; + private static final DataModel.FeatureFlag flag = flagBuilder(flagKey) + .offVariation(0).variations(LDValue.of(true)) + .build(); + private static final LDContext user = LDContext.create("user-key"); + + private static Handler makePollingSuccessResponse() { + return bodyJson(makeAllDataJson()); + } + + private static Handler makeStreamingSuccessResponse() { + String streamData = "event: put\n" + + "data: {\"data\":" + makeAllDataJson() + "}"; + return Handlers.all(Handlers.SSE.start(), + Handlers.SSE.event(streamData), Handlers.SSE.leaveOpen()); + } + + private static Handler makeInvalidSdkKeyResponse() { + return Handlers.status(401); + } + + private static Handler makeServiceUnavailableResponse() { + return Handlers.status(503); + } + + @Test + public void clientStartsInPollingMode() throws Exception { + try (HttpServer server = HttpServer.start(makePollingSuccessResponse())) { + LDConfig config = baseConfig() + .serviceEndpoints(Components.serviceEndpoints().polling(server.getUri())) + .dataSource(Components.pollingDataSource()) + .events(noEvents()) + .build(); + + try (LDClient client = new LDClient(sdkKey, config)) { + assertTrue(client.isInitialized()); + assertTrue(client.boolVariation(flagKey, user, false)); + } + } + } + + @Test + public void clientStartsInPollingModeAfterRecoverableError() throws Exception 
{ + Handler errorThenSuccess = Handlers.sequential( + makeServiceUnavailableResponse(), + makePollingSuccessResponse() + ); + + try (HttpServer server = HttpServer.start(errorThenSuccess)) { + LDConfig config = baseConfig() + .serviceEndpoints(Components.serviceEndpoints().polling(server.getUri())) + .dataSource(Components.pollingDataSourceInternal() + .pollIntervalWithNoMinimum(Duration.ofMillis(5))) // use small interval because we expect it to retry + .events(noEvents()) + .build(); + + try (LDClient client = new LDClient(sdkKey, config)) { + assertTrue(client.isInitialized()); + assertTrue(client.boolVariation(flagKey, user, false)); + } + } + } + + @Test + public void clientFailsInPollingModeWith401Error() throws Exception { + try (HttpServer server = HttpServer.start(makeInvalidSdkKeyResponse())) { + LDConfig config = baseConfig() + .serviceEndpoints(Components.serviceEndpoints().polling(server.getUri())) + .dataSource(Components.pollingDataSourceInternal() + .pollIntervalWithNoMinimum(Duration.ofMillis(5))) // use small interval so we'll know if it does not stop permanently + .events(noEvents()) + .build(); + + try (LDClient client = new LDClient(sdkKey, config)) { + assertFalse(client.isInitialized()); + assertFalse(client.boolVariation(flagKey, user, false)); + + server.getRecorder().requireRequest(); + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + } + + @Test + public void testPollingModeSpecialHttpConfigurations() throws Exception { + testWithSpecialHttpConfigurations( + makePollingSuccessResponse(), + (serverUri, httpConfig) -> + baseConfig() + .serviceEndpoints(Components.serviceEndpoints().polling(serverUri)) + .dataSource(Components.pollingDataSource()) + .events(noEvents()) + .http(httpConfig)); + } + + @Test + public void clientStartsInStreamingMode() throws Exception { + try (HttpServer server = HttpServer.start(makeStreamingSuccessResponse())) { + LDConfig config = baseConfig() + 
.dataSource(Components.streamingDataSource()) + .serviceEndpoints(Components.serviceEndpoints().streaming(server.getUri())) + .events(noEvents()) + .build(); + + try (LDClient client = new LDClient(sdkKey, config)) { + assertTrue(client.isInitialized()); + assertTrue(client.boolVariation(flagKey, user, false)); + } + } + } + + @Test + public void clientStartsInStreamingModeAfterRecoverableError() throws Exception { + Handler errorThenStream = Handlers.sequential( + makeServiceUnavailableResponse(), + makeStreamingSuccessResponse() + ); + + try (HttpServer server = HttpServer.start(errorThenStream)) { + LDConfig config = baseConfig() + .serviceEndpoints(Components.serviceEndpoints().streaming(server.getUri())) + .dataSource(Components.streamingDataSource().initialReconnectDelay(Duration.ZERO)) + // use zero reconnect delay so we'll know if it does not stop permanently + .events(noEvents()) + .build(); + + try (LDClient client = new LDClient(sdkKey, config)) { + assertTrue(client.isInitialized()); + assertTrue(client.boolVariation(flagKey, user, false)); + + server.getRecorder().requireRequest(); + server.getRecorder().requireRequest(); + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + } + + @Test + public void clientFailsInStreamingModeWith401Error() throws Exception { + try (HttpServer server = HttpServer.start(makeInvalidSdkKeyResponse())) { + LDConfig config = baseConfig() + .serviceEndpoints(Components.serviceEndpoints().streaming(server.getUri())) + .dataSource(Components.streamingDataSource().initialReconnectDelay(Duration.ZERO)) + // use zero reconnect delay so we'll know if it does not stop permanently + .events(noEvents()) + .build(); + + try (LDClient client = new LDClient(sdkKey, config)) { + assertFalse(client.isInitialized()); + assertFalse(client.boolVariation(flagKey, user, false)); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + client.getDataSourceStatusProvider().addStatusListener(statuses::add); + + 
Thread.sleep(100); // make sure it didn't retry the connection + assertThat(client.getDataSourceStatusProvider().getStatus().getState(), + equalTo(DataSourceStatusProvider.State.OFF)); + while (!statuses.isEmpty()) { + // The status listener may or may not have been registered early enough to receive + // the OFF notification, but we should at least not see any *other* statuses. + assertThat(statuses.take().getState(), equalTo(DataSourceStatusProvider.State.OFF)); + } + assertThat(statuses.isEmpty(), equalTo(true)); + + server.getRecorder().requireRequest(); + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + } + + @Test + public void testStreamingModeSpecialHttpConfigurations() throws Exception { + testWithSpecialHttpConfigurations( + makeStreamingSuccessResponse(), + (serverUri, httpConfig) -> + baseConfig() + .serviceEndpoints(Components.serviceEndpoints().streaming(serverUri)) + .dataSource(Components.streamingDataSource()) + .events(noEvents()) + .http(httpConfig)); + } + + @Test + public void clientSendsAnalyticsEvent() throws Exception { + Handler resp = Handlers.status(202); + + try (HttpServer server = HttpServer.start(resp)) { + LDConfig config = baseConfig() + .serviceEndpoints(Components.serviceEndpoints().events(server.getUri())) + .dataSource(externalUpdatesOnly()) + .diagnosticOptOut(true) + .events(Components.sendEvents()) + .build(); + + try (LDClient client = new LDClient(sdkKey, config)) { + assertTrue(client.isInitialized()); + client.identify(user); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("/bulk", req.getPath()); + } + } + + @Test + public void clientSendsDiagnosticEvent() throws Exception { + Handler resp = Handlers.status(202); + + try (HttpServer server = HttpServer.start(resp)) { + LDConfig config = baseConfig() + .serviceEndpoints(Components.serviceEndpoints().events(server.getUri())) + .dataSource(externalUpdatesOnly()) + .events(Components.sendEvents()) + .build(); + + 
try (LDClient client = new LDClient(sdkKey, config)) { + assertTrue(client.isInitialized()); + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("/diagnostic", req.getPath()); + } + } + } + + private static void testWithSpecialHttpConfigurations(Handler handler, + BiFunction, LDConfig.Builder> makeConfig) throws Exception { + SpecialHttpConfigurations.testAll(handler, + (URI serverUri, SpecialHttpConfigurations.Params params) -> { + LDConfig config = makeConfig.apply(serverUri, TestUtil.makeHttpConfigurationFromTestParams(params)) + .startWait(Duration.ofSeconds(10)) // allow extra time to be sure it can connect + .build(); + try (LDClient client = new LDClient(sdkKey, config)) { + if (!client.isInitialized()) { + throw new IOException("client did not initialize successfully"); + } + if (!client.boolVariation(flagKey, user, false)) { + throw new IOException("client said it initialized, but did not have correct flag data"); + } + } + return true; + } + ); + } + + private static String makeAllDataJson() { + JsonObject flagsData = new JsonObject(); + flagsData.add(flagKey, gson.toJsonTree(flag)); + JsonObject allData = new JsonObject(); + allData.add("flags", flagsData); + allData.add("segments", new JsonObject()); + return gson.toJson(allData); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEvaluationTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEvaluationTest.java new file mode 100644 index 0000000..b284afa --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEvaluationTest.java @@ -0,0 +1,656 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableMap; +import com.google.gson.Gson; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.integrations.TestData; 
+import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +import org.junit.Test; + +import java.time.Duration; +import java.util.Map; + +import static com.google.common.collect.Iterables.getFirst; +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.Evaluator.EXPECTED_EXCEPTION_FROM_INVALID_FLAG; +import static com.launchdarkly.sdk.server.Evaluator.INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION; +import static com.launchdarkly.sdk.server.ModelBuilders.booleanFlagWithClauses; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingSegment; +import static com.launchdarkly.sdk.server.ModelBuilders.fallthroughVariation; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.flagWithValue; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.TestComponents.dataStoreThatThrowsException; +import static com.launchdarkly.sdk.server.TestComponents.failedDataSource; +import static com.launchdarkly.sdk.server.TestComponents.initedDataStore; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.sdk.server.TestUtil.upsertFlag; +import static com.launchdarkly.sdk.server.TestUtil.upsertSegment; +import static com.launchdarkly.testhelpers.JsonAssertions.assertJsonEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class LDClientEvaluationTest extends BaseTest { + private static final LDContext context = LDContext.create("userkey"); + private static final 
LDContext invalidContext = LDContext.create(null); + private static final Gson gson = new Gson(); + + private DataStore dataStore = initedDataStore(); + + private LDConfig config = baseConfig() + .dataStore(specificComponent(dataStore)) + .build(); + private LDClientInterface client = new LDClient("SDK_KEY", config); + + public interface EvalMethod { + public ValueT apply(LDClientInterface client, String flagKey, ContextT contextOrUser, ValueT defaultVal); + } + + public interface EvalDetailMethod { + public EvaluationDetail apply(LDClientInterface client, String flagKey, ContextT contextOrUser, ValueT defaultVal); + } + + private void doTypedVariationTests( + EvalMethod variationMethod, + EvalDetailMethod variationDetailMethod, + T expectedValue, + LDValue expectedLdValue, + T defaultValue, + LDValue wrongTypeLdValue + ) + { + String flagKey = "flagkey", + wrongTypeFlagKey = "wrongtypekey", + nullValueFlagKey = "nullvaluekey", + unknownKey = "unknownkey"; + + TestData testData = TestData.dataSource(); + testData.update(testData.flag(flagKey).on(true).variations(LDValue.ofNull(), expectedLdValue) + .variationForUser(context.getKey(), 1)); + testData.update(testData.flag(nullValueFlagKey).on(true).variations(LDValue.ofNull()) + .variationForUser(context.getKey(), 0)); + testData.update(testData.flag(wrongTypeFlagKey).on(true).variations(LDValue.ofNull(), wrongTypeLdValue) + .variationForUser(context.getKey(), 1)); + + LDClientInterface client = new LDClient("SDK_KEY", baseConfig().dataSource(testData).build()); + + assertEquals(expectedValue, variationMethod.apply(client, flagKey, context, defaultValue)); + + assertEquals(EvaluationDetail.fromValue(expectedValue, 1, EvaluationReason.targetMatch()), + variationDetailMethod.apply(client, flagKey, context, defaultValue)); + + // unknown flag + assertEquals(defaultValue, variationMethod.apply(client, unknownKey, context, defaultValue)); + assertEquals(EvaluationDetail.fromValue(defaultValue, -1, 
EvaluationReason.error(EvaluationReason.ErrorKind.FLAG_NOT_FOUND)), + variationDetailMethod.apply(client, unknownKey, context, defaultValue)); + + // invalid/null context/user + assertEquals(defaultValue, variationMethod.apply(client, flagKey, invalidContext, defaultValue)); + assertEquals(defaultValue, variationMethod.apply(client, flagKey, null, defaultValue)); + assertEquals(EvaluationDetail.fromValue(defaultValue, -1, EvaluationReason.error(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED)), + variationDetailMethod.apply(client, flagKey, invalidContext, defaultValue)); + assertEquals(EvaluationDetail.fromValue(defaultValue, -1, EvaluationReason.error(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED)), + variationDetailMethod.apply(client, flagKey, null, defaultValue)); + + // wrong type + if (wrongTypeLdValue != null) + { + assertEquals(defaultValue, variationMethod.apply(client, wrongTypeFlagKey, context, defaultValue)); + assertEquals(EvaluationDetail.fromValue(defaultValue, -1, EvaluationReason.error(EvaluationReason.ErrorKind.WRONG_TYPE)), + variationDetailMethod.apply(client, wrongTypeFlagKey, context, defaultValue)); + } + } + + @Test + public void boolEvaluations() { + doTypedVariationTests( + (LDClientInterface c, String f, LDContext ctx, Boolean d) -> c.boolVariation(f, ctx, d), + (LDClientInterface c, String f, LDContext ctx, Boolean d) -> c.boolVariationDetail(f, ctx, d), + true, + LDValue.of(true), + false, + LDValue.of("wrongtype") + ); + } + + @Test + public void intEvaluations() { + doTypedVariationTests( + (LDClientInterface c, String f, LDContext ctx, Integer d) -> c.intVariation(f, ctx, d), + (LDClientInterface c, String f, LDContext ctx, Integer d) -> c.intVariationDetail(f, ctx, d), + 2, + LDValue.of(2), + 1, + LDValue.of("wrongtype") + ); + } + + @Test + public void doubleEvaluations() { + doTypedVariationTests( + (LDClientInterface c, String f, LDContext ctx, Double d) -> c.doubleVariation(f, ctx, d), + (LDClientInterface c, String f, LDContext 
ctx, Double d) -> c.doubleVariationDetail(f, ctx, d), + 2.5d, + LDValue.of(2.5d), + 1.5d, + LDValue.of("wrongtype") + ); + } + + @Test + public void jsonEvaluations() { + LDValue data = LDValue.buildObject().put("thing", "stuff").build(); + LDValue defaultValue = LDValue.of("default"); + doTypedVariationTests( + (LDClientInterface c, String f, LDContext ctx, LDValue d) -> c.jsonValueVariation(f, ctx, d), + (LDClientInterface c, String f, LDContext ctx, LDValue d) -> c.jsonValueVariationDetail(f, ctx, d), + data, + data, + defaultValue, + null + ); + } + + @Test + public void intVariationReturnsFlagValueEvenIfEncodedAsDouble() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(2.0))); + + assertEquals(2, client.intVariation("key", context, 1)); + } + + @Test + public void intVariationFromDoubleRoundsTowardZero() throws Exception { + upsertFlag(dataStore, flagWithValue("flag1", LDValue.of(2.25))); + upsertFlag(dataStore, flagWithValue("flag2", LDValue.of(2.75))); + upsertFlag(dataStore, flagWithValue("flag3", LDValue.of(-2.25))); + upsertFlag(dataStore, flagWithValue("flag4", LDValue.of(-2.75))); + + assertEquals(2, client.intVariation("flag1", context, 1)); + assertEquals(2, client.intVariation("flag2", context, 1)); + assertEquals(-2, client.intVariation("flag3", context, 1)); + assertEquals(-2, client.intVariation("flag4", context, 1)); + } + + @Test + public void doubleVariationReturnsFlagValueEvenIfEncodedAsInt() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(2))); + + assertEquals(2.0d, client.doubleVariation("key", context, 1.0d), 0d); + } + + @Test + public void doubleVariationReturnsDefaultValueForUnknownFlag() throws Exception { + assertEquals(1.0d, client.doubleVariation("key", context, 1.0d), 0d); + + assertEquals(EvaluationDetail.fromValue(1.0d, NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.FLAG_NOT_FOUND)), + client.doubleVariationDetail("key", context, 1.0d)); + } + + @Test + 
public void doubleVariationReturnsDefaultValueForWrongType() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of("wrong"))); + + assertEquals(1.0d, client.doubleVariation("key", context, 1.0d), 0d); + + assertEquals(EvaluationDetail.fromValue(1.0d, NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.WRONG_TYPE)), + client.doubleVariationDetail("key", context, 1.0d)); + } + + @Test + public void stringVariationWithNullDefaultReturnsFlagValue() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of("b"))); + + assertEquals("b", client.stringVariation("key", context, null)); + } + + @Test + public void stringVariationWithNullDefaultReturnsDefaultValueForUnknownFlag() throws Exception { + assertNull(client.stringVariation("key", context, null)); + + assertEquals(EvaluationDetail.fromValue((String)null, NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.FLAG_NOT_FOUND)), + client.stringVariationDetail("key", context, null)); + } + + @Test + public void stringVariationReturnsDefaultValueForWrongType() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(true))); + + assertEquals("a", client.stringVariation("key", context, "a")); + + assertEquals(EvaluationDetail.fromValue("a", NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.WRONG_TYPE)), + client.stringVariationDetail("key", context, "a")); + } + + @Test + public void stringVariationWithNullDefaultReturnsDefaultValueForWrongType() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(true))); + + assertNull(client.stringVariation("key", context, null)); + + assertEquals(EvaluationDetail.fromValue((String)null, NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.WRONG_TYPE)), + client.stringVariationDetail("key", context, null)); + } + + @Test + public void canMatchUserBySegment() throws Exception { + // This is similar to EvaluatorSegmentMatchTest, but more end-to-end - we're 
verifying that + // the client is forwarding the Evaluator's segment queries to the data store + DataModel.Segment segment = segmentBuilder("segment1") + .version(1) + .included(context.getKey()) + .build(); + upsertSegment(dataStore, segment); + + DataModel.Clause clause = clauseMatchingSegment("segment1"); + DataModel.FeatureFlag feature = booleanFlagWithClauses("feature", clause); + upsertFlag(dataStore, feature); + + assertTrue(client.boolVariation("feature", context, false)); + } + + @Test + public void canTryToMatchUserBySegmentWhenSegmentIsNotFound() throws Exception { + // This is similar to EvaluatorSegmentMatchTest, but more end-to-end - we're verifying that + // the client is forwarding the Evaluator's segment queries to the data store, and that we + // don't blow up if the segment is missing. + DataModel.Clause clause = clauseMatchingSegment("segment1"); + DataModel.FeatureFlag feature = booleanFlagWithClauses("feature", clause); + upsertFlag(dataStore, feature); + + assertFalse(client.boolVariation("feature", context, false)); + } + + @Test + public void canGetDetailsForSuccessfulEvaluation() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(true))); + + EvaluationDetail expectedResult = EvaluationDetail.fromValue(true, + 0, EvaluationReason.off()); + assertEquals(expectedResult, client.boolVariationDetail("key", context, false)); + } + + @Test + public void jsonVariationReturnsNullIfFlagEvaluatesToNull() { + DataModel.FeatureFlag flag = flagBuilder("key").on(false).offVariation(0).variations(LDValue.ofNull()).build(); + upsertFlag(dataStore, flag); + + assertEquals(LDValue.ofNull(), client.jsonValueVariation("key", context, LDValue.buildObject().build())); + } + + @Test + public void typedVariationReturnsZeroValueForTypeIfFlagEvaluatesToNull() { + DataModel.FeatureFlag flag = flagBuilder("key").on(false).offVariation(0).variations(LDValue.ofNull()).build(); + upsertFlag(dataStore, flag); + + assertEquals(false, 
client.boolVariation("key", context, true)); + assertEquals(0, client.intVariation("key", context, 1)); + assertEquals(0d, client.doubleVariation("key", context, 1.0d), 0d); + } + + @Test + public void variationDetailReturnsDefaultIfFlagEvaluatesToNull() { + DataModel.FeatureFlag flag = flagBuilder("key").on(false).offVariation(null).build(); + upsertFlag(dataStore, flag); + + EvaluationDetail expected = EvaluationDetail.fromValue("default", + NO_VARIATION, EvaluationReason.off()); + EvaluationDetail actual = client.stringVariationDetail("key", context, "default"); + assertEquals(expected, actual); + assertTrue(actual.isDefaultValue()); + } + + @Test + public void deletedFlagPlaceholderIsTreatedAsUnknownFlag() { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of("hello")); + upsertFlag(dataStore, flag); + dataStore.upsert(DataModel.FEATURES, flag.getKey(), ItemDescriptor.deletedItem(flag.getVersion() + 1)); + + assertEquals("default", client.stringVariation(flag.getKey(), context, "default")); + } + + @Test + public void appropriateErrorIfClientNotInitialized() throws Exception { + DataStore badDataStore = new InMemoryDataStore(); + LDConfig badConfig = baseConfig() + .dataStore(specificComponent(badDataStore)) + .dataSource(specificComponent(failedDataSource())) + .startWait(Duration.ZERO) + .build(); + try (LDClientInterface badClient = new LDClient("SDK_KEY", badConfig)) { + EvaluationDetail expectedResult = EvaluationDetail.fromValue(false, NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.CLIENT_NOT_READY)); + assertEquals(expectedResult, badClient.boolVariationDetail("key", context, false)); + } + } + + @Test + public void appropriateErrorIfFlagDoesNotExist() throws Exception { + EvaluationDetail expectedResult = EvaluationDetail.fromValue("default", NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.FLAG_NOT_FOUND)); + assertEquals(expectedResult, client.stringVariationDetail("key", context, "default")); + } + + 
@Test + public void appropriateErrorIfContextIsNull() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(true))); + + EvaluationDetail expectedResult = EvaluationDetail.fromValue("default", NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED)); + assertEquals(expectedResult, client.stringVariationDetail("key", null, "default")); + } + + @Test + public void appropriateErrorIfContextIsInvalid() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(true))); + + EvaluationDetail expectedResult = EvaluationDetail.fromValue("default", NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.USER_NOT_SPECIFIED)); + assertEquals(expectedResult, client.stringVariationDetail("key", invalidContext, "default")); + } + + @Test + public void appropriateErrorIfValueWrongType() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of(true))); + + EvaluationDetail expectedResult = EvaluationDetail.fromValue(3, NO_VARIATION, + EvaluationReason.error(EvaluationReason.ErrorKind.WRONG_TYPE)); + assertEquals(expectedResult, client.intVariationDetail("key", context, 3)); + } + + @Test + public void appropriateErrorForUnexpectedExceptionFromDataStore() throws Exception { + RuntimeException exception = new RuntimeException("sorry"); + DataStore badDataStore = dataStoreThatThrowsException(exception); + LDConfig badConfig = baseConfig() + .dataStore(specificComponent(badDataStore)) + .build(); + try (LDClientInterface badClient = new LDClient("SDK_KEY", badConfig)) { + EvaluationDetail expectedResult = EvaluationDetail.fromValue(false, NO_VARIATION, + EvaluationReason.exception(exception)); + assertEquals(expectedResult, badClient.boolVariationDetail("key", context, false)); + } + } + + @Test + public void appropriateErrorForUnexpectedExceptionFromFlagEvaluation() throws Exception { + upsertFlag(dataStore, flagWithValue(INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION, LDValue.of(true))); + + 
EvaluationDetail expectedResult = EvaluationDetail.fromValue(false, NO_VARIATION, + EvaluationReason.exception(EXPECTED_EXCEPTION_FROM_INVALID_FLAG)); + assertEquals(expectedResult, client.boolVariationDetail(INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION, context, false)); + } + + @Test + public void evaluationUsesStoreIfStoreIsInitializedButClientIsNot() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of("value"))); + LDConfig customConfig = baseConfig() + .dataStore(specificComponent(dataStore)) + .dataSource(specificComponent(failedDataSource())) + .startWait(Duration.ZERO) + .build(); + + try (LDClient client = new LDClient("SDK_KEY", customConfig)) { + assertFalse(client.isInitialized()); + + assertEquals("value", client.stringVariation("key", context, "")); + } + } + + @Test + public void allFlagsStateReturnsState() throws Exception { + DataModel.FeatureFlag flag1 = flagBuilder("key1") + .version(100) + .trackEvents(false) + .on(false) + .offVariation(0) + .variations(LDValue.of("value1")) + .build(); + DataModel.FeatureFlag flag2 = flagBuilder("key2") + .version(200) + .trackEvents(true) + .debugEventsUntilDate(1000L) + .on(true) + .fallthrough(fallthroughVariation(1)) + .variations(LDValue.of("off"), LDValue.of("value2")) + .build(); + DataModel.FeatureFlag flag3 = flagBuilder("key3") + .version(300) + .on(true) + .fallthroughVariation(1) + .variations(LDValue.of("x"), LDValue.of("value3")) + .trackEvents(false) + .trackEventsFallthrough(true) + .build(); + upsertFlag(dataStore, flag1); + upsertFlag(dataStore, flag2); + upsertFlag(dataStore, flag3); + + FeatureFlagsState state = client.allFlagsState(context); + assertTrue(state.isValid()); + + String json = "{\"key1\":\"value1\",\"key2\":\"value2\",\"key3\":\"value3\"," + + "\"$flagsState\":{" + + "\"key1\":{" + + "\"variation\":0,\"version\":100" + + "},\"key2\":{" + + "\"variation\":1,\"version\":200,\"trackEvents\":true,\"debugEventsUntilDate\":1000" + + "},\"key3\":{" + + 
"\"variation\":1,\"version\":300,\"trackEvents\":true,\"trackReason\":true,\"reason\":{\"kind\":\"FALLTHROUGH\"}" + + "}" + + "}," + + "\"$valid\":true" + + "}"; + assertJsonEquals(json, gson.toJson(state)); + } + + @Test + public void allFlagsStateCanFilterForOnlyClientSideFlags() { + DataModel.FeatureFlag flag1 = flagBuilder("server-side-1").build(); + DataModel.FeatureFlag flag2 = flagBuilder("server-side-2").build(); + DataModel.FeatureFlag flag3 = flagBuilder("client-side-1").clientSide(true) + .variations(LDValue.of("value1")).offVariation(0).build(); + DataModel.FeatureFlag flag4 = flagBuilder("client-side-2").clientSide(true) + .variations(LDValue.of("value2")).offVariation(0).build(); + upsertFlag(dataStore, flag1); + upsertFlag(dataStore, flag2); + upsertFlag(dataStore, flag3); + upsertFlag(dataStore, flag4); + + FeatureFlagsState state = client.allFlagsState(context, FlagsStateOption.CLIENT_SIDE_ONLY); + assertTrue(state.isValid()); + + Map allValues = state.toValuesMap(); + assertEquals(ImmutableMap.of("client-side-1", LDValue.of("value1"), "client-side-2", LDValue.of("value2")), allValues); + } + + @Test + public void allFlagsStateReturnsStateWithReasons() { + DataModel.FeatureFlag flag1 = flagBuilder("key1") + .version(100) + .trackEvents(false) + .on(false) + .offVariation(0) + .variations(LDValue.of("value1")) + .build(); + DataModel.FeatureFlag flag2 = flagBuilder("key2") + .version(200) + .trackEvents(true) + .debugEventsUntilDate(1000L) + .on(true) + .fallthrough(fallthroughVariation(1)) + .variations(LDValue.of("off"), LDValue.of("value2")) + .build(); + upsertFlag(dataStore, flag1); + upsertFlag(dataStore, flag2); + + FeatureFlagsState state = client.allFlagsState(context, FlagsStateOption.WITH_REASONS); + assertTrue(state.isValid()); + + String json = "{\"key1\":\"value1\",\"key2\":\"value2\"," + + "\"$flagsState\":{" + + "\"key1\":{" + + "\"variation\":0,\"version\":100,\"reason\":{\"kind\":\"OFF\"}" + + "},\"key2\":{" + + 
"\"variation\":1,\"version\":200,\"reason\":{\"kind\":\"FALLTHROUGH\"},\"trackEvents\":true,\"debugEventsUntilDate\":1000" + + "}" + + "}," + + "\"$valid\":true" + + "}"; + assertJsonEquals(json, gson.toJson(state)); + } + + @Test + public void allFlagsStateCanOmitDetailsForUntrackedFlags() { + long futureTime = System.currentTimeMillis() + 1000000; + DataModel.FeatureFlag flag1 = flagBuilder("key1") + .version(100) + .trackEvents(false) + .on(false) + .offVariation(0) + .variations(LDValue.of("value1")) + .build(); + DataModel.FeatureFlag flag2 = flagBuilder("key2") + .version(200) + .trackEvents(true) + .on(true) + .fallthrough(fallthroughVariation(1)) + .variations(LDValue.of("off"), LDValue.of("value2")) + .build(); + DataModel.FeatureFlag flag3 = flagBuilder("key3") + .version(300) + .trackEvents(false) + .debugEventsUntilDate(futureTime) // event tracking is turned on temporarily even though trackEvents is false + .on(false) + .offVariation(0) + .variations(LDValue.of("value3")) + .build(); + upsertFlag(dataStore, flag1); + upsertFlag(dataStore, flag2); + upsertFlag(dataStore, flag3); + + FeatureFlagsState state = client.allFlagsState(context, FlagsStateOption.WITH_REASONS, FlagsStateOption.DETAILS_ONLY_FOR_TRACKED_FLAGS); + assertTrue(state.isValid()); + + String json = "{\"key1\":\"value1\",\"key2\":\"value2\",\"key3\":\"value3\"," + + "\"$flagsState\":{" + + "\"key1\":{" + + "\"variation\":0" + // note, version and reason are omitted, and so is trackEvents: false + "},\"key2\":{" + + "\"variation\":1,\"version\":200,\"reason\":{\"kind\":\"FALLTHROUGH\"},\"trackEvents\":true" + + "},\"key3\":{" + + "\"variation\":0,\"version\":300,\"reason\":{\"kind\":\"OFF\"},\"debugEventsUntilDate\":" + futureTime + + "}" + + "}," + + "\"$valid\":true" + + "}"; + assertJsonEquals(json, gson.toJson(state)); + } + + @Test + public void allFlagsStateFiltersOutDeletedFlags() throws Exception { + DataModel.FeatureFlag flag1 = flagBuilder("key1").version(1).build(); + 
DataModel.FeatureFlag flag2 = flagBuilder("key2").version(1).build(); + upsertFlag(dataStore, flag1); + upsertFlag(dataStore, flag2); + dataStore.upsert(FEATURES, flag2.getKey(), ItemDescriptor.deletedItem(flag2.getVersion() + 1)); + + FeatureFlagsState state = client.allFlagsState(context); + assertTrue(state.isValid()); + + Map valuesMap = state.toValuesMap(); + assertEquals(1, valuesMap.size()); + assertEquals(flag1.getKey(), getFirst(valuesMap.keySet(), null)); + } + + @Test + public void allFlagsStateReturnsEmptyStateForNullContext() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of("value"))); + + FeatureFlagsState state = client.allFlagsState((LDContext)null); + assertFalse(state.isValid()); + assertEquals(0, state.toValuesMap().size()); + } + + @Test + public void allFlagsStateReturnsEmptyStateForInvalidContext() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of("value"))); + + FeatureFlagsState state = client.allFlagsState(invalidContext); + assertFalse(state.isValid()); + assertEquals(0, state.toValuesMap().size()); + } + + @Test + public void allFlagsStateReturnsEmptyStateIfDataStoreThrowsException() throws Exception { + LDConfig customConfig = baseConfig() + .dataStore(specificComponent(TestComponents.dataStoreThatThrowsException(new RuntimeException("sorry")))) + .startWait(Duration.ZERO) + .build(); + + try (LDClient client = new LDClient("SDK_KEY", customConfig)) { + FeatureFlagsState state = client.allFlagsState(context); + assertFalse(state.isValid()); + assertEquals(0, state.toValuesMap().size()); + } + } + + @Test + public void allFlagsStateUsesNullValueForFlagIfEvaluationThrowsException() throws Exception { + upsertFlag(dataStore, flagWithValue("goodkey", LDValue.of("value"))); + upsertFlag(dataStore, flagWithValue(INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION, LDValue.of("nope"))); + + FeatureFlagsState state = client.allFlagsState(context); + assertTrue(state.isValid()); + assertEquals(2, 
state.toValuesMap().size()); + assertEquals(LDValue.of("value"), state.getFlagValue("goodkey")); + assertEquals(LDValue.ofNull(), state.getFlagValue(INVALID_FLAG_KEY_THAT_THROWS_EXCEPTION)); + } + + @Test + public void allFlagsStateUsesStoreDataIfStoreIsInitializedButClientIsNot() throws Exception { + upsertFlag(dataStore, flagWithValue("key", LDValue.of("value"))); + LDConfig customConfig = baseConfig() + .dataStore(specificComponent(dataStore)) + .dataSource(specificComponent(failedDataSource())) + .startWait(Duration.ZERO) + .build(); + + try (LDClient client = new LDClient("SDK_KEY", customConfig)) { + assertFalse(client.isInitialized()); + + FeatureFlagsState state = client.allFlagsState(context); + assertTrue(state.isValid()); + assertEquals(LDValue.of("value"), state.getFlagValue("key")); + } + } + + @Test + public void allFlagsStateReturnsEmptyStateIfClientAndStoreAreNotInitialized() throws Exception { + LDConfig customConfig = baseConfig() + .dataSource(specificComponent(failedDataSource())) + .startWait(Duration.ZERO) + .build(); + + try (LDClient client = new LDClient("SDK_KEY", customConfig)) { + assertFalse(client.isInitialized()); + + FeatureFlagsState state = client.allFlagsState(context); + assertFalse(state.isValid()); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEventTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEventTest.java new file mode 100644 index 0000000..4aea2e8 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientEventTest.java @@ -0,0 +1,620 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.EvaluationReason.ErrorKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.Event; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import 
com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.subsystems.DataStore; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.ModelBuilders.clauseMatchingContext; +import static com.launchdarkly.sdk.server.ModelBuilders.clauseNotMatchingContext; +import static com.launchdarkly.sdk.server.ModelBuilders.fallthroughVariation; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.flagWithValue; +import static com.launchdarkly.sdk.server.ModelBuilders.prerequisite; +import static com.launchdarkly.sdk.server.ModelBuilders.ruleBuilder; +import static com.launchdarkly.sdk.server.TestComponents.initedDataStore; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.sdk.server.TestUtil.upsertFlag; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasKey; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class LDClientEventTest extends BaseTest { + private static final LDContext context = LDContext.create("userkey"); + private static final LDContext invalidContext = LDContext.create(null); + + private DataStore dataStore = initedDataStore(); + private TestComponents.TestEventProcessor eventSink = new TestComponents.TestEventProcessor(); + private LDConfig config = baseConfig() + .dataStore(specificComponent(dataStore)) + .events(specificComponent(eventSink)) + .build(); + private LDClientInterface client = new LDClient("SDK_KEY", config); + + @Test + public void identifySendsEvent() throws 
Exception { + client.identify(context); + + assertEquals(1, eventSink.events.size()); + Event e = eventSink.events.get(0); + assertEquals(Event.Identify.class, e.getClass()); + Event.Identify ie = (Event.Identify)e; + assertEquals(context.getKey(), ie.getContext().getKey()); + } + + @Test + public void identifyWithNullContextOrUserDoesNotSendEvent() { + client.identify((LDContext)null); + assertEquals(0, eventSink.events.size()); + } + + @Test + public void identifyWithInvalidContextDoesNotSendEvent() { + client.identify(invalidContext); + assertEquals(0, eventSink.events.size()); + } + + @Test + public void trackSendsEventWithoutData() throws Exception { + client.track("eventkey", context); + + assertEquals(1, eventSink.events.size()); + Event e = eventSink.events.get(0); + assertEquals(Event.Custom.class, e.getClass()); + Event.Custom ce = (Event.Custom)e; + assertEquals(context.getKey(), ce.getContext().getKey()); + assertEquals("eventkey", ce.getKey()); + assertEquals(LDValue.ofNull(), ce.getData()); + } + + @Test + public void trackSendsEventWithData() throws Exception { + LDValue data = LDValue.buildObject().put("thing", LDValue.of("stuff")).build(); + client.trackData("eventkey", context, data); + + assertEquals(1, eventSink.events.size()); + Event e = eventSink.events.get(0); + assertEquals(Event.Custom.class, e.getClass()); + Event.Custom ce = (Event.Custom)e; + assertEquals(context.getKey(), ce.getContext().getKey()); + assertEquals("eventkey", ce.getKey()); + assertEquals(data, ce.getData()); + } + + @Test + public void trackSendsEventWithDataAndMetricValue() throws Exception { + LDValue data = LDValue.buildObject().put("thing", LDValue.of("stuff")).build(); + double metricValue = 1.5; + client.trackMetric("eventkey", context, data, metricValue); + + assertEquals(1, eventSink.events.size()); + Event e = eventSink.events.get(0); + assertEquals(Event.Custom.class, e.getClass()); + Event.Custom ce = (Event.Custom)e; + assertEquals(context.getKey(), 
ce.getContext().getKey()); + assertEquals("eventkey", ce.getKey()); + assertEquals(data, ce.getData()); + assertEquals(Double.valueOf(metricValue), ce.getMetricValue()); + } + + @Test + public void trackWithNullContextOrUserDoesNotSendEvent() { + client.track("eventkey", (LDContext)null); + assertEquals(0, eventSink.events.size()); + + client.trackData("eventkey", (LDContext)null, LDValue.of(1)); + assertEquals(0, eventSink.events.size()); + + client.trackMetric("eventkey", (LDContext)null, LDValue.of(1), 1.5); + assertEquals(0, eventSink.events.size()); + } + + @Test + public void trackWithInvalidContextDoesNotSendEvent() { + client.track("eventkey", invalidContext); + assertEquals(0, eventSink.events.size()); + + client.trackData("eventkey", invalidContext, LDValue.of(1)); + assertEquals(0, eventSink.events.size()); + + client.trackMetric("eventkey", invalidContext, LDValue.of(1), 1.5); + assertEquals(0, eventSink.events.size()); + } + + @Test + public void boolVariationSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of(true)); + upsertFlag(dataStore, flag); + + client.boolVariation("key", context, false); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of(true), LDValue.of(false), null, null); + } + + @Test + public void boolVariationSendsEventForUnknownFlag() throws Exception { + client.boolVariation("key", context, false); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of(false), null, null); + } + + @Test + public void boolVariationDetailSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of(true)); + upsertFlag(dataStore, flag); + + client.boolVariationDetail("key", context, false); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of(true), LDValue.of(false), null, EvaluationReason.off()); + } + + @Test + 
public void boolVariationDetailSendsEventForUnknownFlag() throws Exception { + client.boolVariationDetail("key", context, false); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of(false), null, + EvaluationReason.error(ErrorKind.FLAG_NOT_FOUND)); + } + + @Test + public void intVariationSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of(2)); + upsertFlag(dataStore, flag); + + client.intVariation("key", context, 1); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of(2), LDValue.of(1), null, null); + } + + @Test + public void intVariationSendsEventForUnknownFlag() throws Exception { + client.intVariation("key", context, 1); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of(1), null, null); + } + + @Test + public void intVariationDetailSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of(2)); + upsertFlag(dataStore, flag); + + client.intVariationDetail("key", context, 1); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of(2), LDValue.of(1), null, EvaluationReason.off()); + } + + @Test + public void intVariationDetailSendsEventForUnknownFlag() throws Exception { + client.intVariationDetail("key", context, 1); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of(1), null, + EvaluationReason.error(ErrorKind.FLAG_NOT_FOUND)); + } + + @Test + public void doubleVariationSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of(2.5d)); + upsertFlag(dataStore, flag); + + client.doubleVariation("key", context, 1.0d); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of(2.5d), LDValue.of(1.0d), null, null); + } + 
+ @Test + public void doubleVariationSendsEventForUnknownFlag() throws Exception { + client.doubleVariation("key", context, 1.0d); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of(1.0), null, null); + } + + @Test + public void doubleVariationDetailSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of(2.5d)); + upsertFlag(dataStore, flag); + + client.doubleVariationDetail("key", context, 1.0d); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of(2.5d), LDValue.of(1.0d), null, EvaluationReason.off()); + } + + @Test + public void doubleVariationDetailSendsEventForUnknownFlag() throws Exception { + client.doubleVariationDetail("key", context, 1.0d); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of(1.0), null, + EvaluationReason.error(ErrorKind.FLAG_NOT_FOUND)); + } + + @Test + public void stringVariationSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of("b")); + upsertFlag(dataStore, flag); + + client.stringVariation("key", context, "a"); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of("b"), LDValue.of("a"), null, null); + } + + @Test + public void stringVariationSendsEventForUnknownFlag() throws Exception { + client.stringVariation("key", context, "a"); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of("a"), null, null); + } + + @Test + public void stringVariationDetailSendsEvent() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of("b")); + upsertFlag(dataStore, flag); + + client.stringVariationDetail("key", context, "a"); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, LDValue.of("b"), LDValue.of("a"), 
null, EvaluationReason.off()); + } + + @Test + public void stringVariationDetailSendsEventForUnknownFlag() throws Exception { + client.stringVariationDetail("key", context, "a"); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", LDValue.of("a"), null, + EvaluationReason.error(ErrorKind.FLAG_NOT_FOUND)); + } + + @Test + public void jsonValueVariationDetailSendsEvent() throws Exception { + LDValue data = LDValue.buildObject().put("thing", LDValue.of("stuff")).build(); + DataModel.FeatureFlag flag = flagWithValue("key", data); + upsertFlag(dataStore, flag); + LDValue defaultVal = LDValue.of(42); + + client.jsonValueVariationDetail("key", context, defaultVal); + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), flag, data, defaultVal, null, EvaluationReason.off()); + } + + @Test + public void jsonValueVariationDetailSendsEventForUnknownFlag() throws Exception { + LDValue defaultVal = LDValue.of(42); + + client.jsonValueVariationDetail("key", context, defaultVal); + assertEquals(1, eventSink.events.size()); + checkUnknownFeatureEvent(eventSink.events.get(0), "key", defaultVal, null, + EvaluationReason.error(ErrorKind.FLAG_NOT_FOUND)); + } + + @Test + public void variationDoesNotSendEventForInvalidContextOrNullUser() throws Exception { + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of("value")); + upsertFlag(dataStore, flag); + + client.boolVariation(flag.getKey(), invalidContext, false); + assertThat(eventSink.events, empty()); + + client.boolVariationDetail(flag.getKey(), invalidContext, false); + assertThat(eventSink.events, empty()); + } + + @Test + public void eventTrackingAndReasonCanBeForcedForRule() throws Exception { + Clause clause = clauseMatchingContext(context); + Rule rule = ruleBuilder().id("id").clauses(clause).variation(1).trackEvents(true).build(); + FeatureFlag flag = flagBuilder("flag") + .on(true) + .rules(rule) + .offVariation(0) + 
.variations(LDValue.of("off"), LDValue.of("on")) + .build(); + upsertFlag(dataStore, flag); + + client.stringVariation("flag", context, "default"); + + // Note, we did not call stringVariationDetail and the flag is not tracked, but we should still get + // tracking and a reason, because the rule-level trackEvents flag is on for the matched rule. + + assertEquals(1, eventSink.events.size()); + Event.FeatureRequest event = (Event.FeatureRequest)eventSink.events.get(0); + assertTrue(event.isTrackEvents()); + assertEquals(EvaluationReason.ruleMatch(0, "id"), event.getReason()); + } + + @Test + public void eventTrackingAndReasonAreNotForcedIfFlagIsNotSetForMatchingRule() throws Exception { + Clause clause0 = clauseNotMatchingContext(context); + Clause clause1 = clauseMatchingContext(context); + Rule rule0 = ruleBuilder().id("id0").clauses(clause0).variation(1).trackEvents(true).build(); + Rule rule1 = ruleBuilder().id("id1").clauses(clause1).variation(1).trackEvents(false).build(); + FeatureFlag flag = flagBuilder("flag") + .on(true) + .rules(rule0, rule1) + .offVariation(0) + .variations(LDValue.of("off"), LDValue.of("on")) + .build(); + upsertFlag(dataStore, flag); + + client.stringVariation("flag", context, "default"); + + // It matched rule1, which has trackEvents: false, so we don't get the override behavior + + assertEquals(1, eventSink.events.size()); + Event.FeatureRequest event = (Event.FeatureRequest)eventSink.events.get(0); + assertFalse(event.isTrackEvents()); + assertNull(event.getReason()); + } + + @Test + public void eventTrackingAndReasonCanBeForcedForFallthrough() throws Exception { + DataModel.FeatureFlag flag = flagBuilder("flag") + .on(true) + .fallthrough(new DataModel.VariationOrRollout(0, null)) + .variations(LDValue.of("fall"), LDValue.of("off"), LDValue.of("on")) + .trackEventsFallthrough(true) + .build(); + upsertFlag(dataStore, flag); + + client.stringVariation("flag", context, "default"); + + // Note, we did not call stringVariationDetail and 
the flag is not tracked, but we should still get + // tracking and a reason, because trackEventsFallthrough is on and the evaluation fell through. + + assertEquals(1, eventSink.events.size()); + Event.FeatureRequest event = (Event.FeatureRequest)eventSink.events.get(0); + assertTrue(event.isTrackEvents()); + assertEquals(EvaluationReason.fallthrough(), event.getReason()); + } + + @Test + public void eventTrackingAndReasonAreNotForcedForFallthroughIfFlagIsNotSet() throws Exception { + DataModel.FeatureFlag flag = flagBuilder("flag") + .on(true) + .fallthrough(new DataModel.VariationOrRollout(0, null)) + .variations(LDValue.of("fall"), LDValue.of("off"), LDValue.of("on")) + .trackEventsFallthrough(false) + .build(); + upsertFlag(dataStore, flag); + + client.stringVariation("flag", context, "default"); + + assertEquals(1, eventSink.events.size()); + Event.FeatureRequest event = (Event.FeatureRequest)eventSink.events.get(0); + assertFalse(event.isTrackEvents()); + assertNull(event.getReason()); + } + + @Test + public void eventTrackingAndReasonAreNotForcedForFallthroughIfReasonIsNotFallthrough() throws Exception { + DataModel.FeatureFlag flag = flagBuilder("flag") + .on(false) // so the evaluation reason will be OFF, not FALLTHROUGH + .offVariation(1) + .fallthrough(new DataModel.VariationOrRollout(0, null)) + .variations(LDValue.of("fall"), LDValue.of("off"), LDValue.of("on")) + .trackEventsFallthrough(true) + .build(); + upsertFlag(dataStore, flag); + + client.stringVariation("flag", context, "default"); + + assertEquals(1, eventSink.events.size()); + Event.FeatureRequest event = (Event.FeatureRequest)eventSink.events.get(0); + assertFalse(event.isTrackEvents()); + assertNull(event.getReason()); + } + + @Test + public void eventIsSentForExistingPrererequisiteFlag() throws Exception { + DataModel.FeatureFlag f0 = flagBuilder("feature0") + .on(true) + .prerequisites(prerequisite("feature1", 1)) + .fallthrough(fallthroughVariation(0)) + .offVariation(1) + 
.variations(LDValue.of("fall"), LDValue.of("off"), LDValue.of("on")) + .version(1) + .build(); + DataModel.FeatureFlag f1 = flagBuilder("feature1") + .on(true) + .fallthrough(fallthroughVariation(1)) + .variations(LDValue.of("nogo"), LDValue.of("go")) + .version(2) + .build(); + upsertFlag(dataStore, f0); + upsertFlag(dataStore, f1); + + client.stringVariation("feature0", context, "default"); + + assertEquals(2, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), f1, LDValue.of("go"), LDValue.ofNull(), "feature0", null); + checkFeatureEvent(eventSink.events.get(1), f0, LDValue.of("fall"), LDValue.of("default"), null, null); + } + + @Test + public void eventIsSentWithReasonForExistingPrererequisiteFlag() throws Exception { + DataModel.FeatureFlag f0 = flagBuilder("feature0") + .on(true) + .prerequisites(prerequisite("feature1", 1)) + .fallthrough(fallthroughVariation(0)) + .offVariation(1) + .variations(LDValue.of("fall"), LDValue.of("off"), LDValue.of("on")) + .version(1) + .build(); + DataModel.FeatureFlag f1 = flagBuilder("feature1") + .on(true) + .fallthrough(fallthroughVariation(1)) + .variations(LDValue.of("nogo"), LDValue.of("go")) + .version(2) + .build(); + upsertFlag(dataStore, f0); + upsertFlag(dataStore, f1); + + client.stringVariationDetail("feature0", context, "default"); + + assertEquals(2, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), f1, LDValue.of("go"), LDValue.ofNull(), "feature0", EvaluationReason.fallthrough()); + checkFeatureEvent(eventSink.events.get(1), f0, LDValue.of("fall"), LDValue.of("default"), null, EvaluationReason.fallthrough()); + } + + @Test + public void eventIsNotSentForUnknownPrererequisiteFlag() throws Exception { + DataModel.FeatureFlag f0 = flagBuilder("feature0") + .on(true) + .prerequisites(prerequisite("feature1", 1)) + .fallthrough(fallthroughVariation(0)) + .offVariation(1) + .variations(LDValue.of("fall"), LDValue.of("off"), LDValue.of("on")) + .version(1) + .build(); + 
upsertFlag(dataStore, f0); + + client.stringVariation("feature0", context, "default"); + + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), f0, LDValue.of("off"), LDValue.of("default"), null, null); + } + + @Test + public void failureReasonIsGivenForUnknownPrererequisiteFlagIfDetailsWereRequested() throws Exception { + DataModel.FeatureFlag f0 = flagBuilder("feature0") + .on(true) + .prerequisites(prerequisite("feature1", 1)) + .fallthrough(fallthroughVariation(0)) + .offVariation(1) + .variations(LDValue.of("fall"), LDValue.of("off"), LDValue.of("on")) + .version(1) + .build(); + upsertFlag(dataStore, f0); + + client.stringVariationDetail("feature0", context, "default"); + + assertEquals(1, eventSink.events.size()); + checkFeatureEvent(eventSink.events.get(0), f0, LDValue.of("off"), LDValue.of("default"), null, + EvaluationReason.prerequisiteFailed("feature1")); + } + + @Test + public void canFlush() { + assertEquals(0, eventSink.flushCount); + client.flush(); + assertEquals(1, eventSink.flushCount); + } + + @Test + public void identifyWithEventsDisabledDoesNotCauseError() throws Exception { + LDConfig config = baseConfig() + .events(Components.noEvents()) + .dataSource(Components.externalUpdatesOnly()) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + client.identify(context); + } + } + + @Test + public void trackWithEventsDisabledDoesNotCauseError() throws Exception { + LDConfig config = baseConfig() + .events(Components.noEvents()) + .dataSource(Components.externalUpdatesOnly()) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + client.track("event", context); + } + } + + @Test + public void flushWithEventsDisabledDoesNotCauseError() throws Exception { + LDConfig config = baseConfig() + .events(Components.noEvents()) + .dataSource(Components.externalUpdatesOnly()) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + client.flush(); + } + } + + @Test + 
public void allFlagsStateGeneratesNoEvaluationEvents() { + DataModel.FeatureFlag flag = flagBuilder("flag") + .on(true) + .fallthrough(fallthroughVariation(0)) + .offVariation(1) + .variations(LDValue.of(true), LDValue.of(false)) + .version(1) + .build(); + upsertFlag(dataStore, flag); + + FeatureFlagsState state = client.allFlagsState(context); + assertThat(state.toValuesMap(), hasKey(flag.getKey())); + + assertThat(eventSink.events, empty()); + } + + @Test + public void allFlagsStateGeneratesNoPrerequisiteEvaluationEvents() { + DataModel.FeatureFlag flag1 = flagBuilder("flag1") + .on(true) + .fallthrough(fallthroughVariation(0)) + .offVariation(1) + .variations(LDValue.of(true), LDValue.of(false)) + .version(1) + .build(); + DataModel.FeatureFlag flag0 = flagBuilder("flag0") + .on(true) + .fallthrough(fallthroughVariation(0)) + .offVariation(1) + .variations(LDValue.of(true), LDValue.of(false)) + .prerequisites(new Prerequisite(flag1.getKey(), 0)) + .version(1) + .build(); + upsertFlag(dataStore, flag1); + upsertFlag(dataStore, flag0); + + FeatureFlagsState state = client.allFlagsState(context); + assertThat(state.toValuesMap(), allOf(hasKey(flag0.getKey()), hasKey(flag1.getKey()))); + + assertThat(eventSink.events, empty()); + } + + private void checkFeatureEvent(Event e, DataModel.FeatureFlag flag, LDValue value, LDValue defaultVal, + String prereqOf, EvaluationReason reason) { + assertEquals(Event.FeatureRequest.class, e.getClass()); + Event.FeatureRequest fe = (Event.FeatureRequest)e; + assertEquals(flag.getKey(), fe.getKey()); + assertEquals(context.getKey(), fe.getContext().getKey()); + assertEquals(flag.getVersion(), fe.getVersion()); + assertEquals(value, fe.getValue()); + assertEquals(defaultVal, fe.getDefaultVal()); + assertEquals(prereqOf, fe.getPrereqOf()); + assertEquals(reason, fe.getReason()); + assertEquals(flag.isTrackEvents(), fe.isTrackEvents()); + assertEquals(flag.getDebugEventsUntilDate(), fe.getDebugEventsUntilDate()); + } + + private void 
checkUnknownFeatureEvent(Event e, String key, LDValue defaultVal, String prereqOf, + EvaluationReason reason) { + assertEquals(Event.FeatureRequest.class, e.getClass()); + Event.FeatureRequest fe = (Event.FeatureRequest)e; + assertEquals(key, fe.getKey()); + assertEquals(context.getKey(), fe.getContext().getKey()); + assertEquals(-1, fe.getVersion()); + assertEquals(-1, fe.getVariation()); + assertEquals(defaultVal, fe.getValue()); + assertEquals(defaultVal, fe.getDefaultVal()); + assertEquals(prereqOf, fe.getPrereqOf()); + assertEquals(reason, fe.getReason()); + assertFalse(fe.isTrackEvents()); + assertNull(fe.getDebugEventsUntilDate()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientExternalUpdatesOnlyTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientExternalUpdatesOnlyTest.java new file mode 100644 index 0000000..ca91095 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientExternalUpdatesOnlyTest.java @@ -0,0 +1,67 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataStore; + +import org.junit.Test; + +import java.io.IOException; + +import static com.launchdarkly.sdk.server.ModelBuilders.flagWithValue; +import static com.launchdarkly.sdk.server.TestComponents.initedDataStore; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.sdk.server.TestUtil.upsertFlag; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class LDClientExternalUpdatesOnlyTest extends BaseTest { + @Test + public void externalUpdatesOnlyClientHasNullDataSource() throws Exception { + LDConfig config = baseConfig() + .dataSource(Components.externalUpdatesOnly()) + .build(); + try 
(LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals(ComponentsImpl.NullDataSource.class, client.dataSource.getClass()); + } + } + + @Test + public void externalUpdatesOnlyClientHasDefaultEventProcessor() throws Exception { + LDConfig config = baseConfig() + .dataSource(Components.externalUpdatesOnly()) + .events(Components.sendEvents()) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals(DefaultEventProcessorWrapper.class, client.eventProcessor.getClass()); + } + } + + @Test + public void externalUpdatesOnlyClientIsInitialized() throws Exception { + LDConfig config = baseConfig() + .dataSource(Components.externalUpdatesOnly()) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertTrue(client.isInitialized()); + + assertEquals(DataSourceStatusProvider.State.VALID, client.getDataSourceStatusProvider().getStatus().getState()); + } + } + + @Test + public void externalUpdatesOnlyClientGetsFlagFromDataStore() throws IOException { + DataStore testDataStore = initedDataStore(); + LDConfig config = baseConfig() + .dataSource(Components.externalUpdatesOnly()) + .dataStore(specificComponent(testDataStore)) + .build(); + DataModel.FeatureFlag flag = flagWithValue("key", LDValue.of(true)); + upsertFlag(testDataStore, flag); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertTrue(client.boolVariation("key", LDContext.create("user"), false)); + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientListenersTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientListenersTest.java new file mode 100644 index 0000000..da871e4 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientListenersTest.java @@ -0,0 +1,329 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import 
com.launchdarkly.sdk.server.TestComponents.ContextCapturingFactory; +import com.launchdarkly.sdk.server.integrations.MockPersistentDataStore; +import com.launchdarkly.sdk.server.integrations.TestData; +import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; +import com.launchdarkly.sdk.server.interfaces.FlagValueChangeEvent; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import org.easymock.EasyMockSupport; +import org.junit.Test; + +import java.time.Duration; +import java.time.Instant; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * This file 
contains tests for all of the event broadcaster/listener functionality in the client, plus + * related methods for looking at the same kinds of status values that can be broadcast to listeners. + * It uses mock implementations of the data source and data store, so that it is only the status + * monitoring mechanisms that are being tested, not the status behavior of specific real components. + *

    + * Parts of this functionality are also covered by lower-level component tests like + * DataSourceUpdatesImplTest. However, the tests here verify that the client is wiring the components + * together correctly so that they work from an application's point of view. + */ +@SuppressWarnings("javadoc") +public class LDClientListenersTest extends BaseTest { + private final static String SDK_KEY = "SDK_KEY"; + + @Test + public void clientSendsFlagChangeEvents() throws Exception { + String flagKey = "flagkey"; + TestData testData = TestData.dataSource(); + testData.update(testData.flag(flagKey).on(true)); + LDConfig config = baseConfig() + .dataSource(testData) + .events(Components.noEvents()) + .build(); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + BlockingQueue eventSink1 = new LinkedBlockingQueue<>(); + BlockingQueue eventSink2 = new LinkedBlockingQueue<>(); + FlagChangeListener listener1 = eventSink1::add; + FlagChangeListener listener2 = eventSink2::add; // need to capture the method reference in a variable so it's the same instance when we unregister it + client.getFlagTracker().addFlagChangeListener(listener1); + client.getFlagTracker().addFlagChangeListener(listener2); + + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + + testData.update(testData.flag(flagKey).on(false)); + + FlagChangeEvent event1 = awaitValue(eventSink1, 1, TimeUnit.SECONDS); + FlagChangeEvent event2 = awaitValue(eventSink2, 1, TimeUnit.SECONDS); + assertThat(event1.getKey(), equalTo(flagKey)); + assertThat(event2.getKey(), equalTo(flagKey)); + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + + client.getFlagTracker().removeFlagChangeListener(listener1); + + testData.update(testData.flag(flagKey).on(true)); + + FlagChangeEvent event3 = awaitValue(eventSink2, 1, TimeUnit.SECONDS); + assertThat(event3.getKey(), 
equalTo(flagKey)); + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + } + } + + @Test + public void clientSendsFlagValueChangeEvents() throws Exception { + String flagKey = "important-flag"; + LDContext user = LDContext.create("important-user"); + LDContext otherUser = LDContext.create("unimportant-user"); + + TestData testData = TestData.dataSource(); + testData.update(testData.flag(flagKey).on(false)); + + LDConfig config = baseConfig() + .dataSource(testData) + .events(Components.noEvents()) + .build(); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + BlockingQueue eventSink1 = new LinkedBlockingQueue<>(); + BlockingQueue eventSink2 = new LinkedBlockingQueue<>(); + BlockingQueue eventSink3 = new LinkedBlockingQueue<>(); + client.getFlagTracker().addFlagValueChangeListener(flagKey, user, eventSink1::add); + FlagChangeListener listener2 = client.getFlagTracker().addFlagValueChangeListener(flagKey, user, eventSink2::add); + client.getFlagTracker().removeFlagChangeListener(listener2); // just verifying that the remove method works + client.getFlagTracker().addFlagValueChangeListener(flagKey, otherUser, eventSink3::add); + + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + assertNoMoreValues(eventSink3, 100, TimeUnit.MILLISECONDS); + + // make the flag true for the first user only, and broadcast a flag change event + testData.update(testData.flag(flagKey) + .on(true) + .variationForUser(user.getKey(), true) + .fallthroughVariation(false)); + + // eventSink1 receives a value change event + FlagValueChangeEvent event1 = awaitValue(eventSink1, 1, TimeUnit.SECONDS); + assertThat(event1.getKey(), equalTo(flagKey)); + assertThat(event1.getOldValue(), equalTo(LDValue.of(false))); + assertThat(event1.getNewValue(), equalTo(LDValue.of(true))); + assertNoMoreValues(eventSink1, 100, TimeUnit.MILLISECONDS); + + // 
eventSink2 doesn't receive one, because it was unregistered + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + + // eventSink3 doesn't receive one, because the flag's value hasn't changed for otherUser + assertNoMoreValues(eventSink2, 100, TimeUnit.MILLISECONDS); + } + } + + @Test + public void dataSourceStatusProviderReturnsLatestStatus() throws Exception { + TestData testData = TestData.dataSource(); + LDConfig config = baseConfig() + .dataSource(testData) + .events(Components.noEvents()) + .build(); + + Instant timeBeforeStarting = Instant.now(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + DataSourceStatusProvider.Status initialStatus = client.getDataSourceStatusProvider().getStatus(); + assertThat(initialStatus.getState(), equalTo(DataSourceStatusProvider.State.VALID)); + assertThat(initialStatus.getStateSince(), greaterThanOrEqualTo(timeBeforeStarting)); + assertThat(initialStatus.getLastError(), nullValue()); + + DataSourceStatusProvider.ErrorInfo errorInfo = new DataSourceStatusProvider.ErrorInfo( + DataSourceStatusProvider.ErrorKind.ERROR_RESPONSE, 401, null, Instant.now()); + testData.updateStatus(DataSourceStatusProvider.State.OFF, errorInfo); + + DataSourceStatusProvider.Status newStatus = client.getDataSourceStatusProvider().getStatus(); + assertThat(newStatus.getState(), equalTo(DataSourceStatusProvider.State.OFF)); + assertThat(newStatus.getStateSince(), greaterThanOrEqualTo(errorInfo.getTime())); + assertThat(newStatus.getLastError(), equalTo(errorInfo)); + } + } + + @Test + public void dataSourceStatusProviderSendsStatusUpdates() throws Exception { + TestData testData = TestData.dataSource(); + LDConfig config = baseConfig() + .dataSource(testData) + .events(Components.noEvents()) + .build(); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + client.getDataSourceStatusProvider().addStatusListener(statuses::add); + + DataSourceStatusProvider.ErrorInfo 
errorInfo = new DataSourceStatusProvider.ErrorInfo( + DataSourceStatusProvider.ErrorKind.ERROR_RESPONSE, 401, null, Instant.now()); + testData.updateStatus(DataSourceStatusProvider.State.OFF, errorInfo); + + DataSourceStatusProvider.Status newStatus = statuses.take(); + assertThat(newStatus.getState(), equalTo(DataSourceStatusProvider.State.OFF)); + assertThat(newStatus.getStateSince(), greaterThanOrEqualTo(errorInfo.getTime())); + assertThat(newStatus.getLastError(), equalTo(errorInfo)); + } + } + + @Test + public void dataStoreStatusMonitoringIsDisabledForInMemoryStore() throws Exception { + LDConfig config = baseConfig() + .dataSource(Components.externalUpdatesOnly()) + .events(Components.noEvents()) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.getDataStoreStatusProvider().isStatusMonitoringEnabled(), equalTo(false)); + } + } + + @Test + public void dataStoreStatusMonitoringIsEnabledForPersistentStore() throws Exception { + LDConfig config = baseConfig() + .dataSource(Components.externalUpdatesOnly()) + .dataStore( + Components.persistentDataStore(TestComponents.specificComponent(new MockPersistentDataStore())) + ) + .events(Components.noEvents()) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.getDataStoreStatusProvider().isStatusMonitoringEnabled(), equalTo(true)); + } + } + + @Test + public void dataStoreStatusProviderReturnsLatestStatus() throws Exception { + ComponentConfigurer underlyingStoreFactory = Components.persistentDataStore( + TestComponents.specificComponent(new MockPersistentDataStore())); + ContextCapturingFactory capturingFactory = new ContextCapturingFactory<>(underlyingStoreFactory); + LDConfig config = baseConfig() + .dataStore(capturingFactory) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + DataStoreStatusProvider.Status originalStatus = new DataStoreStatusProvider.Status(true, false); + DataStoreStatusProvider.Status newStatus = 
new DataStoreStatusProvider.Status(false, false); + assertThat(client.getDataStoreStatusProvider().getStatus(), equalTo(originalStatus)); + capturingFactory.clientContext.getDataStoreUpdateSink().updateStatus(newStatus); + assertThat(client.getDataStoreStatusProvider().getStatus(), equalTo(newStatus)); + } + } + + @Test + public void dataStoreStatusProviderSendsStatusUpdates() throws Exception { + ComponentConfigurer underlyingStoreFactory = Components.persistentDataStore( + TestComponents.specificComponent(new MockPersistentDataStore())); + ContextCapturingFactory capturingFactory = new ContextCapturingFactory<>(underlyingStoreFactory); + LDConfig config = baseConfig() + .dataStore(capturingFactory) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + client.getDataStoreStatusProvider().addStatusListener(statuses::add); + + DataStoreStatusProvider.Status newStatus = new DataStoreStatusProvider.Status(false, false); + capturingFactory.clientContext.getDataStoreUpdateSink().updateStatus(newStatus); + + assertThat(statuses.take(), equalTo(newStatus)); + } + } + + @Test + public void eventsAreDispatchedOnTaskThread() throws Exception { + int desiredPriority = Thread.MAX_PRIORITY - 1; + BlockingQueue capturedThreads = new LinkedBlockingQueue<>(); + + TestData testData = TestData.dataSource(); + testData.update(testData.flag("flagkey").on(true)); + LDConfig config = baseConfig() + .dataSource(testData) + .events(Components.noEvents()) + .threadPriority(desiredPriority) + .build(); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + client.getFlagTracker().addFlagChangeListener(params -> { + capturedThreads.add(Thread.currentThread()); + }); + + testData.update(testData.flag("flagkey").on(false)); + + Thread handlerThread = capturedThreads.take(); + + assertEquals(desiredPriority, handlerThread.getPriority()); + assertThat(handlerThread.getName(), containsString("LaunchDarkly-tasks")); + 
} + } + + @Test + public void bigSegmentStoreStatusReturnsUnavailableStatusWhenNotConfigured() throws Exception { + LDConfig config = baseConfig().build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + BigSegmentStoreStatusProvider.Status status = client.getBigSegmentStoreStatusProvider().getStatus(); + assertFalse(status.isAvailable()); + assertFalse(status.isStale()); + } + } + + @Test + public void bigSegmentStoreStatusProviderSendsStatusUpdates() throws Exception { + EasyMockSupport mocks = new EasyMockSupport(); + AtomicBoolean storeAvailable = new AtomicBoolean(true); + BigSegmentStore storeMock = mocks.niceMock(BigSegmentStore.class); + expect(storeMock.getMetadata()).andAnswer(() -> { + if (storeAvailable.get()) { + return new BigSegmentStoreTypes.StoreMetadata(System.currentTimeMillis()); + } + throw new RuntimeException("sorry"); + }).anyTimes(); + + ComponentConfigurer storeFactory = specificComponent(storeMock); + + replay(storeMock); + + LDConfig config = baseConfig() + .bigSegments( + Components.bigSegments(storeFactory).statusPollInterval(Duration.ofMillis(10)) + ) + .build(); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + BigSegmentStoreStatusProvider.Status status1 = client.getBigSegmentStoreStatusProvider().getStatus(); + assertTrue(status1.isAvailable()); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + client.getBigSegmentStoreStatusProvider().addStatusListener(statuses::add); + + storeAvailable.set(false); + BigSegmentStoreStatusProvider.Status status = statuses.take(); + assertFalse(status.isAvailable()); + assertEquals(status, client.getBigSegmentStoreStatusProvider().getStatus()); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientOfflineTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientOfflineTest.java new file mode 100644 index 0000000..ccbdc1a --- /dev/null +++ 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientOfflineTest.java @@ -0,0 +1,80 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataStore; + +import org.junit.Test; + +import java.io.IOException; + +import static com.launchdarkly.sdk.server.ModelBuilders.flagWithValue; +import static com.launchdarkly.sdk.server.TestComponents.initedDataStore; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.sdk.server.TestUtil.upsertFlag; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class LDClientOfflineTest extends BaseTest { + private static final LDContext user = LDContext.create("user"); + + @Test + public void offlineClientHasNullDataSource() throws IOException { + LDConfig config = baseConfig() + .offline(true) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals(ComponentsImpl.NullDataSource.class, client.dataSource.getClass()); + } + } + + @Test + public void offlineClientHasNoOpEventProcessor() throws IOException { + LDConfig config = baseConfig() + .offline(true) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals(NoOpEventProcessor.class, client.eventProcessor.getClass()); + } + } + + @Test + public void offlineClientIsInitialized() throws IOException { + LDConfig config = baseConfig() + .offline(true) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertTrue(client.isInitialized()); + + assertEquals(DataSourceStatusProvider.State.VALID, client.getDataSourceStatusProvider().getStatus().getState()); + } + } + + @Test + public void offlineClientReturnsDefaultValue() throws 
IOException { + LDConfig config = baseConfig() + .offline(true) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals("x", client.stringVariation("key", user, "x")); + } + } + + @Test + public void offlineClientGetsFlagsStateFromDataStore() throws IOException { + DataStore testDataStore = initedDataStore(); + LDConfig config = baseConfig() + .offline(true) + .dataStore(specificComponent(testDataStore)) + .build(); + upsertFlag(testDataStore, flagWithValue("key", LDValue.of(true))); + try (LDClient client = new LDClient("SDK_KEY", config)) { + FeatureFlagsState state = client.allFlagsState(user); + assertTrue(state.isValid()); + assertEquals(ImmutableMap.of("key", LDValue.of(true)), state.toValuesMap()); + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientTest.java new file mode 100644 index 0000000..af894e8 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDClientTest.java @@ -0,0 +1,497 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.integrations.MockPersistentDataStore; +import com.launchdarkly.sdk.server.integrations.Hook; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; + +import org.easymock.Capture; +import org.easymock.EasyMockSupport; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.net.URI; +import 
java.time.Duration; +import java.util.Collections; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static com.launchdarkly.sdk.server.ModelBuilders.flagWithValue; +import static com.launchdarkly.sdk.server.TestComponents.dataStoreThatThrowsException; +import static com.launchdarkly.sdk.server.TestComponents.failedDataSource; +import static com.launchdarkly.sdk.server.TestComponents.initedDataStore; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static com.launchdarkly.sdk.server.TestUtil.upsertFlag; +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.mock; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** + * See also LDClientEvaluationTest, etc. This file contains mostly tests for the startup logic. 
+ */ +@SuppressWarnings("javadoc") +public class LDClientTest extends BaseTest { + private final static String SDK_KEY = "SDK_KEY"; + + private DataSource dataSource; + private EventProcessor eventProcessor; + private Future initFuture; + private LDClientInterface client; + private final EasyMockSupport mocks = new EasyMockSupport(); + + @SuppressWarnings("unchecked") + @Before + public void before() { + dataSource = mocks.createStrictMock(DataSource.class); + eventProcessor = mocks.createStrictMock(EventProcessor.class); + initFuture = mocks.createStrictMock(Future.class); + } + + @Test + public void constructorThrowsExceptionForNullSdkKey() throws Exception { + try (LDClient client = new LDClient(null)) { + fail("expected exception"); + } catch (NullPointerException e) { + assertEquals("sdkKey must not be null", e.getMessage()); + } + } + + @Test + public void constructorWithConfigThrowsExceptionForNullSdkKey() throws Exception { + try (LDClient client = new LDClient(null, new LDConfig.Builder().build())) { + fail("expected exception"); + } catch (NullPointerException e) { + assertEquals("sdkKey must not be null", e.getMessage()); + } + } + + @Test + public void constructorThrowsExceptionForSdkKeyWithControlCharacter() throws Exception { + try (LDClient client = new LDClient(SDK_KEY + "\n")) { + fail("expected exception"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), not(containsString(SDK_KEY))); + } + } + + @Test + public void constructorWithConfigThrowsExceptionForSdkKeyWithControlCharacter() throws Exception { + try (LDClient client = new LDClient(SDK_KEY + "\n", LDConfig.DEFAULT)) { + fail("expected exception"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), not(containsString(SDK_KEY))); + } + } + + @Test + public void constructorAllowsSdkKeyToBeEmpty() throws Exception { + // It may seem counter-intuitive to allow this, but if someone is using the SDK in offline + // mode, or with a file data source or a test 
fixture, they may reasonably assume that it's + // OK to pass an empty string since the key won't actually be used. + try (LDClient client = new LDClient("", baseConfig().build())) {} + } + + @Test + public void constructorThrowsExceptionForNullConfig() throws Exception { + try (LDClient client = new LDClient(SDK_KEY, null)) { + fail("expected exception"); + } catch (NullPointerException e) { + assertEquals("config must not be null", e.getMessage()); + } + } + + @Test + public void clientHasDefaultEventProcessorWithDefaultConfig() throws Exception { + LDConfig config = new LDConfig.Builder() + .dataSource(Components.externalUpdatesOnly()) + .diagnosticOptOut(true) + .logging(Components.logging(testLogging)) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals(DefaultEventProcessorWrapper.class, client.eventProcessor.getClass()); + } + } + + @Test + public void clientHasDefaultEventProcessorWithSendEvents() throws Exception { + LDConfig config = new LDConfig.Builder() + .dataSource(Components.externalUpdatesOnly()) + .events(Components.sendEvents()) + .diagnosticOptOut(true) + .logging(Components.logging(testLogging)) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals(DefaultEventProcessorWrapper.class, client.eventProcessor.getClass()); + } + } + + @Test + public void clientHasNoOpEventProcessorWithNoEvents() throws Exception { + LDConfig config = new LDConfig.Builder() + .dataSource(Components.externalUpdatesOnly()) + .events(Components.noEvents()) + .logging(Components.logging(testLogging)) + .build(); + try (LDClient client = new LDClient("SDK_KEY", config)) { + assertEquals(NoOpEventProcessor.class, client.eventProcessor.getClass()); + } + } + + @Test + public void streamingClientHasStreamProcessor() throws Exception { + LDConfig config = new LDConfig.Builder() + .serviceEndpoints(Components.serviceEndpoints().streaming("http://fake")) + .events(Components.noEvents()) + 
.logging(Components.logging(testLogging)) + .startWait(Duration.ZERO) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertEquals(StreamProcessor.class, client.dataSource.getClass()); + } + } + + @Test + public void canSetCustomStreamingEndpoint() throws Exception { + String base = "http://fake"; + URI baseUri = URI.create(base); + String expected = base + StandardEndpoints.STREAMING_REQUEST_PATH; + LDConfig config = new LDConfig.Builder() + .serviceEndpoints(Components.serviceEndpoints().streaming(baseUri)) + .events(Components.noEvents()) + .logging(Components.logging(testLogging)) + .startWait(Duration.ZERO) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertEquals(expected, ((StreamProcessor) client.dataSource).streamUri.toString()); + } + } + + @Test + public void pollingClientHasPollingProcessor() throws IOException { + LDConfig config = new LDConfig.Builder() + .dataSource(Components.pollingDataSource()) + .serviceEndpoints(Components.serviceEndpoints().polling("http://fake")) + .events(Components.noEvents()) + .logging(Components.logging(testLogging)) + .startWait(Duration.ZERO) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertEquals(PollingProcessor.class, client.dataSource.getClass()); + } + } + + @Test + public void canSetCustomPollingEndpoint() throws Exception { + URI pu = URI.create("http://fake"); + LDConfig config = new LDConfig.Builder() + .dataSource(Components.pollingDataSource()) + .serviceEndpoints(Components.serviceEndpoints().polling(pu)) + .events(Components.noEvents()) + .logging(Components.logging(testLogging)) + .startWait(Duration.ZERO) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + String actual = ((DefaultFeatureRequestor) ((PollingProcessor) client.dataSource).requestor).pollingUri.toString(); + assertThat(actual, containsString(pu.toString())); + } + } + + @Test + public void canSetHooks() throws Exception { + LDConfig config1 = 
new LDConfig.Builder() + .build(); + try (LDClient client = new LDClient(SDK_KEY, config1)) { + assertNotEquals(EvaluatorWithHooks.class, client.evaluator.getClass()); + } + + LDConfig config2 = new LDConfig.Builder() + .hooks(Components.hooks().setHooks(Collections.singletonList(mock(Hook.class)))) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config2)) { + assertEquals(EvaluatorWithHooks.class, client.evaluator.getClass()); + } + } + + @Test + public void sameDiagnosticStorePassedToFactoriesWhenSupported() throws IOException { + @SuppressWarnings("unchecked") + ComponentConfigurer mockDataSourceFactory = mocks.createStrictMock(ComponentConfigurer.class); + + LDConfig config = new LDConfig.Builder() + .serviceEndpoints(Components.serviceEndpoints().events("fake-host")) // event processor will try to send a diagnostic event here + .dataSource(mockDataSourceFactory) + .events(Components.sendEvents()) + .logging(Components.logging(testLogging)) + .startWait(Duration.ZERO) + .build(); + + Capture capturedDataSourceContext = Capture.newInstance(); + expect(mockDataSourceFactory.build(capture(capturedDataSourceContext))).andReturn(failedDataSource()); + + mocks.replayAll(); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + mocks.verifyAll(); + assertNotNull(ClientContextImpl.get(capturedDataSourceContext.getValue()).diagnosticStore); + } + } + + @Test + public void nullDiagnosticStorePassedToFactoriesWhenOptedOut() throws IOException { + @SuppressWarnings("unchecked") + ComponentConfigurer mockDataSourceFactory = mocks.createStrictMock(ComponentConfigurer.class); + + LDConfig config = new LDConfig.Builder() + .dataSource(mockDataSourceFactory) + .diagnosticOptOut(true) + .logging(Components.logging(testLogging)) + .startWait(Duration.ZERO) + .build(); + + Capture capturedDataSourceContext = Capture.newInstance(); + expect(mockDataSourceFactory.build(capture(capturedDataSourceContext))).andReturn(failedDataSource()); + + mocks.replayAll(); + + 
try (LDClient client = new LDClient(SDK_KEY, config)) { + mocks.verifyAll(); + assertNull(ClientContextImpl.get(capturedDataSourceContext.getValue()).diagnosticStore); + } + } + + @Test + public void noWaitForDataSourceIfWaitMillisIsZero() throws Exception { + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ZERO); + + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(false); + mocks.replayAll(); + + client = createMockClient(config); + assertFalse(client.isInitialized()); + + mocks.verifyAll(); + } + + @Test + public void willWaitForDataSourceIfWaitMillisIsGreaterThanZero() throws Exception { + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ofMillis(10)); + + expect(dataSource.start()).andReturn(initFuture); + expect(initFuture.get(10L, TimeUnit.MILLISECONDS)).andReturn(null); + expect(dataSource.isInitialized()).andReturn(false).anyTimes(); + mocks.replayAll(); + + client = createMockClient(config); + assertFalse(client.isInitialized()); + + mocks.verifyAll(); + } + + @Test + public void noWaitForDataSourceIfWaitMillisIsNegative() throws Exception { + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ofMillis(-10)); + + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(false); + mocks.replayAll(); + + client = createMockClient(config); + assertFalse(client.isInitialized()); + + mocks.verifyAll(); + } + + @Test + public void dataSourceCanTimeOut() throws Exception { + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ofMillis(10)); + + expect(dataSource.start()).andReturn(initFuture); + expect(initFuture.get(10L, TimeUnit.MILLISECONDS)).andThrow(new TimeoutException()); + expect(dataSource.isInitialized()).andReturn(false).anyTimes(); + mocks.replayAll(); + + client = createMockClient(config); + assertFalse(client.isInitialized()); + + mocks.verifyAll(); + } + + @Test + public void 
clientCatchesRuntimeExceptionFromDataSource() throws Exception { + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ofMillis(10)); + + expect(dataSource.start()).andReturn(initFuture); + expect(initFuture.get(10L, TimeUnit.MILLISECONDS)).andThrow(new RuntimeException()); + expect(dataSource.isInitialized()).andReturn(false).anyTimes(); + mocks.replayAll(); + + client = createMockClient(config); + assertFalse(client.isInitialized()); + + mocks.verifyAll(); + } + + @Test + public void isFlagKnownReturnsTrueForExistingFlag() throws Exception { + DataStore testDataStore = initedDataStore(); + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ZERO) + .dataStore(specificComponent(testDataStore)); + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(true).times(1); + mocks.replayAll(); + + client = createMockClient(config); + + upsertFlag(testDataStore, flagWithValue("key", LDValue.of(1))); + assertTrue(client.isFlagKnown("key")); + mocks.verifyAll(); + } + + @Test + public void isFlagKnownReturnsFalseForUnknownFlag() throws Exception { + DataStore testDataStore = initedDataStore(); + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ZERO) + .dataStore(specificComponent(testDataStore)); + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(true).times(1); + mocks.replayAll(); + + client = createMockClient(config); + + assertFalse(client.isFlagKnown("key")); + mocks.verifyAll(); + } + + @Test + public void isFlagKnownReturnsFalseIfStoreAndClientAreNotInitialized() throws Exception { + DataStore testDataStore = new InMemoryDataStore(); + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ZERO) + .dataStore(specificComponent(testDataStore)); + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(false).times(1); + mocks.replayAll(); + + client = 
createMockClient(config); + + upsertFlag(testDataStore, flagWithValue("key", LDValue.of(1))); + assertFalse(client.isFlagKnown("key")); + mocks.verifyAll(); + } + + @Test + public void isFlagKnownUsesStoreIfStoreIsInitializedButClientIsNot() throws Exception { + DataStore testDataStore = initedDataStore(); + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ZERO) + .dataStore(specificComponent(testDataStore)); + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(false).times(1); + mocks.replayAll(); + + client = createMockClient(config); + + upsertFlag(testDataStore, flagWithValue("key", LDValue.of(1))); + assertTrue(client.isFlagKnown("key")); + mocks.verifyAll(); + } + + @Test + public void isFlagKnownCatchesExceptionFromDataStore() throws Exception { + DataStore badStore = dataStoreThatThrowsException(new RuntimeException("sorry")); + LDConfig.Builder config = new LDConfig.Builder() + .startWait(Duration.ZERO) + .dataStore(specificComponent(badStore)); + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(false).times(1); + mocks.replayAll(); + + client = createMockClient(config); + + assertFalse(client.isFlagKnown("key")); + } + + @Test + public void getVersion() throws Exception { + LDConfig config = new LDConfig.Builder() + .dataSource(Components.externalUpdatesOnly()) + .events(Components.noEvents()) + .logging(Components.logging(testLogging)) + .build(); + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertEquals(Version.SDK_VERSION, client.version()); + } + } + + @Test + public void canGetCacheStatsFromDataStoreStatusProvider() throws Exception { + LDConfig config1 = baseConfig().build(); + try (LDClient client1 = new LDClient(SDK_KEY, config1)) { + assertNull(client1.getDataStoreStatusProvider().getCacheStats()); + } + + LDConfig config2 = baseConfig() + .dataStore(Components.persistentDataStore(c -> new MockPersistentDataStore())) 
+ .build(); + try (LDClient client2 = new LDClient(SDK_KEY, config2)) { + DataStoreStatusProvider.CacheStats expectedStats = new DataStoreStatusProvider.CacheStats(0, 0, 0, 0, 0, 0); + assertEquals(expectedStats, client2.getDataStoreStatusProvider().getCacheStats()); + } + } + + @Test + public void testSecureModeHash() throws IOException { + setupMockDataSourceToInitialize(true); + LDContext context = LDContext.create("userkey"); + LDContext contextAsUser = LDContext.create(context.getKey()); + String expectedHash = "c097a70924341660427c2e487b86efee789210f9e6dafc3b5f50e75bc596ff99"; + + client = createMockClient(new LDConfig.Builder() + .startWait(Duration.ZERO)); + assertEquals(expectedHash, client.secureModeHash(context)); + assertEquals(expectedHash, client.secureModeHash(contextAsUser)); + + assertNull(client.secureModeHash(null)); + assertNull(client.secureModeHash(LDContext.create(null))); // invalid context + } + + private void setupMockDataSourceToInitialize(boolean willInitialize) { + expect(dataSource.start()).andReturn(initFuture); + expect(dataSource.isInitialized()).andReturn(willInitialize); + mocks.replayAll(); + } + + private LDClient createMockClient(LDConfig.Builder config) { + config.dataSource(specificComponent(dataSource)); + config.events(specificComponent(eventProcessor)); + config.logging(Components.logging(testLogging)); + return new LDClient(SDK_KEY, config.build()); + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDConfigTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDConfigTest.java new file mode 100644 index 0000000..100cac1 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/LDConfigTest.java @@ -0,0 +1,268 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.server.integrations.ApplicationInfoBuilder; +import 
com.launchdarkly.sdk.server.integrations.BigSegmentsConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.HooksConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.LoggingConfigurationBuilder; +import com.launchdarkly.sdk.server.integrations.ServiceEndpointsBuilder; +import com.launchdarkly.sdk.server.integrations.WrapperInfoBuilder; +import com.launchdarkly.sdk.server.integrations.Hook; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration; + +import org.junit.Test; + +import java.net.URI; +import java.time.Duration; +import java.util.Collections; +import java.util.concurrent.atomic.AtomicBoolean; + +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static org.easymock.EasyMock.mock; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class LDConfigTest { + private static final ClientContext BASIC_CONTEXT = new ClientContext(""); + + @Test + public void defaults() { + LDConfig config = new LDConfig.Builder().build(); + assertNotNull(config.bigSegments); + assertNull(config.bigSegments.build(clientContext("", config)).getStore()); + assertNotNull(config.dataSource); + 
assertEquals(Components.streamingDataSource().getClass(), config.dataSource.getClass()); + assertNotNull(config.dataStore); + assertEquals(Components.inMemoryDataStore().getClass(), config.dataStore.getClass()); + assertFalse(config.diagnosticOptOut); + assertNotNull(config.events); + assertEquals(Components.sendEvents().getClass(), config.events.getClass()); + assertFalse(config.offline); + + assertNotNull(config.http); + HttpConfiguration httpConfig = config.http.build(BASIC_CONTEXT); + assertEquals(HttpConfigurationBuilder.DEFAULT_CONNECT_TIMEOUT, httpConfig.getConnectTimeout()); + + assertNotNull(config.logging); + LoggingConfiguration loggingConfig = config.logging.build(BASIC_CONTEXT); + assertEquals(LoggingConfigurationBuilder.DEFAULT_LOG_DATA_SOURCE_OUTAGE_AS_ERROR_AFTER, + loggingConfig.getLogDataSourceOutageAsErrorAfter()); + + assertEquals(LDConfig.DEFAULT_START_WAIT, config.startWait); + assertEquals(Thread.MIN_PRIORITY, config.threadPriority); + assertNull(config.wrapperInfo); + } + + @Test + public void bigSegmentsConfigFactory() { + BigSegmentsConfigurationBuilder f = Components.bigSegments(null); + LDConfig config = new LDConfig.Builder().bigSegments(f).build(); + assertSame(f, config.bigSegments); + } + + @Test + public void dataSourceFactory() { + ComponentConfigurer f = specificComponent(null); + LDConfig config = new LDConfig.Builder().dataSource(f).build(); + assertSame(f, config.dataSource); + } + + @Test + public void dataStoreFactory() { + ComponentConfigurer f = specificComponent(null); + LDConfig config = new LDConfig.Builder().dataStore(f).build(); + assertSame(f, config.dataStore); + } + + @Test + public void diagnosticOptOut() { + LDConfig config = new LDConfig.Builder().diagnosticOptOut(true).build(); + assertTrue(config.diagnosticOptOut); + + LDConfig config1 = new LDConfig.Builder().diagnosticOptOut(true).diagnosticOptOut(false).build(); + assertFalse(config1.diagnosticOptOut); + } + + @Test + public void eventProcessorFactory() { + 
ComponentConfigurer f = specificComponent(null); + LDConfig config = new LDConfig.Builder().events(f).build(); + assertSame(f, config.events); + } + + @Test + public void offline() { + LDConfig config1 = new LDConfig.Builder().offline(true).build(); + assertTrue(config1.offline); + assertSame(Components.externalUpdatesOnly(), config1.dataSource); + assertSame(Components.noEvents(), config1.events); + + LDConfig config2 = new LDConfig.Builder().offline(true).dataSource(Components.streamingDataSource()).build(); + assertTrue(config2.offline); + assertSame(Components.externalUpdatesOnly(), config2.dataSource); // offline overrides specified factory + assertSame(Components.noEvents(), config2.events); + + LDConfig config3 = new LDConfig.Builder().offline(true).offline(false).build(); + assertFalse(config3.offline); // just testing that the setter works for both true and false + } + + @Test + public void hooks() { + Hook mockHook = mock(Hook.class); + HooksConfigurationBuilder b = Components.hooks().setHooks(Collections.singletonList(mockHook)); + LDConfig config = new LDConfig.Builder().hooks(b).build(); + assertEquals(1, config.hooks.getHooks().size()); + assertEquals(mockHook, config.hooks.getHooks().get(0)); + } + + @Test + public void http() { + HttpConfigurationBuilder b = Components.httpConfiguration().connectTimeout(Duration.ofSeconds(9)); + LDConfig config = new LDConfig.Builder().http(b).build(); + assertEquals(Duration.ofSeconds(9), + config.http.build(BASIC_CONTEXT).getConnectTimeout()); + } + + @Test + public void wrapperInfo() { + LDConfig config = new LDConfig.Builder() + .wrapper(Components.wrapperInfo().wrapperName("the-name").wrapperVersion("the-version")).build(); + HttpConfiguration httpConfiguration = config.http.build( + ClientContextImpl.fromConfig("", config, null)); + AtomicBoolean headerFound = new AtomicBoolean(false); + httpConfiguration.getDefaultHeaders().forEach(entry -> { + if(entry.getKey().compareTo("X-LaunchDarkly-Wrapper") == 0) { + 
if(entry.getValue().compareTo("the-name/the-version") == 0) { + headerFound.set(true); + } + } + }); + assertTrue(headerFound.get()); + } + + @Test + public void logging() { + LoggingConfigurationBuilder b = Components.logging().logDataSourceOutageAsErrorAfter(Duration.ofSeconds(9)); + LDConfig config = new LDConfig.Builder().logging(b).build(); + assertEquals(Duration.ofSeconds(9), + config.logging.build(BASIC_CONTEXT).getLogDataSourceOutageAsErrorAfter()); + } + + @Test + public void startWait() { + LDConfig config = new LDConfig.Builder().startWait(Duration.ZERO).build(); + assertEquals(Duration.ZERO, config.startWait); + + LDConfig config1 = new LDConfig.Builder().startWait(Duration.ZERO).startWait(null).build(); + assertEquals(LDConfig.DEFAULT_START_WAIT, config1.startWait); + } + + @Test + public void threadPriority() { + LDConfig config = new LDConfig.Builder().threadPriority(Thread.MAX_PRIORITY).build(); + assertEquals(Thread.MAX_PRIORITY, config.threadPriority); + } + + @Test + public void testHttpDefaults() { + LDConfig config = new LDConfig.Builder().build(); + HttpConfiguration hc = config.http.build(BASIC_CONTEXT); + HttpConfiguration defaults = Components.httpConfiguration().build(BASIC_CONTEXT); + assertEquals(defaults.getConnectTimeout(), hc.getConnectTimeout()); + assertNull(hc.getProxy()); + assertNull(hc.getProxyAuthentication()); + assertEquals(defaults.getSocketTimeout(), hc.getSocketTimeout()); + assertNull(hc.getSslSocketFactory()); + assertNull(hc.getTrustManager()); + assertEquals(ImmutableMap.copyOf(defaults.getDefaultHeaders()), ImmutableMap.copyOf(hc.getDefaultHeaders())); + } + + @Test + public void fromConfig() { + BigSegmentsConfigurationBuilder bigSegments = Components.bigSegments(null); + ComponentConfigurer dataSource = specificComponent(null); + ComponentConfigurer eventProcessor = specificComponent(null); + Hook mockHook = mock(Hook.class); + HooksConfigurationBuilder hooksBuilder = 
Components.hooks().setHooks(Collections.singletonList(mockHook)); + HttpConfigurationBuilder http = Components.httpConfiguration().connectTimeout(Duration.ofSeconds(9)); + WrapperInfoBuilder wrapperInfo = Components.wrapperInfo().wrapperName("the-name").wrapperVersion("the-version"); + ApplicationInfoBuilder applicationInfo = Components.applicationInfo().applicationId("test").applicationVersion("version"); + ServiceEndpointsBuilder serviceEndpoints = Components.serviceEndpoints() + .polling("polling").streaming("stream").events("events"); + + LDConfig config = new LDConfig.Builder() + .applicationInfo(applicationInfo) + .bigSegments(bigSegments) + .dataSource(dataSource) + .events(eventProcessor) + .diagnosticOptOut(true) + .offline(false) // To keep the data source from being removed in the build. + .hooks(hooksBuilder) + .http(http) + .serviceEndpoints(serviceEndpoints) + .wrapper(wrapperInfo).build(); + + LDConfig config2 = LDConfig.Builder.fromConfig(config).build(); + + assertSame(bigSegments, config2.bigSegments); + assertSame(dataSource, config2.dataSource); + assertSame(eventProcessor, config2.events); + assertSame(http, config2.http); + assertFalse(config2.offline); + assertTrue(config2.diagnosticOptOut); + assertEquals("test", config2.applicationInfo.getApplicationId()); + assertEquals("version", config2.applicationInfo.getApplicationVersion()); + assertEquals("the-name", config2.wrapperInfo.getWrapperName()); + assertEquals("the-version", config2.wrapperInfo.getWrapperVersion()); + + assertEquals(URI.create("polling"), config2.serviceEndpoints.getPollingBaseUri()); + assertEquals(URI.create("stream"), config2.serviceEndpoints.getStreamingBaseUri()); + assertEquals(URI.create("events"), config2.serviceEndpoints.getEventsBaseUri()); + + assertEquals(mockHook, config2.hooks.getHooks().get(0)); + } + + @Test + public void fromConfigDefault() { + LDConfig config = LDConfig.Builder.fromConfig(new LDConfig.Builder().build()).build(); + 
assertNotNull(config.bigSegments); + assertNull(config.bigSegments.build(clientContext("", config)).getStore()); + assertNotNull(config.dataSource); + assertEquals(Components.streamingDataSource().getClass(), config.dataSource.getClass()); + assertNotNull(config.dataStore); + assertEquals(Components.inMemoryDataStore().getClass(), config.dataStore.getClass()); + assertFalse(config.diagnosticOptOut); + assertNotNull(config.events); + assertEquals(Components.sendEvents().getClass(), config.events.getClass()); + assertFalse(config.offline); + + assertNotNull(config.hooks.getHooks()); + assertEquals(0, config.hooks.getHooks().size()); + + assertNotNull(config.http); + HttpConfiguration httpConfig = config.http.build(BASIC_CONTEXT); + assertEquals(HttpConfigurationBuilder.DEFAULT_CONNECT_TIMEOUT, httpConfig.getConnectTimeout()); + + assertNotNull(config.logging); + LoggingConfiguration loggingConfig = config.logging.build(BASIC_CONTEXT); + assertEquals(LoggingConfigurationBuilder.DEFAULT_LOG_DATA_SOURCE_OUTAGE_AS_ERROR_AFTER, + loggingConfig.getLogDataSourceOutageAsErrorAfter()); + + assertEquals(LDConfig.DEFAULT_START_WAIT, config.startWait); + assertEquals(Thread.MIN_PRIORITY, config.threadPriority); + assertNull(config.wrapperInfo); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationBuilderTests.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationBuilderTests.java new file mode 100644 index 0000000..15aa3d5 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationBuilderTests.java @@ -0,0 +1,46 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.integrations.TestData; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.migrations.Migration; +import com.launchdarkly.sdk.server.migrations.MigrationBuilder; +import com.launchdarkly.sdk.server.migrations.MigrationMethodResult; +import org.junit.Test; + +import 
java.util.Optional; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +public class MigrationBuilderTests extends BaseTest { + private final TestData testData = TestData.dataSource(); + + private final LDClientInterface client = new LDClient("SDK_KEY", baseConfig() + .dataSource(testData) + .build()); + + @Test + public void itCanMakeABasicMigration() { + MigrationBuilder builder = new MigrationBuilder<>(client); + builder.read((Void) -> MigrationMethodResult.Success("Old"), (Void)-> MigrationMethodResult.Success("New")); + builder.write((Void) -> MigrationMethodResult.Success("Old"), (Void)-> MigrationMethodResult.Success("New")); + Optional> migration = builder.build(); + assertTrue(migration.isPresent()); + } + + @Test + public void itDoesNotCreateAMigrationIfReadImplementationIsNotSet() { + MigrationBuilder builder = new MigrationBuilder<>(client); + builder.write((Void) -> MigrationMethodResult.Success("Old"), (Void)-> MigrationMethodResult.Success("New")); + Optional> migration = builder.build(); + assertFalse(migration.isPresent()); + } + + @Test + public void itDoesNotCreateAMigrationIfWriteImplementationIsNotSet() { + MigrationBuilder builder = new MigrationBuilder<>(client); + builder.read((Void) -> MigrationMethodResult.Success("Old"), (Void)-> MigrationMethodResult.Success("New")); + Optional> migration = builder.build(); + assertFalse(migration.isPresent()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationConsistencyCheckTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationConsistencyCheckTest.java new file mode 100644 index 0000000..0187324 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationConsistencyCheckTest.java @@ -0,0 +1,117 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.Event; +import 
com.launchdarkly.sdk.server.integrations.TestData; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.migrations.Migration; +import com.launchdarkly.sdk.server.migrations.MigrationBuilder; +import com.launchdarkly.sdk.server.migrations.MigrationExecution; +import com.launchdarkly.sdk.server.migrations.MigrationMethodResult; +import com.launchdarkly.sdk.server.migrations.MigrationSerialOrder; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.Arrays; +import java.util.Objects; +import java.util.Optional; + +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@RunWith(Parameterized.class) +public class MigrationConsistencyCheckTest extends BaseTest { + public TestComponents.TestEventProcessor eventSink = new TestComponents.TestEventProcessor(); + public final TestData testData = TestData.dataSource(); + + public final LDClientInterface client = new LDClient("SDK_KEY", baseConfig() + .dataSource(testData) + .events(specificComponent(eventSink)) + .build()); + + public Migration migration; + + public String readOldResult = ""; + public String readNewResult = ""; + + @Parameterized.Parameters(name = "{0}") + public static Iterable data() { + return Arrays.asList(new Object[][]{ + {MigrationExecution.Parallel()}, + {MigrationExecution.Serial(MigrationSerialOrder.FIXED)}, + {MigrationExecution.Serial(MigrationSerialOrder.RANDOM)} + }); + } + + public MigrationConsistencyCheckTest(MigrationExecution execution) { + MigrationBuilder builder = new MigrationBuilder(client) + .readExecution(execution) + .read( + (payload) -> MigrationMethodResult.Success(readOldResult), + (payload) -> 
MigrationMethodResult.Success(readNewResult), + (a, b) -> Objects.equals(a, b)) + .write((payload) -> { + throw new RuntimeException("old write"); + }, (payload) -> { + throw new RuntimeException("new write"); + }); + Optional> res = builder.build(); + assertTrue(res.isPresent()); + migration = res.get(); + } + + @Test + public void itFindsResultsConsistentWhenTheyAre() { + readOldResult = "consistent"; + readNewResult = "consistent"; + + migration.read("test-flag", + LDContext.create("user-key"), MigrationStage.LIVE); + + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp) e; + assertNotNull(me.getConsistencyMeasurement()); + assertTrue(me.getConsistencyMeasurement().isConsistent()); + } + + @Test + public void itFindsResultsInconsistentWhenTheyAre() { + readOldResult = "consistent"; + readNewResult = "inconsistent"; + + migration.read("test-flag", + LDContext.create("user-key"), MigrationStage.LIVE); + + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp) e; + assertNotNull(me.getConsistencyMeasurement()); + assertFalse(me.getConsistencyMeasurement().isConsistent()); + } + + @Test + public void itDoesNotRunTheCheckIfCheckRatioIsZero() { + readOldResult = "consistent"; + readNewResult = "inconsistent"; + + testData.update(testData.flag("test-flag") + .on(true) + .valueForAll(LDValue.of("shadow")) + .migrationCheckRatio(0)); + + migration.read("test-flag", + LDContext.create("user-key"), MigrationStage.LIVE); + + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp) e; + assertNull(me.getConsistencyMeasurement()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationExecutionFixture.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationExecutionFixture.java new file mode 
100644 index 0000000..b91bf48 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationExecutionFixture.java @@ -0,0 +1,82 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.integrations.TestData; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.migrations.Migration; +import com.launchdarkly.sdk.server.migrations.MigrationBuilder; +import com.launchdarkly.sdk.server.migrations.MigrationExecution; +import com.launchdarkly.sdk.server.migrations.MigrationMethodResult; + +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; + +import java.util.Optional; + +import static org.junit.Assert.assertTrue; + +/** + * This fixtures simplifies tests which require tracking the execution of the various migration methods. + */ +public class MigrationExecutionFixture extends BaseTest { + public final TestData testData = TestData.dataSource(); + public final String flagKey = "test-flag"; + + public TestComponents.TestEventProcessor eventSink = new TestComponents.TestEventProcessor(); + + public final LDClientInterface client = new LDClient("SDK_KEY", baseConfig() + .dataSource(testData) + .events(specificComponent(eventSink)) + .build()); + + public Migration migration; + + public boolean readOldCalled = false; + public boolean writeOldCalled = false; + public boolean readNewCalled = false; + public boolean writeNewCalled = false; + + public boolean failOldWrite = false; + + public boolean failNewWrite = false; + + public boolean failOldRead = false; + + public boolean failNewRead = false; + + public String payloadReadOld = null; + public String payloadReadNew = null; + + public String payloadWriteOld = null; + public String payloadWriteNew = null; + + public MigrationExecutionFixture(MigrationExecution execution) { + this(execution, true, true); + } + + public MigrationExecutionFixture(MigrationExecution execution, boolean trackLatency, boolean 
trackErrors) { + MigrationBuilder builder = new MigrationBuilder(client). + readExecution(execution) + .trackLatency(trackLatency) + .trackErrors(trackErrors) + .read((payload) -> { + readOldCalled = true; + payloadReadOld = payload; + return failOldRead ? MigrationMethodResult.Failure() : MigrationMethodResult.Success("Old"); + }, (payload) -> { + readNewCalled = true; + payloadReadNew = payload; + return failNewRead ? MigrationMethodResult.Failure() : MigrationMethodResult.Success("New"); + }) + .write((payload) -> { + writeOldCalled = true; + payloadWriteOld = payload; + return failOldWrite ? MigrationMethodResult.Failure() : MigrationMethodResult.Success("Old"); + }, (payload) -> { + writeNewCalled = true; + payloadWriteNew = payload; + return failNewWrite ? MigrationMethodResult.Failure() : MigrationMethodResult.Success("New"); + }); + Optional> res = builder.build(); + assertTrue(res.isPresent()); + migration = res.get(); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationOpTrackerTests.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationOpTrackerTests.java new file mode 100644 index 0000000..ca1047d --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationOpTrackerTests.java @@ -0,0 +1,472 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.logging.LDLogLevel; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogCapture; +import com.launchdarkly.logging.Logs; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.internal.events.Event; +import org.jetbrains.annotations.NotNull; +import org.junit.Test; + +import java.time.Duration; +import java.util.Optional; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static 
org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +public class MigrationOpTrackerTests { + private final LogCapture logCapture = Logs.capture(); + private final LDLogger testLogger = LDLogger.withAdapter(logCapture, ""); + + @Test + public void itCanMakeAMinimalEvent() { + EvaluationDetail detail = getDetail(); + LDContext context = LDContext.create("user-key"); + + MigrationOpTracker tracker = basicTracker(detail, context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + + Optional event = tracker.createEvent(); + assertTrue(event.isPresent()); + Event.MigrationOp me = event.get(); + assertEquals(MigrationOp.READ.toString(), me.getOperation()); + assertEquals("test-key", me.getFeatureKey()); + assertEquals(detail.getReason(), me.getReason()); + assertEquals(detail.getValue(), me.getValue().stringValue()); + assertEquals(MigrationStage.LIVE.toString(), me.getValue().stringValue()); + assertEquals(MigrationStage.OFF.toString(), me.getDefaultVal().stringValue()); + assertEquals(context, me.getContext()); + + assertTrue(me.getInvokedMeasurement().wasOldInvoked()); + assertFalse(me.getInvokedMeasurement().wasNewInvoked()); + assertNull(me.getConsistencyMeasurement()); + assertNull(me.getErrorMeasurement()); + assertNull(me.getLatencyMeasurement()); + + assertEquals(0, logCapture.getMessages().size()); + } + + @Test + public void itDoesNotMakeAnEventIfTheFlagKeyIsEmpty() { + EvaluationDetail detail = getDetail(); + LDContext context = LDContext.create("user-key"); + + MigrationOpTracker tracker = new MigrationOpTracker( + "", + null, + detail, + MigrationStage.OFF, + MigrationStage.LIVE, + context, + 1, + testLogger); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + + Optional event = tracker.createEvent(); + assertFalse(event.isPresent()); + } + + @Test + public void itCanMakeAnEventForAReadOrWriteOperation() { + EvaluationDetail detail = 
getDetail(); + LDContext context = LDContext.create("user-key"); + + MigrationOpTracker tracker = basicTracker(detail, context); + + tracker.op(MigrationOp.WRITE); + tracker.invoked(MigrationOrigin.NEW); + + Optional event = tracker.createEvent(); + assertTrue(event.isPresent()); + Event.MigrationOp me = event.get(); + assertEquals(MigrationOp.WRITE.toString(), me.getOperation()); + + MigrationOpTracker tracker2 = basicTracker(detail, context); + + tracker2.op(MigrationOp.READ); + tracker2.invoked(MigrationOrigin.NEW); + + Optional event2 = tracker2.createEvent(); + assertTrue(event2.isPresent()); + Event.MigrationOp me2 = event2.get(); + assertEquals(MigrationOp.READ.toString(), me2.getOperation()); + + assertEquals(0, logCapture.getMessages().size()); + } + + @Test + public void itMakesNoEventIfNoOperationWasSet() { + LDContext context = LDContext.create("user-key"); + + MigrationOpTracker tracker = basicTracker(getDetail(), context); + tracker.invoked(MigrationOrigin.OLD); + + assertFalse(tracker.createEvent().isPresent()); + + LogCapture.Message message = logCapture.requireMessage(LDLogLevel.ERROR, 0); + assertEquals("The operation must be set, using \"op\" before an event can be created.", message.getText()); + } + + @NotNull + private MigrationOpTracker basicTracker(EvaluationDetail detail, LDContext context) { + return new MigrationOpTracker( + "test-key", + null, + detail, + MigrationStage.OFF, + MigrationStage.LIVE, + context, + 1, + testLogger); + } + + @NotNull + private MigrationOpTracker trackerWithFlag(EvaluationDetail detail, LDContext context) { + return new MigrationOpTracker( + "test-key", + new DataModel.FeatureFlag("flag", 2, true, null, + "salt", null, null, null, null, 0, null, + false, false, false, null, false, + 5l, null, false), + detail, + MigrationStage.OFF, + MigrationStage.LIVE, + context, + 1, + testLogger); + } + + @Test + public void itMakesNoEventIfNoOriginsWereInvoked() { + LDContext context = LDContext.create("user-key"); + + 
MigrationOpTracker tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + + assertFalse(tracker.createEvent().isPresent()); + LogCapture.Message message = logCapture.requireMessage(LDLogLevel.ERROR, 0); + assertEquals("The migration invoked neither the \"old\" or \"new\" implementation and an event " + + "cannot be generated.", message.getText()); + } + + @Test + public void itMakesNoEventIfTheContextIsInvalid() { + EvaluationDetail detail = getDetail(); + LDContext context = LDContext.create(ContextKind.of("kind"), "kind-key"); + + MigrationOpTracker tracker = basicTracker(detail, context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + + assertFalse(tracker.createEvent().isPresent()); + LogCapture.Message message = logCapture.requireMessage(LDLogLevel.ERROR, 0); + assertEquals("The migration was not done against a valid context and cannot generate an event.", message.getText()); + } + + @Test + public void itHandlesAllPermutationsOfInvoked() { + LDContext context = LDContext.create("user-key"); + + MigrationOpTracker tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + Optional opt1 = tracker.createEvent(); + assertTrue(opt1.isPresent()); + assertTrue(opt1.get().getInvokedMeasurement().wasOldInvoked()); + assertFalse(opt1.get().getInvokedMeasurement().wasNewInvoked()); + + tracker.invoked(MigrationOrigin.NEW); + Optional opt2 = tracker.createEvent(); + assertTrue(opt2.isPresent()); + assertTrue(opt2.get().getInvokedMeasurement().wasOldInvoked()); + assertTrue(opt2.get().getInvokedMeasurement().wasNewInvoked()); + + MigrationOpTracker tracker2 = basicTracker(getDetail(), context); + tracker2.op(MigrationOp.READ); + tracker2.invoked(MigrationOrigin.NEW); + Optional opt3 = tracker2.createEvent(); + assertTrue(opt3.isPresent()); + assertFalse(opt3.get().getInvokedMeasurement().wasOldInvoked()); + 
assertTrue(opt3.get().getInvokedMeasurement().wasNewInvoked()); + assertEquals(0, logCapture.getMessages().size()); + } + + private static EvaluationDetail getDetail() { + return EvaluationDetail.fromValue( + "live", -1, EvaluationReason.error(EvaluationReason.ErrorKind.FLAG_NOT_FOUND)); + } + + @Test + public void itHandlesAllPermutationsOfErrors() { + LDContext context = LDContext.create("user-key"); + + MigrationOpTracker tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + tracker.invoked(MigrationOrigin.NEW); + + Optional opt1 = tracker.createEvent(); + assertTrue(opt1.isPresent()); + assertNull(opt1.get().getErrorMeasurement()); + + tracker.error(MigrationOrigin.OLD); + + Optional opt2 = tracker.createEvent(); + assertTrue(opt2.isPresent()); + assertNotNull(opt2.get().getErrorMeasurement()); + assertTrue(opt2.get().getErrorMeasurement().hasMeasurement()); + assertTrue(opt2.get().getErrorMeasurement().hasOldError()); + assertFalse(opt2.get().getErrorMeasurement().hasNewError()); + + tracker.error(MigrationOrigin.NEW); + + Optional opt3 = tracker.createEvent(); + assertTrue(opt3.isPresent()); + assertNotNull(opt3.get().getErrorMeasurement()); + assertTrue(opt3.get().getErrorMeasurement().hasMeasurement()); + assertTrue(opt3.get().getErrorMeasurement().hasOldError()); + assertTrue(opt3.get().getErrorMeasurement().hasNewError()); + + MigrationOpTracker tracker2 = basicTracker(getDetail(), context); + + tracker2.op(MigrationOp.READ); + tracker2.invoked(MigrationOrigin.NEW); + tracker2.error(MigrationOrigin.NEW); + Optional opt4 = tracker2.createEvent(); + assertTrue(opt4.isPresent()); + assertNotNull(opt4.get().getErrorMeasurement()); + assertTrue(opt4.get().getErrorMeasurement().hasMeasurement()); + assertFalse(opt4.get().getErrorMeasurement().hasOldError()); + assertTrue(opt4.get().getErrorMeasurement().hasNewError()); + + assertEquals(0, logCapture.getMessages().size()); + } + + @Test + public 
void itHandlesAllPermutationsOfLatency() { + LDContext context = LDContext.create("user-key"); + + MigrationOpTracker tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + tracker.invoked(MigrationOrigin.NEW); + + Optional opt1 = tracker.createEvent(); + assertTrue(opt1.isPresent()); + assertNull(opt1.get().getLatencyMeasurement()); + + tracker.latency(MigrationOrigin.OLD, Duration.ofMillis(100)); + + Optional opt2 = tracker.createEvent(); + assertTrue(opt2.isPresent()); + assertNotNull(opt2.get().getLatencyMeasurement()); + assertEquals(100, opt2.get().getLatencyMeasurement().getOldLatencyMs().longValue()); + assertNull(opt2.get().getLatencyMeasurement().getNewLatencyMs()); + + tracker.latency(MigrationOrigin.NEW, Duration.ofMillis(200)); + + Optional opt3 = tracker.createEvent(); + assertTrue(opt3.isPresent()); + assertNotNull(opt3.get().getLatencyMeasurement()); + assertEquals(100, opt3.get().getLatencyMeasurement().getOldLatencyMs().longValue()); + assertEquals(200, opt3.get().getLatencyMeasurement().getNewLatencyMs().longValue()); + + MigrationOpTracker tracker2 = basicTracker(getDetail(), context); + + tracker2.op(MigrationOp.READ); + tracker2.invoked(MigrationOrigin.OLD); + tracker2.invoked(MigrationOrigin.NEW); + + tracker2.latency(MigrationOrigin.NEW, Duration.ofMillis(100)); + + Optional opt4 = tracker2.createEvent(); + assertTrue(opt4.isPresent()); + assertNotNull(opt4.get().getLatencyMeasurement()); + assertEquals(100, opt4.get().getLatencyMeasurement().getNewLatencyMs().longValue()); + assertNull(opt4.get().getLatencyMeasurement().getOldLatencyMs()); + + assertEquals(0, logCapture.getMessages().size()); + } + + @Test + public void itReportsAConsistencyErrorForLatencyWithoutInvoked() { + LDContext context = LDContext.create("user-key"); + MigrationOpTracker tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + + tracker.invoked(MigrationOrigin.OLD); + 
tracker.latency(MigrationOrigin.NEW, Duration.ofMillis(100)); + + Optional opt = tracker.createEvent(); + assertFalse(opt.isPresent()); + assertEquals("For migration op(read) flagKey(test-key): Latency was recorded for " + + "NEW, but that origin was not invoked.", logCapture.requireMessage(LDLogLevel.ERROR, 0).getText()); + + + MigrationOpTracker tracker2 = basicTracker(getDetail(), context); + + tracker2.op(MigrationOp.WRITE); + + tracker2.invoked(MigrationOrigin.NEW); + tracker2.latency(MigrationOrigin.OLD, Duration.ofMillis(100)); + + Optional opt2 = tracker2.createEvent(); + assertFalse(opt2.isPresent()); + assertEquals("For migration op(write) flagKey(test-key): Latency was recorded for " + + "OLD, but that origin was not invoked.", logCapture.requireMessage(LDLogLevel.ERROR, 0).getText()); + } + + @Test + public void itReportsAConsistencyErrorForErrorsWithoutInvoked() { + LDContext context = LDContext.create("user-key"); + MigrationOpTracker tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + + tracker.invoked(MigrationOrigin.OLD); + tracker.error(MigrationOrigin.NEW); + + Optional opt = tracker.createEvent(); + assertFalse(opt.isPresent()); + assertEquals("For migration op(read) flagKey(test-key): Error reported for NEW, but that " + + "origin was not invoked.", logCapture.requireMessage(LDLogLevel.ERROR, 0).getText()); + + + MigrationOpTracker tracker2 = basicTracker(getDetail(), context); + + tracker2.op(MigrationOp.WRITE); + + tracker2.invoked(MigrationOrigin.NEW); + tracker2.error(MigrationOrigin.OLD); + + Optional opt2 = tracker2.createEvent(); + assertFalse(opt2.isPresent()); + assertEquals("For migration op(write) flagKey(test-key): Error reported for OLD, but that " + + "origin was not invoked.", logCapture.requireMessage(LDLogLevel.ERROR, 0).getText()); + } + + @Test + public void itReportsAConsistencyErrorForComparisonWithoutBothMethods() { + LDContext context = LDContext.create("user-key"); + MigrationOpTracker 
tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + + tracker.invoked(MigrationOrigin.OLD); + tracker.consistency(() -> true); + + Optional opt = tracker.createEvent(); + assertFalse(opt.isPresent()); + assertEquals("For migration op(read) flagKey(test-key): Consistency check" + + " was done, but NEW was not invoked. Both \"old\" and \"new\" must be invoked to do a comparison.", + logCapture.requireMessage(LDLogLevel.ERROR, 0).getText()); + + + MigrationOpTracker tracker2 = basicTracker(getDetail(), context); + + tracker2.op(MigrationOp.WRITE); + + tracker2.invoked(MigrationOrigin.NEW); + tracker2.consistency(() -> true); + + Optional opt2 = tracker2.createEvent(); + assertFalse(opt2.isPresent()); + assertEquals("For migration op(write) flagKey(test-key): Consistency check " + + "was done, but OLD was not invoked. Both \"old\" and \"new\" must be invoked to do a comparison.", + logCapture.requireMessage(LDLogLevel.ERROR, 0).getText()); + } + + @Test + public void itHandlesExceptionsInTheComparisonMethod() { + LDContext context = LDContext.create("user-key"); + MigrationOpTracker tracker = basicTracker(getDetail(), context); + + tracker.op(MigrationOp.READ); + + tracker.invoked(MigrationOrigin.OLD); + tracker.invoked(MigrationOrigin.NEW); + tracker.consistency(() -> {throw new RuntimeException("I HAVE FAILED");}); + + Optional opt = tracker.createEvent(); + assertTrue(opt.isPresent()); + assertEquals("Exception when executing consistency check function for migration 'test-key' the" + + " consistency check will not be included in the generated migration op event. 
Exception:" + + " java.lang.RuntimeException: I HAVE FAILED", + logCapture.requireMessage(LDLogLevel.ERROR, 0).getText()); + + assertNull(opt.get().getConsistencyMeasurement()); + } + + @Test + public void itUsesTheDefaultSamplingRatioOfOne() { + EvaluationDetail detail = getDetail(); + LDContext context = LDContext.create("user-key"); + MigrationOpTracker tracker = basicTracker(detail, context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + + Optional event = tracker.createEvent(); + assertTrue(event.isPresent()); + assertEquals(1, event.get().getSamplingRatio()); + } + + @Test + public void itUsesTheDefaultVersionOfNegativeOne() { + EvaluationDetail detail = getDetail(); + LDContext context = LDContext.create("user-key"); + MigrationOpTracker tracker = basicTracker(detail, context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + + Optional event = tracker.createEvent(); + assertTrue(event.isPresent()); + assertEquals(-1, event.get().getFlagVersion()); + } + + @Test + public void itCanIncludeAVersionFromAFlag() { + EvaluationDetail detail = getDetail(); + LDContext context = LDContext.create("user-key"); + MigrationOpTracker tracker = trackerWithFlag(detail, context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + + Optional event = tracker.createEvent(); + assertTrue(event.isPresent()); + assertEquals(2, event.get().getFlagVersion()); + } + + @Test + public void itCanIncludeASamplingRatioFromAFlag() { + EvaluationDetail detail = getDetail(); + LDContext context = LDContext.create("user-key"); + MigrationOpTracker tracker = trackerWithFlag(detail, context); + + tracker.op(MigrationOp.READ); + tracker.invoked(MigrationOrigin.OLD); + + Optional event = tracker.createEvent(); + assertTrue(event.isPresent()); + assertEquals(5, event.get().getSamplingRatio()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStageTests.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStageTests.java new file mode 100644 index 0000000..6f2160c --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStageTests.java @@ -0,0 +1,51 @@ +package com.launchdarkly.sdk.server; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.Arrays; + +@RunWith(Enclosed.class) +public class MigrationStageTests { + + public static class BasicTests { + @Test + public void itHandlesWhenAStringIsNotAStage() { + Assert.assertFalse(MigrationStage.isStage("potato")); + } + } + + @RunWith(Parameterized.class) + public static class GivenEachStageTest { + @Parameterized.Parameters(name = "{0}") + public static Iterable data() { + return Arrays.asList(new Object[][]{ + {MigrationStage.OFF, "off"}, + {MigrationStage.DUAL_WRITE, "dualwrite"}, + {MigrationStage.SHADOW, "shadow"}, + {MigrationStage.LIVE, "live"}, + {MigrationStage.RAMP_DOWN, "rampdown"}, + {MigrationStage.COMPLETE, "complete"} + }); + } + + @Parameterized.Parameter + public MigrationStage stage; + + @Parameterized.Parameter(value = 1) + public String stageString; + + @Test + public void itCanConvertToAString() { + Assert.assertEquals(stageString, stage.toString()); + } + + @Test + public void itCanTestAValueIsAStage() { + Assert.assertTrue(MigrationStage.isStage(stageString)); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStagesExpectedExecutionTests.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStagesExpectedExecutionTests.java new file mode 100644 index 0000000..0d8c2ec --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationStagesExpectedExecutionTests.java @@ -0,0 +1,393 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import 
com.launchdarkly.sdk.internal.events.Event; +import com.launchdarkly.sdk.server.migrations.Migration; +import com.launchdarkly.sdk.server.migrations.MigrationExecution; +import com.launchdarkly.sdk.server.migrations.MigrationSerialOrder; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.Arrays; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@RunWith(Parameterized.class) +public class MigrationStagesExpectedExecutionTests extends MigrationExecutionFixture { + + @Parameterized.Parameters(name = "{0}-{1}") + public static Iterable data() { + return Arrays.asList(new Object[][]{ + // Mode, Stage, read old, read new, write old, write new. + {MigrationExecution.Parallel(), MigrationStage.OFF, true, false, true, false}, + {MigrationExecution.Parallel(), MigrationStage.DUAL_WRITE, true, false, true, true}, + {MigrationExecution.Parallel(), MigrationStage.SHADOW, true, true, true, true}, + {MigrationExecution.Parallel(), MigrationStage.LIVE, true, true, true, true}, + {MigrationExecution.Parallel(), MigrationStage.RAMP_DOWN, false, true, true, true}, + {MigrationExecution.Parallel(), MigrationStage.COMPLETE, false, true, false, true}, + + {MigrationExecution.Serial(MigrationSerialOrder.FIXED), MigrationStage.OFF, true, false, true, false}, + {MigrationExecution.Serial(MigrationSerialOrder.FIXED), MigrationStage.DUAL_WRITE, true, false, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.FIXED), MigrationStage.SHADOW, true, true, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.FIXED), MigrationStage.LIVE, true, true, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.FIXED), MigrationStage.RAMP_DOWN, false, true, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.FIXED), 
MigrationStage.COMPLETE, false, true, false, true}, + + {MigrationExecution.Serial(MigrationSerialOrder.RANDOM), MigrationStage.OFF, true, false, true, false}, + {MigrationExecution.Serial(MigrationSerialOrder.RANDOM), MigrationStage.DUAL_WRITE, true, false, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.RANDOM), MigrationStage.SHADOW, true, true, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.RANDOM), MigrationStage.LIVE, true, true, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.RANDOM), MigrationStage.RAMP_DOWN, false, true, true, true}, + {MigrationExecution.Serial(MigrationSerialOrder.RANDOM), MigrationStage.COMPLETE, false, true, false, true} + }); + } + + /** + * The parameterization is done using a constructor here so that the base class can be parameterized. + * This did not combine well with an outer class using Enclosed.class. + * + * @param execution to test + * @param stage to test + * @param expectReadOldCalled expected read old + * @param expectReadNewCalled expected read new + * @param expectWriteOldCalled expected write old + * @param expectWriteNewCalled expected write new + */ + public MigrationStagesExpectedExecutionTests( + MigrationExecution execution, + MigrationStage stage, + boolean expectReadOldCalled, + boolean expectReadNewCalled, + boolean expectWriteOldCalled, + boolean expectWriteNewCalled + ) { + super(execution); + this.stage = stage; + this.expectReadOldCalled = expectReadOldCalled; + this.expectReadNewCalled = expectReadNewCalled; + this.expectWriteOldCalled = expectWriteOldCalled; + this.expectWriteNewCalled = expectWriteNewCalled; + } + + public MigrationStage stage; + public boolean expectReadOldCalled; + public boolean expectReadNewCalled; + public boolean expectWriteOldCalled; + public boolean expectWriteNewCalled; + + private void assertReads() { + assertEquals("Expected read old", expectReadOldCalled, readOldCalled); + assertEquals("Expected read new", expectReadNewCalled, 
readNewCalled); + // For a read there should be no writes. + assertFalse("Expected write old", writeOldCalled); + assertFalse("Expected write new", writeNewCalled); + } + + private void assertWrites() { + assertEquals("Expected write old", expectWriteOldCalled, writeOldCalled); + assertEquals("Expected write new", expectWriteNewCalled, writeNewCalled); + // For a write there should be no reads. + assertFalse("Expected read old", readOldCalled); + assertFalse("Expected read new", readNewCalled); + } + + + @Test + public void itReadsFromCorrectSources() { + // No flag config here, just evaluate using the defaults. + Migration.MigrationResult res = migration.read(flagKey, LDContext.create("user-key"), stage); + assertTrue(res.isSuccess()); + assertTrue(res.getResult().isPresent()); + switch(stage) { + case OFF: // Fallthrough cases that have authoritative old. + case DUAL_WRITE: + case SHADOW: + assertEquals("Old", res.getResult().get()); + break; + case LIVE: // Fallthrough cases that have authoritative new. + case RAMP_DOWN: + case COMPLETE: + assertEquals("New", res.getResult().get()); + break; + } + assertReads(); + } + + @Test + public void itWritesToCorrectSources() { + // No flag config here, just evaluate using the defaults. + Migration.MigrationWriteResult res = migration.write(flagKey, LDContext.create("user-key"), stage); + + assertTrue(res.getAuthoritative().isSuccess()); + assertTrue(res.getAuthoritative().getResult().isPresent()); + switch(stage) { + case OFF: + assertEquals("Old", res.getAuthoritative().getResult().get()); + break; + case DUAL_WRITE: // Dual write and shadow do the same thing. + case SHADOW: + assertTrue(res.getNonAuthoritative().isPresent()); + assertTrue(res.getNonAuthoritative().get().isSuccess()); + assertEquals("New", res.getNonAuthoritative().get().getResult().get()); + assertEquals("Old", res.getAuthoritative().getResult().get()); + break; + case LIVE: // Live and rampdown do the same thing. 
+ case RAMP_DOWN: + assertTrue(res.getNonAuthoritative().isPresent()); + assertTrue(res.getNonAuthoritative().get().isSuccess()); + assertEquals("Old", res.getNonAuthoritative().get().getResult().get()); + assertEquals("New", res.getAuthoritative().getResult().get()); + break; + case COMPLETE: + assertEquals("New", res.getAuthoritative().getResult().get()); + break; + } + + assertWrites(); + } + + @Test + public void itReportsReadOperationsCorrectly() { + migration.read(flagKey, LDContext.create("user-key"), stage); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + assertEquals(MigrationOp.READ.toString(), me.getOperation()); + } + + @Test + public void itReportsWriteOperationsCorrectly() { + migration.write(flagKey, LDContext.create("user-key"), stage); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + assertEquals(MigrationOp.WRITE.toString(), me.getOperation()); + } + + @Test + public void itReportsTheCorrectOriginForReadOperations() { + Migration.MigrationResult res = migration.read(flagKey, LDContext.create("user-key"), stage); + switch(stage) { + case OFF: // Fallthrough cases that have authoritative old. + case DUAL_WRITE: + case SHADOW: + assertEquals(MigrationOrigin.OLD, res.getOrigin()); + break; + case LIVE: // Fallthrough cases that have authoritative new. + case RAMP_DOWN: + case COMPLETE: + assertEquals(MigrationOrigin.NEW, res.getOrigin()); + break; + } + } + + @Test + public void itReportsTheCorrectOriginForWriteOperations() { + Migration.MigrationWriteResult res = migration.write(flagKey, LDContext.create("user-key"), stage); + + switch(stage) { + case OFF: + assertEquals(MigrationOrigin.OLD, res.getAuthoritative().getOrigin()); + break; + case DUAL_WRITE: // Dual write and shadow do the same thing. 
+ case SHADOW: + assertEquals(MigrationOrigin.OLD, res.getAuthoritative().getOrigin()); + assertEquals(MigrationOrigin.NEW, res.getNonAuthoritative().get().getOrigin()); + break; + case LIVE: // Live and rampdown do the same thing. + case RAMP_DOWN: + assertEquals(MigrationOrigin.NEW, res.getAuthoritative().getOrigin()); + assertEquals(MigrationOrigin.OLD, res.getNonAuthoritative().get().getOrigin()); + break; + case COMPLETE: + assertEquals(MigrationOrigin.NEW, res.getAuthoritative().getOrigin()); + break; + } + } + + @Test public void itReportsReadErrorsCorrectlyForOld() { + failOldRead = true; + Migration.MigrationResult res = migration.read(flagKey, LDContext.create("user-key"), stage); + switch(stage) { + case OFF: // Fallthrough cases that have authoritative old. + case DUAL_WRITE: + case SHADOW: + assertFalse(res.isSuccess()); + break; + case LIVE: // Fallthrough cases that have authoritative new. + case RAMP_DOWN: + case COMPLETE: + assertTrue(res.isSuccess()); + break; + } + + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + + if(expectReadOldCalled) { + assertNotNull(me.getErrorMeasurement()); + assertTrue(me.getErrorMeasurement().hasOldError()); + } else { + assertNull(me.getErrorMeasurement()); + } + } + + @Test public void itReportsReadErrorsCorrectlyForNew() { + failNewRead = true; + Migration.MigrationResult res = migration.read(flagKey, LDContext.create("user-key"), stage); + switch(stage) { + case OFF: // Fallthrough cases that have authoritative old. + case DUAL_WRITE: + case SHADOW: + assertTrue(res.isSuccess()); + break; + case LIVE: // Fallthrough cases that have authoritative new. 
+ case RAMP_DOWN: + case COMPLETE: + assertFalse(res.isSuccess()); + break; + } + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + + if(expectReadNewCalled) { + assertNotNull(me.getErrorMeasurement()); + assertTrue(me.getErrorMeasurement().hasNewError()); + } else { + assertNull(me.getErrorMeasurement()); + } + } + + @Test public void itReportsReadErrorsCorrectlyForBoth() { + failNewRead = true; + failOldRead = true; + migration.read(flagKey, LDContext.create("user-key"), stage); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + assertNotNull(me.getErrorMeasurement()); + + if(expectReadNewCalled) { + assertTrue(me.getErrorMeasurement().hasNewError()); + } else { + assertFalse(me.getErrorMeasurement().hasNewError()); + } + + if(expectReadOldCalled) { + assertTrue(me.getErrorMeasurement().hasOldError()); + } else { + assertFalse(me.getErrorMeasurement().hasOldError()); + } + } + + @Test + public void itAddsTheCorrectInvokedMeasurementsForReads() { + migration.read(flagKey, LDContext.create("user-key"), stage); + assertEquals(2, eventSink.events.size()); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + + assertNotNull(me.getInvokedMeasurement()); + if(expectReadOldCalled) { + assertTrue(me.getInvokedMeasurement().wasOldInvoked()); + } else { + assertFalse(me.getInvokedMeasurement().wasOldInvoked()); + } + if(expectReadNewCalled) { + assertTrue(me.getInvokedMeasurement().wasNewInvoked()); + } else { + assertFalse(me.getInvokedMeasurement().wasNewInvoked()); + } + } + + @Test + public void itAddsTheCorrectInvokedMeasurementsForWrites() { + migration.write(flagKey, LDContext.create("user-key"), stage); + assertEquals(2, eventSink.events.size()); + Event e = eventSink.events.get(1); + 
assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + + assertNotNull(me.getInvokedMeasurement()); + if(expectWriteOldCalled) { + assertTrue(me.getInvokedMeasurement().wasOldInvoked()); + } else { + assertFalse(me.getInvokedMeasurement().wasOldInvoked()); + } + if(expectWriteNewCalled) { + assertTrue(me.getInvokedMeasurement().wasNewInvoked()); + } else { + assertFalse(me.getInvokedMeasurement().wasNewInvoked()); + } + } + + @Test + public void itDoesNotReportErrorsWhenThereAreNoneForReads() { + migration.read(flagKey, LDContext.create("user-key"), stage); + assertEquals(2, eventSink.events.size()); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + assertNull(me.getErrorMeasurement()); + } + + @Test + public void itDoesNotReportErrorsWhenThereAreNoneForWrites() { + migration.write(flagKey, LDContext.create("user-key"), stage); + assertEquals(2, eventSink.events.size()); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + assertNull(me.getErrorMeasurement()); + } + + @Test + public void itDoesReportLatencyForReads() { + migration.read(flagKey, LDContext.create("user-key"), stage); + assertEquals(2, eventSink.events.size()); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + + assertNotNull(me.getLatencyMeasurement()); + if(expectReadOldCalled) { + assertNotNull(me.getLatencyMeasurement().getOldLatencyMs()); + } else { + assertNull(me.getLatencyMeasurement().getOldLatencyMs()); + } + if(expectReadNewCalled) { + assertNotNull(me.getLatencyMeasurement().getNewLatencyMs()); + } else { + assertNull(me.getLatencyMeasurement().getNewLatencyMs()); + } + } + + @Test + public void itDoesReportLatencyForWrites() { + migration.write(flagKey, 
LDContext.create("user-key"), stage); + assertEquals(2, eventSink.events.size()); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp)e; + + assertNotNull(me.getLatencyMeasurement()); + if(expectWriteOldCalled) { + assertNotNull(me.getLatencyMeasurement().getOldLatencyMs()); + } else { + assertNull(me.getLatencyMeasurement().getOldLatencyMs()); + } + if(expectWriteNewCalled) { + assertNotNull(me.getLatencyMeasurement().getNewLatencyMs()); + } else { + assertNull(me.getLatencyMeasurement().getNewLatencyMs()); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationTests.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationTests.java new file mode 100644 index 0000000..cd70ffc --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationTests.java @@ -0,0 +1,326 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.internal.events.Event; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import com.launchdarkly.sdk.server.migrations.Migration; +import com.launchdarkly.sdk.server.migrations.MigrationBuilder; +import com.launchdarkly.sdk.server.migrations.MigrationExecution; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.Arrays; +import java.util.Optional; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@RunWith(Enclosed.class) +public class MigrationTests { + + @RunWith(Parameterized.class) + public static class GivenStagesThatPerformOldWritesFirstTest extends MigrationExecutionFixture { + public 
GivenStagesThatPerformOldWritesFirstTest() { + super(MigrationExecution.Parallel()); + } + + @Parameterized.Parameters(name = "{0}") + public static Iterable data() { + return Arrays.asList( + MigrationStage.DUAL_WRITE, + MigrationStage.SHADOW + ); + } + + @Parameterized.Parameter + public MigrationStage stage; + + @Test + public void isStopsWritingWhenItEncountersAnError() { + failOldWrite = true; + Migration.MigrationWriteResult res = migration.write(flagKey, LDContext.create("user-key"), stage); + assertFalse(res.getAuthoritative().isSuccess()); + assertFalse(res.getNonAuthoritative().isPresent()); + // The old write should fail, so the new write will not be done. + assertTrue(writeOldCalled); + assertFalse(writeNewCalled); + + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp) e; + + assertFalse(me.getInvokedMeasurement().wasNewInvoked()); + assertTrue(me.getInvokedMeasurement().wasOldInvoked()); + + assertNotNull(me.getErrorMeasurement()); + assertFalse(me.getErrorMeasurement().hasNewError()); + assertTrue(me.getErrorMeasurement().hasOldError()); + } + } + + @RunWith(Parameterized.class) + public static class GivenStagesThatPerformNewWritesFirstTest extends MigrationExecutionFixture { + public GivenStagesThatPerformNewWritesFirstTest() { + super(MigrationExecution.Parallel()); + } + + @Parameterized.Parameters(name = "{0}") + public static Iterable data() { + return Arrays.asList( + MigrationStage.LIVE, + MigrationStage.RAMP_DOWN + ); + } + + @Parameterized.Parameter + public MigrationStage stage; + + @Test + public void itStopsWritingWhenItEncountersAnError() { + failNewWrite = true; + Migration.MigrationWriteResult res = migration.write(flagKey, LDContext.create("user-key"), stage); + assertFalse(res.getAuthoritative().isSuccess()); + assertFalse(res.getNonAuthoritative().isPresent()); + // The new write should fail, so the new write will not be done. 
+ assertTrue(writeNewCalled); + assertFalse(writeOldCalled); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp) e; + + assertTrue(me.getInvokedMeasurement().wasNewInvoked()); + assertFalse(me.getInvokedMeasurement().wasOldInvoked()); + + assertNotNull(me.getErrorMeasurement()); + assertTrue(me.getErrorMeasurement().hasNewError()); + assertFalse(me.getErrorMeasurement().hasOldError()); + } + } + + @RunWith(Parameterized.class) + public static class GivenMigrationStagesExecutedWithPayloadsTest extends MigrationExecutionFixture { + public GivenMigrationStagesExecutedWithPayloadsTest() { + super(MigrationExecution.Parallel()); + } + + @Parameterized.Parameters(name = "{0}") + public static Iterable data() { + return Arrays.asList(new Object[][]{ + // Stage, read old payload, read new payload, write old payload, write new payload. + {MigrationStage.OFF, "payload", null, "payload", null}, + {MigrationStage.DUAL_WRITE, "payload", null, "payload", "payload"}, + {MigrationStage.SHADOW, "payload", "payload", "payload", "payload"}, + {MigrationStage.LIVE, "payload", "payload", "payload", "payload"}, + {MigrationStage.RAMP_DOWN, null, "payload", "payload", "payload"}, + {MigrationStage.COMPLETE, null, "payload", null, "payload"} + }); + } + + @Parameterized.Parameter + public MigrationStage stage; + + @Parameterized.Parameter(value = 1) + public String expectedOldReadPayload; + + @Parameterized.Parameter(value = 2) + public String expectedNewReadPayload; + + @Parameterized.Parameter(value = 3) + public String expectedOldWritePayload; + + @Parameterized.Parameter(value = 4) + public String expectedNewWritePayload; + + private void assertReads() { + assertEquals("Expected read old", expectedOldReadPayload, payloadReadOld); + assertEquals("Expected read new", expectedNewReadPayload, payloadReadNew); + // For a read there should be no writes. 
assertNull("Expected write old", payloadWriteOld);
+      assertNull("Expected write new", payloadWriteNew);
+    }
+
+    private void assertWrites() {
+      assertEquals("Expected write old", expectedOldWritePayload, payloadWriteOld);
+      assertEquals("Expected write new", expectedNewWritePayload, payloadWriteNew);
+      // For a write there should be no reads.
+      assertNull("Expected read old", payloadReadOld);
+      assertNull("Expected read new", payloadReadNew);
+    }
+
+
+    @Test
+    public void itCorrectlyForwardsTheReadPayload() {
+      // No flag config here, just evaluate using the defaults.
+      migration.read(flagKey, LDContext.create("user-key"), stage, "payload");
+      assertReads();
+    }
+
+    @Test
+    public void itCorrectlyForwardsTheWritePayload() {
+      // No flag config here, just evaluate using the defaults.
+      migration.write(flagKey, LDContext.create("user-key"), stage, "payload");
+      assertWrites();
+    }
+  }
+
+  @RunWith(Parameterized.class)
+  public static class GivenMigrationThatDoesNotTrackLatencyTest extends MigrationExecutionFixture {
+    public GivenMigrationThatDoesNotTrackLatencyTest() {
+      super(MigrationExecution.Parallel(),
+          false, false);
+    }
+
+    @Parameterized.Parameters(name = "{0}")
+    public static Iterable data() {
+      return Arrays.asList(new Object[][]{
+          // Stage only; this fixture takes no payload columns.
{MigrationStage.OFF},
+          {MigrationStage.DUAL_WRITE},
+          {MigrationStage.SHADOW},
+          {MigrationStage.LIVE},
+          {MigrationStage.RAMP_DOWN},
+          {MigrationStage.COMPLETE}
+      });
+    }
+
+    @Parameterized.Parameter
+    public MigrationStage stage;
+
+    @Test
+    public void itDoesNotTrackLatencyForReads() {
+      migration.read(flagKey, LDContext.create("user-key"), stage);
+      Event e = eventSink.events.get(1);
+      assertEquals(Event.MigrationOp.class, e.getClass());
+      Event.MigrationOp me = (Event.MigrationOp) e;
+      assertNull(me.getLatencyMeasurement());
+    }
+
+    @Test
+    public void itDoesNotTrackLatencyForWrites() {
+      migration.write(flagKey, LDContext.create("user-key"), stage);
+      Event e = eventSink.events.get(1);
+      assertEquals(Event.MigrationOp.class, e.getClass());
+      Event.MigrationOp me = (Event.MigrationOp) e;
+      assertNull(me.getLatencyMeasurement());
+    }
+  }
+
+  @RunWith(Parameterized.class)
+  public static class GivenMigrationThatDoesNotTrackErrorsTest extends MigrationExecutionFixture {
+    public GivenMigrationThatDoesNotTrackErrorsTest() {
+      super(MigrationExecution.Parallel(),
+          false, false);
+    }
+
+    @Parameterized.Parameters(name = "{0}")
+    public static Iterable data() {
+      return Arrays.asList(new Object[][]{
+          // Stage only; this fixture takes no payload columns.
+ {MigrationStage.OFF}, + {MigrationStage.DUAL_WRITE}, + {MigrationStage.SHADOW}, + {MigrationStage.LIVE}, + {MigrationStage.RAMP_DOWN}, + {MigrationStage.COMPLETE} + }); + } + + @Parameterized.Parameter + public MigrationStage stage; + + @Test + public void itDoesNotTrackErrorsForReads() { + failNewRead = true; + failOldRead = true; + migration.read(flagKey, LDContext.create("user-key"), stage); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp) e; + assertNull(me.getErrorMeasurement()); + } + + @Test + public void itDoesNotTrackErrorsForWrites() { + failOldWrite = true; + failNewWrite = true; + migration.write(flagKey, LDContext.create("user-key"), stage); + Event e = eventSink.events.get(1); + assertEquals(Event.MigrationOp.class, e.getClass()); + Event.MigrationOp me = (Event.MigrationOp) e; + assertNull(me.getErrorMeasurement()); + } + } + + public static class GivenOperationsWhichThrowTest extends BaseTest { + public final LDClientInterface client = new LDClient("SDK_KEY", baseConfig() + .build()); + + public Migration migration; + + public GivenOperationsWhichThrowTest() { + MigrationBuilder builder = new MigrationBuilder(client) + .read((payload) -> { + throw new RuntimeException("old read"); + }, (payload) -> { + throw new RuntimeException("new read"); + }) + .write((payload) -> { + throw new RuntimeException("old write"); + }, (payload) -> { + throw new RuntimeException("new write"); + }); + Optional> res = builder.build(); + assertTrue(res.isPresent()); + migration = res.get(); + } + + @Test + public void itHandlesExceptionInOldRead() { + Migration.MigrationResult res = migration.read("test-flag", + LDContext.create("user-key"), MigrationStage.OFF); + + assertFalse(res.isSuccess()); + Optional exception = res.getException(); + assertTrue(exception.isPresent()); + assertEquals("old read", exception.get().getMessage()); + } + + @Test + public void 
itHandlesExceptionInNewRead() { + Migration.MigrationResult res = migration.read("test-flag", + LDContext.create("user-key"), MigrationStage.LIVE); + + assertFalse(res.isSuccess()); + Optional exception = res.getException(); + assertTrue(exception.isPresent()); + assertEquals("new read", exception.get().getMessage()); + } + + @Test + public void itHandlesExceptionInOldWrite() { + Migration.MigrationWriteResult res = migration.write("test-flag", + LDContext.create("user-key"), MigrationStage.OFF); + + assertFalse(res.getAuthoritative().isSuccess()); + Optional exception = res.getAuthoritative().getException(); + assertTrue(exception.isPresent()); + assertEquals("old write", exception.get().getMessage()); + } + + @Test + public void itHandlesExceptionInNewWrite() { + Migration.MigrationWriteResult res = migration.write("test-flag", + LDContext.create("user-key"), MigrationStage.LIVE); + + assertFalse(res.getAuthoritative().isSuccess()); + Optional exception = res.getAuthoritative().getException(); + assertTrue(exception.isPresent()); + assertEquals("new write", exception.get().getMessage()); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationVariationTests.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationVariationTests.java new file mode 100644 index 0000000..625e7ee --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/MigrationVariationTests.java @@ -0,0 +1,63 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.integrations.TestData; +import com.launchdarkly.sdk.server.interfaces.LDClientInterface; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.util.Arrays; + +@RunWith(Parameterized.class) +public class MigrationVariationTests extends BaseTest { + + @Parameterized.Parameters(name = 
"{0}" ) + public static Iterable data() { + return Arrays.asList( + MigrationStage.OFF, + MigrationStage.DUAL_WRITE, + MigrationStage.SHADOW, + MigrationStage.LIVE, + MigrationStage.RAMP_DOWN, + MigrationStage.COMPLETE + ); + } + + private final TestData testData = TestData.dataSource(); + + private final LDClientInterface client = new LDClient("SDK_KEY", baseConfig() + .dataSource(testData) + .build()); + + @Parameterized.Parameter + public MigrationStage stage; + + @Test + public void itEvaluatesDefaultForMissingFlag() { + MigrationVariation resStage = client.migrationVariation("key", LDContext.create("potato"), stage); + Assert.assertEquals(stage, resStage.getStage()); + } + + @Test + public void itDoesEvaluateDefaultForFlagWithInvalidStage() { + final String flagKey = "test-flag"; + final LDContext context = LDContext.create("test-key"); + testData.update(testData.flag(flagKey).valueForAll(LDValue.of("potato"))); + MigrationVariation resStage = client.migrationVariation(flagKey, context, stage); + Assert.assertEquals(stage, resStage.getStage()); + } + + @Test + public void itEvaluatesCorrectValueForExistingFlag() { + final String flagKey = "test-flag"; + final LDContext context = LDContext.create("test-key"); + testData.update(testData.flag(flagKey).valueForAll(LDValue.of(stage.toString()))); + // Get a stage that is not the stage we are testing. 
+ MigrationStage defaultStage = Arrays.stream(MigrationStage.values()).filter(item -> item != stage).findFirst().get(); + MigrationVariation resStage = client.migrationVariation(flagKey, context, defaultStage); + Assert.assertEquals(stage, resStage.getStage()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ModelBuilders.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ModelBuilders.java new file mode 100644 index 0000000..b9a6f1a --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ModelBuilders.java @@ -0,0 +1,534 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.Clause; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Operator; +import com.launchdarkly.sdk.server.DataModel.Prerequisite; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import com.launchdarkly.sdk.server.DataModel.Rule; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.SegmentRule; +import com.launchdarkly.sdk.server.DataModel.SegmentTarget; +import com.launchdarkly.sdk.server.DataModel.Target; +import com.launchdarkly.sdk.server.DataModel.VariationOrRollout; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import static java.util.Arrays.asList; + +@SuppressWarnings("javadoc") +public abstract class ModelBuilders { + public static FlagBuilder flagBuilder(String key) { + return new FlagBuilder(key); + } + + public static FlagBuilder flagBuilder(DataModel.FeatureFlag fromFlag) { + 
return new FlagBuilder(fromFlag); + } + + public static FeatureFlag booleanFlagWithClauses(String key, DataModel.Clause... clauses) { + DataModel.Rule rule = ruleBuilder().variation(1).clauses(clauses).build(); + return flagBuilder(key) + .on(true) + .rules(rule) + .fallthrough(fallthroughVariation(0)) + .offVariation(0) + .variations(LDValue.of(false), LDValue.of(true)) + .build(); + } + + public static FeatureFlag flagWithValue(String key, LDValue value) { + return flagBuilder(key) + .on(false) + .offVariation(0) + .variations(value) + .build(); + } + + public static VariationOrRollout fallthroughVariation(int variation) { + return new DataModel.VariationOrRollout(variation, null); + } + + public static RuleBuilder ruleBuilder() { + return new RuleBuilder(); + } + + public static Clause clause( + ContextKind contextKind, + AttributeRef attribute, + Operator op, + LDValue... values + ) { + return new Clause(contextKind, attribute, op, Arrays.asList(values), false); + } + + public static Clause clause(ContextKind contextKind, String attributeName, DataModel.Operator op, LDValue... values) { + return clause(contextKind, AttributeRef.fromLiteral(attributeName), op, values); + } + + public static Clause clause(String attributeName, DataModel.Operator op, LDValue... values) { + return clause(null, attributeName, op, values); + } + + public static Clause clauseMatchingContext(LDContext context) { + if (context.isMultiple()) { + return clauseMatchingContext(context.getIndividualContext(0)); + } + return clause(context.getKind(), AttributeRef.fromLiteral("key"), DataModel.Operator.in, LDValue.of(context.getKey())); + } + + public static Clause clauseNotMatchingContext(LDContext context) { + return negateClause(clauseMatchingContext(context)); + } + + public static Clause clauseMatchingSegment(String... 
segmentKeys) { + LDValue[] values = new LDValue[segmentKeys.length]; + for (int i = 0; i < segmentKeys.length; i++) { + values[i] = LDValue.of(segmentKeys[i]); + } + return clause(null, (AttributeRef)null, DataModel.Operator.segmentMatch, values); + } + + public static Clause clauseMatchingSegment(Segment segment) { + return clauseMatchingSegment(segment.getKey()); + } + + public static Clause negateClause(Clause clause) { + return new Clause(clause.getContextKind(), clause.getAttribute(), clause.getOp(), clause.getValues(), !clause.isNegate()); + } + + public static Target target(ContextKind contextKind, int variation, String... userKeys) { + return new Target(contextKind, ImmutableSet.copyOf(userKeys), variation); + } + + public static Target target(int variation, String... userKeys) { + return target(null, variation, userKeys); + } + + public static Prerequisite prerequisite(String key, int variation) { + return new DataModel.Prerequisite(key, variation); + } + + public static Rollout emptyRollout() { + return new DataModel.Rollout(null, ImmutableList.of(), null, RolloutKind.rollout, null); + } + + public static SegmentBuilder segmentBuilder(String key) { + return new SegmentBuilder(key); + } + + public static SegmentRuleBuilder segmentRuleBuilder() { + return new SegmentRuleBuilder(); + } + + public static class FlagBuilder { + private String key; + private int version; + private boolean on; + private List prerequisites = new ArrayList<>(); + private String salt; + private List targets = new ArrayList<>(); + private List contextTargets = new ArrayList<>(); + private List rules = new ArrayList<>(); + private VariationOrRollout fallthrough; + private Integer offVariation; + private List variations = new ArrayList<>(); + private boolean clientSide; + private boolean trackEvents; + private boolean trackEventsFallthrough; + private Long debugEventsUntilDate; + private boolean deleted; + private Long samplingRatio; + private FeatureFlag.Migration migration; + private 
boolean excludeFromSummaries; + + private boolean disablePreprocessing = false; + + private FlagBuilder(String key) { + this.key = key; + } + + private FlagBuilder(DataModel.FeatureFlag f) { + if (f != null) { + this.key = f.getKey(); + this.version = f.getVersion(); + this.on = f.isOn(); + this.prerequisites = f.getPrerequisites(); + this.salt = f.getSalt(); + this.targets = f.getTargets(); + this.contextTargets = f.getContextTargets(); + this.rules = f.getRules(); + this.fallthrough = f.getFallthrough(); + this.offVariation = f.getOffVariation(); + this.variations = f.getVariations(); + this.clientSide = f.isClientSide(); + this.trackEvents = f.isTrackEvents(); + this.trackEventsFallthrough = f.isTrackEventsFallthrough(); + this.debugEventsUntilDate = f.getDebugEventsUntilDate(); + this.deleted = f.isDeleted(); + this.samplingRatio = f.getSamplingRatio(); + this.migration = f.getMigration(); + this.excludeFromSummaries = f.isExcludeFromSummaries(); + } + } + + public FlagBuilder version(int version) { + this.version = version; + return this; + } + + public FlagBuilder on(boolean on) { + this.on = on; + return this; + } + + public FlagBuilder prerequisites(Prerequisite... prerequisites) { + this.prerequisites = Arrays.asList(prerequisites); + return this; + } + + public FlagBuilder salt(String salt) { + this.salt = salt; + return this; + } + + public FlagBuilder targets(Target... targets) { + this.targets = Arrays.asList(targets); + return this; + } + + public FlagBuilder addTarget(int variation, String... values) { + targets.add(target(variation, values)); + return this; + } + + public FlagBuilder contextTargets(Target... contextTargets) { + this.contextTargets = Arrays.asList(contextTargets); + return this; + } + + public FlagBuilder addContextTarget(ContextKind contextKind, int variation, String... values) { + contextTargets.add(target(contextKind, variation, values)); + return this; + } + + public FlagBuilder rules(Rule... 
rules) { + this.rules = Arrays.asList(rules); + return this; + } + + public FlagBuilder addRule(Rule rule) { + rules.add(rule); + return this; + } + + public FlagBuilder addRule(String id, int variation, String... clausesAsJson) { + Clause[] clauses = new Clause[clausesAsJson.length]; + for (int i = 0; i < clausesAsJson.length; i++) { + clauses[i] = JsonHelpers.deserialize(clausesAsJson[i], Clause.class); + } + return addRule(ruleBuilder().id(id).variation(variation).clauses(clauses).build()); + } + + public FlagBuilder fallthroughVariation(int fallthroughVariation) { + this.fallthrough = new DataModel.VariationOrRollout(fallthroughVariation, null); + return this; + } + + public FlagBuilder fallthrough(Rollout rollout) { + this.fallthrough = new DataModel.VariationOrRollout(null, rollout); + return this; + } + + public FlagBuilder fallthrough(VariationOrRollout fallthrough) { + this.fallthrough = fallthrough; + return this; + } + + public FlagBuilder offVariation(Integer offVariation) { + this.offVariation = offVariation; + return this; + } + + public FlagBuilder variations(LDValue... variations) { + this.variations = Arrays.asList(variations); + return this; + } + + public FlagBuilder variations(boolean... variations) { + List values = new ArrayList<>(); + for (boolean v: variations) { + values.add(LDValue.of(v)); + } + this.variations = values; + return this; + } + + public FlagBuilder variations(String... 
variations) { + List values = new ArrayList<>(); + for (String v: variations) { + values.add(LDValue.of(v)); + } + this.variations = values; + return this; + } + + public FlagBuilder generatedVariations(int numVariations) { + variations.clear(); + for (int i = 0; i < numVariations; i++) { + variations.add(LDValue.of(i)); + } + return this; + } + + public FlagBuilder clientSide(boolean clientSide) { + this.clientSide = clientSide; + return this; + } + + public FlagBuilder trackEvents(boolean trackEvents) { + this.trackEvents = trackEvents; + return this; + } + + public FlagBuilder trackEventsFallthrough(boolean trackEventsFallthrough) { + this.trackEventsFallthrough = trackEventsFallthrough; + return this; + } + + public FlagBuilder debugEventsUntilDate(Long debugEventsUntilDate) { + this.debugEventsUntilDate = debugEventsUntilDate; + return this; + } + + public FlagBuilder deleted(boolean deleted) { + this.deleted = deleted; + return this; + } + + public FlagBuilder disablePreprocessing(boolean disable) { + this.disablePreprocessing = disable; + return this; + } + + public FlagBuilder samplingRatio(long samplingRatio) { + this.samplingRatio = samplingRatio; + return this; + } + + public FlagBuilder migration(FeatureFlag.Migration migration) { + this.migration = migration; + return this; + } + + public FeatureFlag build() { + FeatureFlag flag = new DataModel.FeatureFlag(key, version, on, prerequisites, salt, targets, + contextTargets, rules, fallthrough, offVariation, variations, + clientSide, trackEvents, trackEventsFallthrough, debugEventsUntilDate, deleted, + samplingRatio, migration, excludeFromSummaries); + if (!disablePreprocessing) { + flag.afterDeserialized(); + } + return flag; + } + } + + public static class MigrationBuilder { + private Long checkRatio; + + public MigrationBuilder checkRatio(long checkRatio) { + this.checkRatio = checkRatio; + return this; + } + + public FeatureFlag.Migration build() { + return new FeatureFlag.Migration(checkRatio); + } + 
} + + public static class RuleBuilder { + private String id; + private List clauses = new ArrayList<>(); + private Integer variation; + private DataModel.Rollout rollout; + private boolean trackEvents; + + private RuleBuilder() { + } + + public DataModel.Rule build() { + return new DataModel.Rule(id, clauses, variation, rollout, trackEvents); + } + + public RuleBuilder id(String id) { + this.id = id; + return this; + } + + public RuleBuilder clauses(DataModel.Clause... clauses) { + this.clauses = ImmutableList.copyOf(clauses); + return this; + } + + public RuleBuilder variation(Integer variation) { + this.variation = variation; + return this; + } + + public RuleBuilder rollout(DataModel.Rollout rollout) { + this.rollout = rollout; + return this; + } + + public RuleBuilder trackEvents(boolean trackEvents) { + this.trackEvents = trackEvents; + return this; + } + } + + public static class SegmentBuilder { + private String key; + private Set included = new HashSet<>(); + private Set excluded = new HashSet<>(); + private List includedContexts = new ArrayList<>(); + private List excludedContexts = new ArrayList<>(); + private String salt = ""; + private List rules = new ArrayList<>(); + private int version = 0; + private boolean deleted; + private boolean unbounded; + private ContextKind unboundedContextKind; + private Integer generation; + private boolean disablePreprocessing; + + private SegmentBuilder(String key) { + this.key = key; + } + + private SegmentBuilder(Segment from) { + this.key = from.getKey(); + this.included = new HashSet<>(from.getIncluded()); + this.excluded = new HashSet<>(from.getExcluded()); + this.includedContexts = new ArrayList<>(from.getIncludedContexts()); + this.excludedContexts = new ArrayList<>(from.getIncludedContexts()); + this.salt = from.getSalt(); + this.rules = new ArrayList<>(from.getRules()); + this.version = from.getVersion(); + this.deleted = from.isDeleted(); + } + + public Segment build() { + Segment s = new Segment(key, 
included, excluded, includedContexts, excludedContexts, + salt, rules, version, deleted, unbounded, unboundedContextKind, generation); + if (!disablePreprocessing) { + s.afterDeserialized(); + } + return s; + } + + public SegmentBuilder disablePreprocessing(boolean disable) { + this.disablePreprocessing = disable; + return this; + } + + public SegmentBuilder included(String... included) { + this.included.addAll(asList(included)); + return this; + } + + public SegmentBuilder excluded(String... excluded) { + this.excluded.addAll(asList(excluded)); + return this; + } + + public SegmentBuilder includedContexts(ContextKind contextKind, String... keys) { + this.includedContexts.add(new SegmentTarget(contextKind, ImmutableSet.copyOf(keys))); + return this; + } + + public SegmentBuilder excludedContexts(ContextKind contextKind, String... keys) { + this.excludedContexts.add(new SegmentTarget(contextKind, ImmutableSet.copyOf(keys))); + return this; + } + + public SegmentBuilder salt(String salt) { + this.salt = salt; + return this; + } + + public SegmentBuilder rules(DataModel.SegmentRule... 
rules) { + this.rules = Arrays.asList(rules); + return this; + } + + public SegmentBuilder version(int version) { + this.version = version; + return this; + } + + public SegmentBuilder deleted(boolean deleted) { + this.deleted = deleted; + return this; + } + + public SegmentBuilder unbounded(boolean unbounded) { + this.unbounded = unbounded; + return this; + } + + public SegmentBuilder unboundedContextKind(ContextKind unboundedContextKind) { + this.unboundedContextKind = unboundedContextKind; + return this; + } + + public SegmentBuilder generation(Integer generation) { + this.generation = generation; + return this; + } + } + + public static class SegmentRuleBuilder { + private List clauses = new ArrayList<>(); + private Integer weight; + private ContextKind rolloutContextKind; + private AttributeRef bucketBy; + + private SegmentRuleBuilder() { + } + + public SegmentRule build() { + return new SegmentRule(clauses, weight, rolloutContextKind, bucketBy); + } + + public SegmentRuleBuilder clauses(DataModel.Clause... 
clauses) { + this.clauses = ImmutableList.copyOf(clauses); + return this; + } + + public SegmentRuleBuilder weight(Integer weight) { + this.weight = weight; + return this; + } + + public SegmentRuleBuilder rolloutContextKind(ContextKind rolloutContextKind) { + this.rolloutContextKind = rolloutContextKind; + return this; + } + + public SegmentRuleBuilder bucketBy(AttributeRef bucketBy) { + this.bucketBy = bucketBy; + return this; + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperOtherTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperOtherTest.java new file mode 100644 index 0000000..374198b --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperOtherTest.java @@ -0,0 +1,115 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.DataStoreTestTypes.TestItem; +import com.launchdarkly.sdk.server.integrations.MockPersistentDataStore; +import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder.StaleValuesPolicy; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.testhelpers.TypeBehavior; + +import org.junit.Test; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; + +import static com.launchdarkly.sdk.server.DataStoreTestTypes.TEST_ITEMS; +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; + +/** + * These tests are for PersistentDataStoreWrapper functionality that doesn't fit into the parameterized + * PersistentDataStoreWrapperTest suite. 
+ */ +@SuppressWarnings("javadoc") +public class PersistentDataStoreWrapperOtherTest extends BaseTest { + private static final RuntimeException FAKE_ERROR = new RuntimeException("fake error"); + + private final MockPersistentDataStore core; + + public PersistentDataStoreWrapperOtherTest() { + this.core = new MockPersistentDataStore(); + } + + private PersistentDataStoreWrapper makeWrapper(Duration cacheTtl, StaleValuesPolicy policy) { + return new PersistentDataStoreWrapper( + core, + cacheTtl, + policy, + false, + status -> {}, + sharedExecutor, + testLogger + ); + } + + @Test + public void cacheKeyEquality() { + List> allPermutations = new ArrayList<>(); + for (DataKind kind: new DataKind[] { DataModel.FEATURES, DataModel.SEGMENTS }) { + for (String key: new String[] { "a", "b" }) { + allPermutations.add(() -> PersistentDataStoreWrapper.CacheKey.forItem(kind, key)); + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void cacheInRefreshModeRefreshesExpiredItem() throws Exception { + try (PersistentDataStoreWrapper wrapper = makeWrapper(Duration.ofMillis(20), StaleValuesPolicy.REFRESH)) { + TestItem itemv1 = new TestItem("key", 1); + TestItem itemv2 = new TestItem(itemv1.key, 2); + core.forceSet(TEST_ITEMS, itemv1); + + assertEquals(0, core.getQueryCount); + + ItemDescriptor result1 = wrapper.get(TEST_ITEMS, itemv1.key); + assertThat(result1, equalTo(itemv1.toItemDescriptor())); + assertEquals(1, core.getQueryCount); + + // item is now in the cache + // change the item in the underlying store + core.forceSet(TEST_ITEMS, itemv2); + + // wait for the cached item to expire + Thread.sleep(50); + + // it has not yet tried to requery the store, because we didn't use ASYNC_REFRESH + assertEquals(1, core.getQueryCount); + + // try to get it again - it refreshes the cache with the new data + ItemDescriptor result2 = wrapper.get(TEST_ITEMS, itemv1.key); + assertThat(result2, equalTo(itemv2.toItemDescriptor())); + } + } + + @Test + public 
void cacheInRefreshModeKeepsExpiredItemInCacheIfRefreshFails() throws Exception { + try (PersistentDataStoreWrapper wrapper = makeWrapper(Duration.ofMillis(20), StaleValuesPolicy.REFRESH)) { + TestItem item = new TestItem("key", 1); + core.forceSet(TEST_ITEMS, item); + + assertEquals(0, core.getQueryCount); + + ItemDescriptor result1 = wrapper.get(TEST_ITEMS, item.key); + assertThat(result1, equalTo(item.toItemDescriptor())); + assertEquals(1, core.getQueryCount); + + // item is now in the cache + // now make it so the core will return an error if get() is called + core.fakeError = FAKE_ERROR; + + // wait for the cached item to expire + Thread.sleep(50); + + // it has not yet tried to requery the store, because we didn't use REFRESH_ASYNC + assertEquals(1, core.getQueryCount); + + // try to get it again - the query fails, but in REFRESH mode it swallows the error and keeps the old cached value + ItemDescriptor result2 = wrapper.get(TEST_ITEMS, item.key); + assertThat(result2, equalTo(item.toItemDescriptor())); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperTest.java new file mode 100644 index 0000000..d4cf12c --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PersistentDataStoreWrapperTest.java @@ -0,0 +1,731 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.DataStoreTestTypes.TestItem; +import com.launchdarkly.sdk.server.integrations.MockPersistentDataStore; +import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats; +import 
com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +import java.io.IOException; +import java.time.Duration; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; + +import static com.launchdarkly.sdk.server.DataStoreTestTypes.TEST_ITEMS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toDataMap; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toItemsMap; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toSerialized; +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; +import static org.junit.Assume.assumeThat; + +@SuppressWarnings("javadoc") +@RunWith(Parameterized.class) +public class PersistentDataStoreWrapperTest extends BaseTest { + private static final RuntimeException FAKE_ERROR = new RuntimeException("fake error"); + + private final TestMode testMode; + private final MockPersistentDataStore core; + private final PersistentDataStoreWrapper wrapper; + private final EventBroadcasterImpl statusBroadcaster; + private final DataStoreUpdatesImpl dataStoreUpdates; + private final DataStoreStatusProvider dataStoreStatusProvider; + + static class TestMode { + final boolean cached; + final boolean cachedIndefinitely; + final boolean 
persistOnlyAsString; + + TestMode(boolean cached, boolean cachedIndefinitely, boolean persistOnlyAsString) { + this.cached = cached; + this.cachedIndefinitely = cachedIndefinitely; + this.persistOnlyAsString = persistOnlyAsString; + } + + boolean isCached() { + return cached; + } + + boolean isCachedWithFiniteTtl() { + return cached && !cachedIndefinitely; + } + + boolean isCachedIndefinitely() { + return cached && cachedIndefinitely; + } + + Duration getCacheTtl() { + return cached ? (cachedIndefinitely ? Duration.ofMillis(-1) : Duration.ofSeconds(30)) : Duration.ZERO; + } + + @Override + public String toString() { + return "TestMode(" + + (cached ? (cachedIndefinitely ? "CachedIndefinitely" : "Cached") : "Uncached") + + (persistOnlyAsString ? ",persistOnlyAsString" : "") + ")"; + } + } + + @Parameters(name="cached={0}") + public static Iterable data() { + return ImmutableList.of( + new TestMode(true, false, false), + new TestMode(true, false, true), + new TestMode(true, true, false), + new TestMode(true, true, true), + new TestMode(false, false, false), + new TestMode(false, false, true) + ); + } + + public PersistentDataStoreWrapperTest(TestMode testMode) { + this.testMode = testMode; + this.core = new MockPersistentDataStore(); + this.core.persistOnlyAsString = testMode.persistOnlyAsString; + this.wrapper = new PersistentDataStoreWrapper( + core, + testMode.getCacheTtl(), + PersistentDataStoreBuilder.StaleValuesPolicy.EVICT, + false, + this::updateStatus, + sharedExecutor, + testLogger + ); + this.statusBroadcaster = EventBroadcasterImpl.forDataStoreStatus(sharedExecutor, testLogger); + this.dataStoreUpdates = new DataStoreUpdatesImpl(statusBroadcaster); + this.dataStoreStatusProvider = new DataStoreStatusProviderImpl(wrapper, dataStoreUpdates); + } + + private void updateStatus(DataStoreStatusProvider.Status status) { + dataStoreUpdates.updateStatus(status); + } + + @After + public void tearDown() throws IOException { + this.wrapper.close(); + } + + @Test + 
public void get() { + TestItem itemv1 = new TestItem("key", 1); + TestItem itemv2 = itemv1.withVersion(2); + + core.forceSet(TEST_ITEMS, itemv1); + assertThat(wrapper.get(TEST_ITEMS, itemv1.key), equalTo(itemv1.toItemDescriptor())); + + core.forceSet(TEST_ITEMS, itemv2); + + // if cached, we will not see the new underlying value yet + ItemDescriptor result = wrapper.get(TEST_ITEMS, itemv1.key); + ItemDescriptor expected = (testMode.isCached() ? itemv1 : itemv2).toItemDescriptor(); + assertThat(result, equalTo(expected)); + } + + @Test + public void getDeletedItem() { + String key = "key"; + + core.forceSet(TEST_ITEMS, key, toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(1))); + assertThat(wrapper.get(TEST_ITEMS, key), equalTo(ItemDescriptor.deletedItem(1))); + + TestItem itemv2 = new TestItem(key, 2); + core.forceSet(TEST_ITEMS, itemv2); + + // if cached, we will not see the new underlying value yet + ItemDescriptor result = wrapper.get(TEST_ITEMS, key); + ItemDescriptor expected = testMode.isCached() ? ItemDescriptor.deletedItem(1) : itemv2.toItemDescriptor(); + assertThat(result, equalTo(expected)); + } + + @Test + public void getMissingItem() { + String key = "key"; + + assertThat(wrapper.get(TEST_ITEMS, key), nullValue()); + + TestItem item = new TestItem(key, 1); + core.forceSet(TEST_ITEMS, item); + + // if cached, the cache can retain a null result + ItemDescriptor result = wrapper.get(TEST_ITEMS, item.key); + assertThat(result, testMode.isCached() ? 
nullValue(ItemDescriptor.class) : equalTo(item.toItemDescriptor())); + } + + @Test + public void cachedGetUsesValuesFromInit() { + assumeThat(testMode.isCached(), is(true)); + + TestItem item1 = new TestItem("key1", 1); + TestItem item2 = new TestItem("key2", 1); + wrapper.init(new DataBuilder().add(TEST_ITEMS, item1, item2).build()); + + core.forceRemove(TEST_ITEMS, item1.key); + + assertThat(wrapper.get(TEST_ITEMS, item1.key), equalTo(item1.toItemDescriptor())); + } + + @Test + public void getAll() { + TestItem item1 = new TestItem("key1", 1); + TestItem item2 = new TestItem("key2", 1); + + core.forceSet(TEST_ITEMS, item1); + core.forceSet(TEST_ITEMS, item2); + Map items = toItemsMap(wrapper.getAll(TEST_ITEMS)); + Map expected = ImmutableMap.of( + item1.key, item1.toItemDescriptor(), item2.key, item2.toItemDescriptor()); + assertThat(items, equalTo(expected)); + + core.forceRemove(TEST_ITEMS, item2.key); + items = toItemsMap(wrapper.getAll(TEST_ITEMS)); + if (testMode.isCached()) { + assertThat(items, equalTo(expected)); + } else { + Map expected1 = ImmutableMap.of(item1.key, item1.toItemDescriptor()); + assertThat(items, equalTo(expected1)); + } + } + + @Test + public void getAllDoesNotRemoveDeletedItems() { + String key1 = "key1", key2 = "key2"; + TestItem item1 = new TestItem(key1, 1); + + core.forceSet(TEST_ITEMS, item1); + core.forceSet(TEST_ITEMS, key2, toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(1))); + Map items = toItemsMap(wrapper.getAll(TEST_ITEMS)); + Map expected = ImmutableMap.of( + key1, item1.toItemDescriptor(), key2, ItemDescriptor.deletedItem(1)); + assertThat(items, equalTo(expected)); + } + + @Test + public void cachedAllUsesValuesFromInit() { + assumeThat(testMode.isCached(), is(true)); + + TestItem item1 = new TestItem("key1", 1); + TestItem item2 = new TestItem("key2", 1); + FullDataSet allData = new DataBuilder().add(TEST_ITEMS, item1, item2).build(); + wrapper.init(allData); + + core.forceRemove(TEST_ITEMS, item2.key); + + Map 
items = toItemsMap(wrapper.getAll(TEST_ITEMS)); + Map expected = toDataMap(allData).get(TEST_ITEMS); + assertThat(items, equalTo(expected)); + } + + @Test + public void cachedStoreWithFiniteTtlDoesNotUpdateCacheIfCoreInitFails() { + assumeThat(testMode.isCachedWithFiniteTtl(), is(true)); + + TestItem item = new TestItem("key", 1); + + core.fakeError = FAKE_ERROR; + try { + wrapper.init(new DataBuilder().add(TEST_ITEMS, item).build()); + fail("expected exception"); + } catch(RuntimeException e) { + assertThat(e, is(FAKE_ERROR)); + } + + core.fakeError = null; + assertThat(toItemsMap(wrapper.getAll(TEST_ITEMS)).size(), equalTo(0)); + } + + @Test + public void cachedStoreWithInfiniteTtlUpdatesCacheEvenIfCoreInitFails() { + assumeThat(testMode.isCachedIndefinitely(), is(true)); + + TestItem item = new TestItem("key", 1); + + core.fakeError = FAKE_ERROR; + try { + wrapper.init(new DataBuilder().add(TEST_ITEMS, item).build()); + fail("expected exception"); + } catch(RuntimeException e) { + assertThat(e, is(FAKE_ERROR)); + } + + core.fakeError = null; + Map expected = ImmutableMap.of(item.key, item.toItemDescriptor()); + assertThat(toItemsMap(wrapper.getAll(TEST_ITEMS)), equalTo(expected)); + } + + @Test + public void upsertSuccessful() { + TestItem itemv1 = new TestItem("key", 1); + TestItem itemv2 = itemv1.withVersion(2); + + wrapper.upsert(TEST_ITEMS, itemv1.key, itemv1.toItemDescriptor()); + assertThat(core.data.get(TEST_ITEMS).get(itemv1.key), equalTo(itemv1.toSerializedItemDescriptor())); + + wrapper.upsert(TEST_ITEMS, itemv1.key, itemv2.toItemDescriptor()); + assertThat(core.data.get(TEST_ITEMS).get(itemv1.key), equalTo(itemv2.toSerializedItemDescriptor())); + + // if we have a cache, verify that the new item is now cached by writing a different value + // to the underlying data - Get should still return the cached item + if (testMode.isCached()) { + TestItem itemv3 = itemv1.withVersion(3); + core.forceSet(TEST_ITEMS, itemv3); + } + + 
assertThat(wrapper.get(TEST_ITEMS, itemv1.key), equalTo(itemv2.toItemDescriptor())); + } + + @Test + public void cachedUpsertUnsuccessful() { + assumeThat(testMode.isCached(), is(true)); + + // This is for an upsert where the data in the store has a higher version. In an uncached + // store, this is just a no-op as far as the wrapper is concerned so there's nothing to + // test here. In a cached store, we need to verify that the cache has been refreshed + // using the data that was found in the store. + TestItem itemv1 = new TestItem("key", 1); + TestItem itemv2 = itemv1.withVersion(2); + + wrapper.upsert(TEST_ITEMS, itemv1.key, itemv2.toItemDescriptor()); + assertThat(core.data.get(TEST_ITEMS).get(itemv2.key), equalTo(itemv2.toSerializedItemDescriptor())); + + boolean success = wrapper.upsert(TEST_ITEMS, itemv1.key, itemv1.toItemDescriptor()); + assertThat(success, is(false)); + assertThat(core.data.get(TEST_ITEMS).get(itemv1.key), equalTo(itemv2.toSerializedItemDescriptor())); // value in store remains the same + + TestItem itemv3 = itemv1.withVersion(3); + core.forceSet(TEST_ITEMS, itemv3); // bypasses cache so we can verify that itemv2 is in the cache + + assertThat(wrapper.get(TEST_ITEMS, itemv1.key), equalTo(itemv2.toItemDescriptor())); + } + + @Test + public void cachedStoreWithFiniteTtlDoesNotUpdateCacheIfCoreUpdateFails() { + assumeThat(testMode.isCachedWithFiniteTtl(), is(true)); + + TestItem itemv1 = new TestItem("key", 1); + TestItem itemv2 = itemv1.withVersion(2); + + wrapper.init(new DataBuilder().add(TEST_ITEMS, itemv1).build()); + + core.fakeError = FAKE_ERROR; + try { + wrapper.upsert(TEST_ITEMS, itemv1.key, itemv2.toItemDescriptor()); + fail("expected exception"); + } catch(RuntimeException e) { + assertThat(e, is(FAKE_ERROR)); + } + core.fakeError = null; + + // cache still has old item, same as underlying store + assertThat(wrapper.get(TEST_ITEMS, itemv1.key), equalTo(itemv1.toItemDescriptor())); + } + + @Test + public void 
cachedStoreWithInfiniteTtlUpdatesCacheEvenIfCoreUpdateFails() { + assumeThat(testMode.isCachedIndefinitely(), is(true)); + + TestItem itemv1 = new TestItem("key", 1); + TestItem itemv2 = itemv1.withVersion(2); + + wrapper.init(new DataBuilder().add(TEST_ITEMS, itemv1).build()); + + core.fakeError = FAKE_ERROR; + try { + wrapper.upsert(TEST_ITEMS, itemv1.key, itemv2.toItemDescriptor()); + Assert.fail("expected exception"); + } catch(RuntimeException e) { + assertThat(e, is(FAKE_ERROR)); + } + core.fakeError = null; + + // underlying store has old item but cache has new item + assertThat(wrapper.get(TEST_ITEMS, itemv1.key), equalTo(itemv2.toItemDescriptor())); + } + + @Test + public void cachedStoreWithFiniteTtlRemovesCachedAllDataIfOneItemIsUpdated() { + assumeThat(testMode.isCachedWithFiniteTtl(), is(true)); + + TestItem item1v1 = new TestItem("key1", 1); + TestItem item1v2 = item1v1.withVersion(2); + TestItem item2v1 = new TestItem("key2", 1); + TestItem item2v2 = item2v1.withVersion(2); + + wrapper.init(new DataBuilder().add(TEST_ITEMS, item1v1, item2v1).build()); + wrapper.getAll(TEST_ITEMS); // now the All data is cached + + // do an upsert for item1 - this should drop the previous all() data from the cache + wrapper.upsert(TEST_ITEMS, item1v1.key, item1v2.toItemDescriptor()); + + // modify item2 directly in the underlying data + core.forceSet(TEST_ITEMS, item2v2); + + // now, all() should reread the underlying data so we see both changes + Map expected = ImmutableMap.of( + item1v1.key, item1v2.toItemDescriptor(), item2v1.key, item2v2.toItemDescriptor()); + assertThat(toItemsMap(wrapper.getAll(TEST_ITEMS)), equalTo(expected)); + } + + @Test + public void cachedStoreWithInfiniteTtlUpdatesCachedAllDataIfOneItemIsUpdated() { + assumeThat(testMode.isCachedIndefinitely(), is(true)); + + TestItem item1v1 = new TestItem("key1", 1); + TestItem item1v2 = item1v1.withVersion(2); + TestItem item2v1 = new TestItem("key2", 1); + TestItem item2v2 = item2v1.withVersion(2); + 
+ wrapper.init(new DataBuilder().add(TEST_ITEMS, item1v1, item2v1).build()); + wrapper.getAll(TEST_ITEMS); // now the All data is cached + + // do an upsert for item1 - this should update the underlying data *and* the cached all() data + wrapper.upsert(TEST_ITEMS, item1v1.key, item1v2.toItemDescriptor()); + + // modify item2 directly in the underlying data + core.forceSet(TEST_ITEMS, item2v2); + + // now, all() should *not* reread the underlying data - we should only see the change to item1 + Map expected = ImmutableMap.of( + item1v1.key, item1v2.toItemDescriptor(), item2v1.key, item2v1.toItemDescriptor()); + assertThat(toItemsMap(wrapper.getAll(TEST_ITEMS)), equalTo(expected)); + } + + @Test + public void delete() { + TestItem itemv1 = new TestItem("key", 1); + + core.forceSet(TEST_ITEMS, itemv1); + assertThat(wrapper.get(TEST_ITEMS, itemv1.key), equalTo(itemv1.toItemDescriptor())); + + ItemDescriptor deletedItem = ItemDescriptor.deletedItem(2); + wrapper.upsert(TEST_ITEMS, itemv1.key, deletedItem); + + // some stores will persist a special placeholder string, others will store the metadata separately + SerializedItemDescriptor serializedDeletedItem = testMode.persistOnlyAsString ? + toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(deletedItem.getVersion())) : + new SerializedItemDescriptor(deletedItem.getVersion(), true, null); + assertThat(core.data.get(TEST_ITEMS).get(itemv1.key), equalTo(serializedDeletedItem)); + + // make a change that bypasses the cache + TestItem itemv3 = itemv1.withVersion(3); + core.forceSet(TEST_ITEMS, itemv3); + + ItemDescriptor result = wrapper.get(TEST_ITEMS, itemv1.key); + assertThat(result, equalTo(testMode.isCached() ? 
deletedItem : itemv3.toItemDescriptor())); + } + + @Test + public void initializedCallsInternalMethodOnlyIfNotAlreadyInited() { + assumeThat(testMode.isCached(), is(false)); + + assertThat(wrapper.isInitialized(), is(false)); + assertThat(core.initedQueryCount, equalTo(1)); + + core.inited.set(true); + assertThat(wrapper.isInitialized(), is(true)); + assertThat(core.initedQueryCount, equalTo(2)); + + core.inited.set(false); + assertThat(wrapper.isInitialized(), is(true)); + assertThat(core.initedQueryCount, equalTo(2)); + } + + @Test + public void initializedDoesNotCallInternalMethodAfterInitHasBeenCalled() { + assumeThat(testMode.isCached(), is(false)); + + assertThat(wrapper.isInitialized(), is(false)); + assertThat(core.initedQueryCount, equalTo(1)); + + wrapper.init(new DataBuilder().build()); + + assertThat(wrapper.isInitialized(), is(true)); + assertThat(core.initedQueryCount, equalTo(1)); + } + + @Test + public void initializedCanCacheFalseResult() throws Exception { + assumeThat(testMode.isCached(), is(true)); + + // We need to create a different object for this test so we can set a short cache TTL + try (PersistentDataStoreWrapper wrapper1 = new PersistentDataStoreWrapper( + core, + Duration.ofMillis(500), + PersistentDataStoreBuilder.StaleValuesPolicy.EVICT, + false, + this::updateStatus, + sharedExecutor, + testLogger + )) { + assertThat(wrapper1.isInitialized(), is(false)); + assertThat(core.initedQueryCount, equalTo(1)); + + core.inited.set(true); + assertThat(core.initedQueryCount, equalTo(1)); + + Thread.sleep(600); + + assertThat(wrapper1.isInitialized(), is(true)); + assertThat(core.initedQueryCount, equalTo(2)); + + // From this point on it should remain true and the method should not be called + assertThat(wrapper1.isInitialized(), is(true)); + assertThat(core.initedQueryCount, equalTo(2)); + } + } + + @Test + public void isInitializedCatchesException() throws Exception { + core.fakeError = FAKE_ERROR; + + assertThat(wrapper.isInitialized(), 
is(false)); + } + + @Test + public void canGetCacheStats() throws Exception { + try (PersistentDataStoreWrapper w = new PersistentDataStoreWrapper( + core, + testMode.getCacheTtl(), + PersistentDataStoreBuilder.StaleValuesPolicy.EVICT, + true, + this::updateStatus, + sharedExecutor, + testLogger + )) { + CacheStats stats = w.getCacheStats(); + + if (!testMode.isCached()) { + assertNull(stats); + return; + } + + assertThat(stats, equalTo(new CacheStats(0, 0, 0, 0, 0, 0))); + + // Cause a cache miss + w.get(TEST_ITEMS, "key1"); + stats = w.getCacheStats(); + assertThat(stats.getHitCount(), equalTo(0L)); + assertThat(stats.getMissCount(), equalTo(1L)); + assertThat(stats.getLoadSuccessCount(), equalTo(1L)); // even though it's a miss, it's a "success" because there was no exception + assertThat(stats.getLoadExceptionCount(), equalTo(0L)); + + // Cause a cache hit + core.forceSet(TEST_ITEMS, new TestItem("key2", 1)); + w.get(TEST_ITEMS, "key2"); // this one is a cache miss, but causes the item to be loaded and cached + w.get(TEST_ITEMS, "key2"); // now it's a cache hit + stats = w.getCacheStats(); + assertThat(stats.getHitCount(), equalTo(1L)); + assertThat(stats.getMissCount(), equalTo(2L)); + assertThat(stats.getLoadSuccessCount(), equalTo(2L)); + assertThat(stats.getLoadExceptionCount(), equalTo(0L)); + + // Cause a load exception + core.fakeError = new RuntimeException("sorry"); + try { + w.get(TEST_ITEMS, "key3"); // cache miss -> tries to load the item -> gets an exception + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e, is((Throwable)core.fakeError)); + } + stats = w.getCacheStats(); + assertThat(stats.getHitCount(), equalTo(1L)); + assertThat(stats.getMissCount(), equalTo(3L)); + assertThat(stats.getLoadSuccessCount(), equalTo(2L)); + assertThat(stats.getLoadExceptionCount(), equalTo(1L)); + } + } + + @Test + public void statusIsOkInitially() throws Exception { + DataStoreStatusProvider.Status status = 
dataStoreStatusProvider.getStatus(); + assertThat(status.isAvailable(), is(true)); + assertThat(status.isRefreshNeeded(), is(false)); + } + + @Test + public void statusIsUnavailableAfterError() throws Exception { + causeStoreError(core, wrapper); + + DataStoreStatusProvider.Status status = dataStoreStatusProvider.getStatus(); + assertThat(status.isAvailable(), is(false)); + assertThat(status.isRefreshNeeded(), is(false)); + } + + @Test + public void statusListenerIsNotifiedOnFailureAndRecovery() throws Exception { + final BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataStoreStatusProvider.addStatusListener(statuses::add); + + causeStoreError(core, wrapper); + + DataStoreStatusProvider.Status status1 = statuses.take(); + assertThat(status1.isAvailable(), is(false)); + assertThat(status1.isRefreshNeeded(), is(false)); + + // Trigger another error, just to show that it will *not* publish a redundant status update since it + // is already in a failed state + causeStoreError(core, wrapper); + + // Now simulate the data store becoming OK again; the poller detects this and publishes a new status + makeStoreAvailable(core); + DataStoreStatusProvider.Status status2 = statuses.take(); + assertThat(status2.isAvailable(), is(true)); + assertThat(status2.isRefreshNeeded(), is(!testMode.isCachedIndefinitely())); + } + + @Test + public void cacheIsWrittenToStoreAfterRecoveryIfTtlIsInfinite() throws Exception { + assumeThat(testMode.isCachedIndefinitely(), is(true)); + + final BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataStoreStatusProvider.addStatusListener(statuses::add); + + TestItem item1v1 = new TestItem("key1", 1); + TestItem item1v2 = item1v1.withVersion(2); + TestItem item2 = new TestItem("key2", 1); + + wrapper.init(new DataBuilder().add(TEST_ITEMS, item1v1).build()); + + // In infinite cache mode, we do *not* expect exceptions thrown by the store to be propagated; it will + // swallow the error, but also go into polling/recovery mode. 
Note that even though the store rejects + // this update, it'll still be cached. + causeStoreError(core, wrapper); + try { + wrapper.upsert(TEST_ITEMS, item1v1.key, item1v2.toItemDescriptor()); + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e.getMessage(), equalTo(FAKE_ERROR.getMessage())); + } + assertThat(wrapper.get(TEST_ITEMS, item1v1.key), equalTo(item1v2.toItemDescriptor())); + + DataStoreStatusProvider.Status status1 = statuses.take(); + assertThat(status1.isAvailable(), is(false)); + assertThat(status1.isRefreshNeeded(), is(false)); + + // While the store is still down, try to update it again - the update goes into the cache + try { + wrapper.upsert(TEST_ITEMS, item2.key, item2.toItemDescriptor()); + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e.getMessage(), equalTo(FAKE_ERROR.getMessage())); + } + assertThat(wrapper.get(TEST_ITEMS, item2.key), equalTo(item2.toItemDescriptor())); + + // Verify that this update did not go into the underlying data yet + assertThat(core.data.get(TEST_ITEMS).get(item2.key), nullValue()); + + // Now simulate the store coming back up + makeStoreAvailable(core); + + // Wait for the poller to notice this and publish a new status + DataStoreStatusProvider.Status status2 = statuses.take(); + assertThat(status2.isAvailable(), is(true)); + assertThat(status2.isRefreshNeeded(), is(false)); + + // Once that has happened, the cache should have been written to the store + assertThat(core.data.get(TEST_ITEMS).get(item1v1.key), equalTo(item1v2.toSerializedItemDescriptor())); + assertThat(core.data.get(TEST_ITEMS).get(item2.key), equalTo(item2.toSerializedItemDescriptor())); + } + + @Test + public void statusRemainsUnavailableIfStoreSaysItIsAvailableButInitFails() throws Exception { + assumeThat(testMode.isCachedIndefinitely(), is(true)); + + // Most of this test is identical to cacheIsWrittenToStoreAfterRecoveryIfTtlIsInfinite() except as noted below. 
+ + final BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataStoreStatusProvider.addStatusListener(statuses::add); + + TestItem item1v1 = new TestItem("key1", 1); + TestItem item1v2 = item1v1.withVersion(2); + TestItem item2 = new TestItem("key2", 1); + + wrapper.init(new DataBuilder().add(TEST_ITEMS, item1v1).build()); + + causeStoreError(core, wrapper); + try { + wrapper.upsert(TEST_ITEMS, item1v1.key, item1v2.toItemDescriptor()); + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e.getMessage(), equalTo(FAKE_ERROR.getMessage())); + } + assertThat(wrapper.get(TEST_ITEMS, item1v1.key), equalTo(item1v2.toItemDescriptor())); + + DataStoreStatusProvider.Status status1 = statuses.take(); + assertThat(status1.isAvailable(), is(false)); + assertThat(status1.isRefreshNeeded(), is(false)); + + // While the store is still down, try to update it again - the update goes into the cache + try { + wrapper.upsert(TEST_ITEMS, item2.key, item2.toItemDescriptor()); + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e.getMessage(), equalTo(FAKE_ERROR.getMessage())); + } + assertThat(wrapper.get(TEST_ITEMS, item2.key), equalTo(item2.toItemDescriptor())); + + // Verify that this update did not go into the underlying data yet + assertThat(core.data.get(TEST_ITEMS).get(item2.key), nullValue()); + + // Here's what is unique to this test: we are telling the store to report its status as "available", + // but *not* clearing the fake exception, so when the poller tries to write the cached data with + // init() it should fail. + core.unavailable = false; + + // We can't prove that an unwanted status transition will never happen, but we can verify that it + // does not happen within two status poll intervals. 
+ Thread.sleep(PersistentDataStoreStatusManager.POLL_INTERVAL_MS * 2); + + assertThat(statuses.isEmpty(), is(true)); + int initedCount = core.initedCount.get(); + assertThat(initedCount, greaterThan(1)); // that is, it *tried* to do at least one init + + // Now simulate the store coming back up and actually working + core.fakeError = null; + + // Wait for the poller to notice this and publish a new status + DataStoreStatusProvider.Status status2 = statuses.take(); + assertThat(status2.isAvailable(), is(true)); + assertThat(status2.isRefreshNeeded(), is(false)); + + // Once that has happened, the cache should have been written to the store + assertThat(core.data.get(TEST_ITEMS).get(item1v1.key), equalTo(item1v2.toSerializedItemDescriptor())); + assertThat(core.data.get(TEST_ITEMS).get(item2.key), equalTo(item2.toSerializedItemDescriptor())); + assertThat(core.initedCount.get(), greaterThan(initedCount)); + } + + private void causeStoreError(MockPersistentDataStore core, PersistentDataStoreWrapper w) { + core.unavailable = true; + core.fakeError = new RuntimeException(FAKE_ERROR.getMessage()); + try { + wrapper.upsert(TEST_ITEMS, "irrelevant-key", ItemDescriptor.deletedItem(1)); + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e.getMessage(), equalTo(FAKE_ERROR.getMessage())); + } + } + + private void makeStoreAvailable(MockPersistentDataStore core) { + core.fakeError = null; + core.unavailable = false; + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PollingProcessorTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PollingProcessorTest.java new file mode 100644 index 0000000..39c5dbd --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/PollingProcessorTest.java @@ -0,0 +1,405 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import 
com.launchdarkly.sdk.server.TestComponents.MockDataSourceUpdates; +import com.launchdarkly.sdk.server.TestComponents.MockDataStoreStatusProvider; +import com.launchdarkly.sdk.server.TestUtil.ActionCanThrowAnyException; +import com.launchdarkly.sdk.server.integrations.PollingDataSourceBuilder; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.Status; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.testhelpers.ConcurrentHelpers; +import com.launchdarkly.testhelpers.httptest.Handler; +import com.launchdarkly.testhelpers.httptest.Handlers; +import com.launchdarkly.testhelpers.httptest.HttpServer; +import com.launchdarkly.testhelpers.httptest.RequestContext; +import com.launchdarkly.testhelpers.tcptest.TcpHandler; +import com.launchdarkly.testhelpers.tcptest.TcpHandlers; +import com.launchdarkly.testhelpers.tcptest.TcpServer; + +import org.junit.Before; +import org.junit.Test; + +import java.net.URI; +import java.time.Duration; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestComponents.dataStoreThatThrowsException; +import static com.launchdarkly.sdk.server.TestComponents.defaultHttpProperties; +import static com.launchdarkly.sdk.server.TestComponents.sharedExecutor; +import static 
com.launchdarkly.sdk.server.TestUtil.assertDataSetEquals; +import static com.launchdarkly.sdk.server.TestUtil.requireDataSourceStatus; +import static com.launchdarkly.sdk.server.TestUtil.requireDataSourceStatusEventually; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertFutureIsCompleted; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class PollingProcessorTest extends BaseTest { + private static final String SDK_KEY = "sdk-key"; + private static final Duration LENGTHY_INTERVAL = Duration.ofSeconds(60); + private static final Duration BRIEF_INTERVAL = Duration.ofMillis(20); + + private MockDataSourceUpdates dataSourceUpdates; + + @Before + public void setup() { + DataStore store = new InMemoryDataStore(); + dataSourceUpdates = TestComponents.dataSourceUpdates(store, new MockDataStoreStatusProvider()); + } + + private PollingProcessor makeProcessor(URI baseUri, Duration pollInterval) { + FeatureRequestor requestor = new DefaultFeatureRequestor(defaultHttpProperties(), baseUri, null, testLogger); + return new PollingProcessor(requestor, dataSourceUpdates, sharedExecutor, pollInterval, testLogger); + } + + private static class TestPollHandler implements Handler { + private final String data; + private volatile int errorStatus; + + public TestPollHandler() { + this(DataBuilder.forStandardTypes()); + } + + public TestPollHandler(DataBuilder data) { + this.data = data.buildJson().toJsonString(); + } + + @Override + public void apply(RequestContext context) { + int err = errorStatus; + if (err == 0) { + Handlers.bodyJson(data).apply(context); + } else { + context.setStatus(err); + } + } + 
+ public void setError(int status) { + this.errorStatus = status; + } + } + + @Test + public void builderHasDefaultConfiguration() throws Exception { + ComponentConfigurer f = Components.pollingDataSource(); + try (PollingProcessor pp = (PollingProcessor)f.build(clientContext(SDK_KEY, baseConfig().build()))) { + assertThat(((DefaultFeatureRequestor)pp.requestor).pollingUri.toString(), containsString(StandardEndpoints.DEFAULT_POLLING_BASE_URI.toString())); + assertThat(pp.pollInterval, equalTo(PollingDataSourceBuilder.DEFAULT_POLL_INTERVAL)); + } + } + + @Test + public void builderCanSpecifyConfiguration() throws Exception { + + ComponentConfigurer f = Components.pollingDataSource() + .pollInterval(LENGTHY_INTERVAL) + .payloadFilter("myFilter"); + + try (PollingProcessor pp = (PollingProcessor) f.build( + clientContext( + SDK_KEY, + baseConfig().build()))) { + assertThat(pp.pollInterval, equalTo(LENGTHY_INTERVAL)); + assertThat(((DefaultFeatureRequestor) pp.requestor).pollingUri.toString(), containsString("filter=myFilter")); + } + } + + @Test + public void successfulPolls() throws Exception { + FeatureFlag flagv1 = ModelBuilders.flagBuilder("flag").version(1).build(); + FeatureFlag flagv2 = ModelBuilders.flagBuilder(flagv1.getKey()).version(2).build(); + DataBuilder datav1 = DataBuilder.forStandardTypes().addAny(DataModel.FEATURES, flagv1); + DataBuilder datav2 = DataBuilder.forStandardTypes().addAny(DataModel.FEATURES, flagv2); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + Semaphore allowSecondPollToProceed = new Semaphore(0); + + Handler pollingHandler = Handlers.sequential( + new TestPollHandler(datav1), + Handlers.all( + Handlers.waitFor(allowSecondPollToProceed), + new TestPollHandler(datav2) + ), + Handlers.hang() // we don't want any more polls to complete after the second one + ); + + try (HttpServer server = HttpServer.start(pollingHandler)) { + try (PollingProcessor 
pollingProcessor = makeProcessor(server.getUri(), Duration.ofMillis(100))) { + Future initFuture = pollingProcessor.start(); + assertFutureIsCompleted(initFuture, 1, TimeUnit.SECONDS); + + assertTrue(pollingProcessor.isInitialized()); + assertDataSetEquals(datav1.build(), dataSourceUpdates.awaitInit()); + + allowSecondPollToProceed.release(); + + assertDataSetEquals(datav2.build(), dataSourceUpdates.awaitInit()); + } + } + } + + @Test + public void testTimeoutFromConnectionProblem() throws Exception { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + Handler successHandler = new TestPollHandler(); // it should time out before reaching this + + try (HttpServer server = HttpServer.start(successHandler)) { + TcpHandler errorThenSuccess = TcpHandlers.sequential( + TcpHandlers.noResponse(), // this will cause an IOException due to closing the connection without a response + TcpHandlers.forwardToPort(server.getPort()) + ); + try (TcpServer forwardingServer = TcpServer.start(errorThenSuccess)) { + try (PollingProcessor pollingProcessor = makeProcessor(forwardingServer.getHttpUri(), LENGTHY_INTERVAL)) { + Future initFuture = pollingProcessor.start(); + ConcurrentHelpers.assertFutureIsNotCompleted(initFuture, 200, TimeUnit.MILLISECONDS); + assertFalse(initFuture.isDone()); + assertFalse(pollingProcessor.isInitialized()); + assertEquals(0, dataSourceUpdates.receivedInits.size()); + + Status status = requireDataSourceStatus(statuses, State.INITIALIZING); + assertNotNull(status.getLastError()); + assertEquals(ErrorKind.NETWORK_ERROR, status.getLastError().getKind()); + } + } + } + } + + @Test + public void testDataStoreFailure() throws Exception { + DataStore badStore = dataStoreThatThrowsException(new RuntimeException("sorry")); + DataStoreStatusProvider badStoreStatusProvider = new MockDataStoreStatusProvider(false); + dataSourceUpdates = TestComponents.dataSourceUpdates(badStore, 
badStoreStatusProvider); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + try (HttpServer server = HttpServer.start(new TestPollHandler())) { + try (PollingProcessor pollingProcessor = makeProcessor(server.getUri(), LENGTHY_INTERVAL)) { + pollingProcessor.start(); + + assertDataSetEquals(DataBuilder.forStandardTypes().build(), dataSourceUpdates.awaitInit()); + + assertFalse(pollingProcessor.isInitialized()); + + Status status = requireDataSourceStatus(statuses, State.INITIALIZING); + assertNotNull(status.getLastError()); + assertEquals(ErrorKind.STORE_ERROR, status.getLastError().getKind()); + } + } + } + + @Test + public void testMalformedData() throws Exception { + Handler badDataHandler = Handlers.bodyJson("{bad"); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + try (HttpServer server = HttpServer.start(badDataHandler)) { + try (PollingProcessor pollingProcessor = makeProcessor(server.getUri(), LENGTHY_INTERVAL)) { + pollingProcessor.start(); + + Status status = requireDataSourceStatus(statuses, State.INITIALIZING); + assertNotNull(status.getLastError()); + assertEquals(ErrorKind.INVALID_DATA, status.getLastError().getKind()); + + assertFalse(pollingProcessor.isInitialized()); + } + } + } + + @Test + public void startingWhenAlreadyStartedDoesNothing() throws Exception { + try (HttpServer server = HttpServer.start(new TestPollHandler())) { + try (PollingProcessor pollingProcessor = makeProcessor(server.getUri(), LENGTHY_INTERVAL)) { + Future initFuture1 = pollingProcessor.start(); + assertFutureIsCompleted(initFuture1, 1, TimeUnit.SECONDS); + server.getRecorder().requireRequest(); + + Future initFuture2 = pollingProcessor.start(); + assertSame(initFuture1, initFuture2); + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + } + + @Test + public void http400ErrorIsRecoverable() throws 
Exception { + testRecoverableHttpError(400); + } + + @Test + public void http401ErrorIsUnrecoverable() throws Exception { + testUnrecoverableHttpError(401); + } + + @Test + public void http403ErrorIsUnrecoverable() throws Exception { + testUnrecoverableHttpError(403); + } + + @Test + public void http408ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(408); + } + + @Test + public void http429ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(429); + } + + @Test + public void http500ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(500); + } + + private void testUnrecoverableHttpError(int statusCode) throws Exception { + TestPollHandler handler = new TestPollHandler(); + + // Test a scenario where the very first request gets this error + handler.setError(statusCode); + withStatusQueue(statuses -> { + try (HttpServer server = HttpServer.start(handler)) { + try (PollingProcessor pollingProcessor = makeProcessor(server.getUri(), BRIEF_INTERVAL)) { + long startTime = System.currentTimeMillis(); + Future initFuture = pollingProcessor.start(); + + assertFutureIsCompleted(initFuture, 2, TimeUnit.SECONDS); + assertTrue((System.currentTimeMillis() - startTime) < 9000); + assertTrue(initFuture.isDone()); + assertFalse(pollingProcessor.isInitialized()); + + verifyHttpErrorCausedShutdown(statuses, statusCode); + + server.getRecorder().requireRequest(); + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + }); + + // Now test a scenario where we have a successful startup, but a subsequent poll gets the error + handler.setError(0); + dataSourceUpdates = TestComponents.dataSourceUpdates(new InMemoryDataStore(), new MockDataStoreStatusProvider()); + withStatusQueue(statuses -> { + try (HttpServer server = HttpServer.start(handler)) { + try (PollingProcessor pollingProcessor = makeProcessor(server.getUri(), BRIEF_INTERVAL)) { + Future initFuture = pollingProcessor.start(); + + 
assertFutureIsCompleted(initFuture, 2, TimeUnit.SECONDS); + assertTrue(initFuture.isDone()); + assertTrue(pollingProcessor.isInitialized()); + requireDataSourceStatus(statuses, State.VALID); + + // now make it so polls fail + handler.setError(statusCode); + + verifyHttpErrorCausedShutdown(statuses, statusCode); + while (server.getRecorder().count() > 0) { + server.getRecorder().requireRequest(); + } + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + }); + } + + private void verifyHttpErrorCausedShutdown(BlockingQueue statuses, int statusCode) { + Status status = requireDataSourceStatusEventually(statuses, State.OFF, State.VALID); + assertNotNull(status.getLastError()); + assertEquals(ErrorKind.ERROR_RESPONSE, status.getLastError().getKind()); + assertEquals(statusCode, status.getLastError().getStatusCode()); + } + + private void testRecoverableHttpError(int statusCode) throws Exception { + TestPollHandler handler = new TestPollHandler(); + + // Test a scenario where the very first request gets this error + handler.setError(statusCode); + withStatusQueue(statuses -> { + try (HttpServer server = HttpServer.start(handler)) { + try (PollingProcessor pollingProcessor = makeProcessor(server.getUri(), BRIEF_INTERVAL)) { + Future initFuture = pollingProcessor.start(); + + // make sure it's done a couple of polls (which will have failed) + server.getRecorder().requireRequest(); + server.getRecorder().requireRequest(); + + // now make it so polls will succeed + handler.setError(0); + + assertFutureIsCompleted(initFuture, 1, TimeUnit.SECONDS); + + // verify that it got the error + Status status0 = requireDataSourceStatus(statuses, State.INITIALIZING); + assertNotNull(status0.getLastError()); + assertEquals(ErrorKind.ERROR_RESPONSE, status0.getLastError().getKind()); + assertEquals(statusCode, status0.getLastError().getStatusCode()); + + // and then that it succeeded + requireDataSourceStatusEventually(statuses, State.VALID, State.INITIALIZING); + 
} + } + }); + + // Now test a scenario where we have a successful startup, but then it gets the error. + // The result is a bit different because it will report an INTERRUPTED state. + handler.setError(0); + dataSourceUpdates = TestComponents.dataSourceUpdates(new InMemoryDataStore(), new MockDataStoreStatusProvider()); + withStatusQueue(statuses -> { + try (HttpServer server = HttpServer.start(handler)) { + try (PollingProcessor pollingProcessor = makeProcessor(server.getUri(), BRIEF_INTERVAL)) { + Future initFuture = pollingProcessor.start(); + assertFutureIsCompleted(initFuture, 1, TimeUnit.SECONDS); + assertTrue(pollingProcessor.isInitialized()); + + // first poll succeeded + requireDataSourceStatus(statuses, State.VALID); + + // now make it so polls will fail + handler.setError(statusCode); + + Status status1 = requireDataSourceStatus(statuses, State.INTERRUPTED); + assertEquals(ErrorKind.ERROR_RESPONSE, status1.getLastError().getKind()); + assertEquals(statusCode, status1.getLastError().getStatusCode()); + + // and then succeed again + handler.setError(0); + requireDataSourceStatusEventually(statuses, State.VALID, State.INTERRUPTED); + } + } + }); + } + + private void withStatusQueue(ActionCanThrowAnyException> action) throws Exception { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + DataSourceStatusProvider.StatusListener addStatus = statuses::add; + dataSourceUpdates.statusBroadcaster.register(addStatus); + try { + action.apply(statuses); + } finally { + dataSourceUpdates.statusBroadcaster.unregister(addStatus); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/RolloutRandomizationConsistencyTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/RolloutRandomizationConsistencyTest.java new file mode 100644 index 0000000..3e9c375 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/RolloutRandomizationConsistencyTest.java @@ -0,0 +1,115 @@ +package com.launchdarkly.sdk.server; + 
+import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Rollout; +import com.launchdarkly.sdk.server.DataModel.RolloutKind; +import com.launchdarkly.sdk.server.DataModel.WeightedVariation; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static com.launchdarkly.sdk.server.EvaluatorBucketing.computeBucketValue; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.BASE_EVALUATOR; +import static com.launchdarkly.sdk.server.EvaluatorTestUtil.expectNoPrerequisiteEvals; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; + +/* + * Note: These tests are meant to be exact duplicates of tests + * in other SDKs. Do not change any of the values unless they + * are also changed in other SDKs. These are not traditional behavioral + * tests so much as consistency tests to guarantee that the implementation + * is identical across SDKs. + */ +public class RolloutRandomizationConsistencyTest { + private Integer noSeed = null; + + private static Rollout buildRollout(boolean isExperiment, boolean untrackedVariations) { + List variations = new ArrayList<>(); + variations.add(new WeightedVariation(0, 10000, untrackedVariations)); + variations.add(new WeightedVariation(1, 20000, untrackedVariations)); + variations.add(new WeightedVariation(0, 70000, true)); + RolloutKind kind = isExperiment ? 
RolloutKind.experiment : RolloutKind.rollout; + Integer seed = 61; + Rollout rollout = new Rollout(null, variations, null, kind, seed); + return rollout; + } + + @Test + public void variationIndexForUserInExperimentTest() { + // seed here carefully chosen so users fall into different buckets + Rollout rollout = buildRollout(true, false); + String key = "hashKey"; + String salt = "saltyA"; + + LDContext user1 = LDContext.create("userKeyA"); + // bucketVal = 0.09801207 + assertVariationIndexAndExperimentStateForRollout(0, true, rollout, user1, key, salt); + + LDContext user2 = LDContext.create("userKeyB"); + // bucketVal = 0.14483777 + assertVariationIndexAndExperimentStateForRollout(1, true, rollout, user2, key, salt); + + LDContext user3 = LDContext.create("userKeyC"); + // bucketVal = 0.9242641 + assertVariationIndexAndExperimentStateForRollout(0, false, rollout, user3, key, salt); + } + + private static void assertVariationIndexAndExperimentStateForRollout( + int expectedVariation, + boolean expectedInExperiment, + Rollout rollout, + LDContext context, + String flagKey, + String salt + ) { + FeatureFlag flag = ModelBuilders.flagBuilder(flagKey) + .on(true) + .generatedVariations(3) + .fallthrough(rollout) + .salt(salt) + .build(); + EvalResult result = BASE_EVALUATOR.evaluate(flag, context, expectNoPrerequisiteEvals()); + assertThat(result.getVariationIndex(), equalTo(expectedVariation)); + assertThat(result.getReason().getKind(), equalTo(EvaluationReason.Kind.FALLTHROUGH)); + assertThat(result.getReason().isInExperiment(), equalTo(expectedInExperiment)); + } + + @Test + public void bucketUserByKeyTest() { + LDContext user1 = LDContext.create("userKeyA"); + float point1 = computeBucketValue(false, noSeed, user1, null, "hashKey", null, "saltyA"); + assertEquals(0.42157587, point1, 0.0000001); + + LDContext user2 = LDContext.create("userKeyB"); + float point2 = computeBucketValue(false, noSeed, user2, null, "hashKey", null, "saltyA"); + assertEquals(0.6708485, 
point2, 0.0000001); + + LDContext user3 = LDContext.create("userKeyC"); + float point3 = computeBucketValue(false, noSeed, user3, null, "hashKey", null, "saltyA"); + assertEquals(0.10343106, point3, 0.0000001); + } + + @Test + public void bucketUserWithSeedTest() { + Integer seed = 61; + + LDContext user1 = LDContext.create("userKeyA"); + Float point1 = computeBucketValue(true, seed, user1, null, "hashKey", null, "saltyA"); + assertEquals(0.09801207, point1, 0.0000001); + + LDContext user2 = LDContext.create("userKeyB"); + Float point2 = computeBucketValue(true, seed, user2, null, "hashKey", null, "saltyA"); + assertEquals(0.14483777, point2, 0.0000001); + + LDContext user3 = LDContext.create("userKeyC"); + Float point3 = computeBucketValue(true, seed, user3, null, "hashKey", null, "saltyA"); + assertEquals(0.9242641, point3, 0.0000001); + } + +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SemanticVersionTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SemanticVersionTest.java new file mode 100644 index 0000000..9c7b937 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SemanticVersionTest.java @@ -0,0 +1,248 @@ +package com.launchdarkly.sdk.server; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.launchdarkly.sdk.server.SemanticVersion; + +import org.junit.Test; + +@SuppressWarnings("javadoc") +public class SemanticVersionTest { + @Test + public void canParseSimpleCompleteVersion() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2.3.4"); + assertEquals(2, sv.getMajor()); + assertEquals(3, sv.getMinor()); + assertEquals(4, sv.getPatch()); + assertNull(sv.getPrerelease()); + assertNull(sv.getBuild()); + } + + @Test + public void canParseVersionWithPrerelease() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2.3.4-beta1.rc2"); + assertEquals(2, sv.getMajor()); + assertEquals(3, sv.getMinor()); + 
assertEquals(4, sv.getPatch()); + assertEquals("beta1.rc2", sv.getPrerelease()); + assertNull(sv.getBuild()); + } + + @Test + public void canParseVersionWithBuild() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2.3.4+build2.4"); + assertEquals(2, sv.getMajor()); + assertEquals(3, sv.getMinor()); + assertEquals(4, sv.getPatch()); + assertNull(sv.getPrerelease()); + assertEquals("build2.4", sv.getBuild()); + } + + @Test + public void canParseVersionWithPrereleaseAndBuild() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2.3.4-beta1.rc2+build2.4"); + assertEquals(2, sv.getMajor()); + assertEquals(3, sv.getMinor()); + assertEquals(4, sv.getPatch()); + assertEquals("beta1.rc2", sv.getPrerelease()); + assertEquals("build2.4", sv.getBuild()); + } + + @Test(expected = SemanticVersion.InvalidVersionException.class) + public void leadingZeroNotAllowedInMajor() throws Exception { + SemanticVersion.parse("02.3.4"); + } + + @Test(expected = SemanticVersion.InvalidVersionException.class) + public void leadingZeroNotAllowedInMinor() throws Exception { + SemanticVersion.parse("2.03.4"); + } + + @Test(expected = SemanticVersion.InvalidVersionException.class) + public void leadingZeroNotAllowedInPatch() throws Exception { + SemanticVersion.parse("2.3.04"); + } + + @Test + public void zeroByItselfIsAllowed() throws Exception { + assertEquals(0, SemanticVersion.parse("0.3.4").getMajor()); + assertEquals(0, SemanticVersion.parse("2.0.4").getMinor()); + assertEquals(0, SemanticVersion.parse("2.3.0").getPatch()); + } + + @Test + public void canParseVersionWithMajorOnly() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2", true); + assertEquals(2, sv.getMajor()); + assertEquals(0, sv.getMinor()); + assertEquals(0, sv.getPatch()); + assertNull(sv.getPrerelease()); + assertNull(sv.getBuild()); + } + + @Test(expected=SemanticVersion.InvalidVersionException.class) + public void cannotParseVersionWithMajorOnlyIfFlagNotSet() throws Exception 
{ + SemanticVersion.parse("2"); + } + + @Test + public void canParseVersionWithMajorAndMinorOnly() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2.3", true); + assertEquals(2, sv.getMajor()); + assertEquals(3, sv.getMinor()); + assertEquals(0, sv.getPatch()); + assertNull(sv.getPrerelease()); + assertNull(sv.getBuild()); + } + + @Test(expected=SemanticVersion.InvalidVersionException.class) + public void cannotParseVersionWithMajorAndMinorOnlyIfFlagNotSet() throws Exception { + SemanticVersion.parse("2.3"); + } + + @Test + public void canParseVersionWithMajorAndPrereleaseOnly() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2-beta1", true); + assertEquals(2, sv.getMajor()); + assertEquals(0, sv.getMinor()); + assertEquals(0, sv.getPatch()); + assertEquals("beta1", sv.getPrerelease()); + assertNull(sv.getBuild()); + } + + @Test + public void canParseVersionWithMajorMinorAndPrereleaseOnly() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2.3-beta1", true); + assertEquals(2, sv.getMajor()); + assertEquals(3, sv.getMinor()); + assertEquals(0, sv.getPatch()); + assertEquals("beta1", sv.getPrerelease()); + assertNull(sv.getBuild()); + } + + @Test + public void canParseVersionWithMajorAndBuildOnly() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2+build1", true); + assertEquals(2, sv.getMajor()); + assertEquals(0, sv.getMinor()); + assertEquals(0, sv.getPatch()); + assertNull(sv.getPrerelease()); + assertEquals("build1", sv.getBuild()); + } + + @Test + public void canParseVersionWithMajorMinorAndBuildOnly() throws Exception { + SemanticVersion sv = SemanticVersion.parse("2.3+build1", true); + assertEquals(2, sv.getMajor()); + assertEquals(3, sv.getMinor()); + assertEquals(0, sv.getPatch()); + assertNull(sv.getPrerelease()); + assertEquals("build1", sv.getBuild()); + } + + @Test(expected=SemanticVersion.InvalidVersionException.class) + public void majorVersionMustBeNumeric() throws Exception { + 
SemanticVersion.parse("x.0.0"); + } + + @Test(expected=SemanticVersion.InvalidVersionException.class) + public void minorVersionMustBeNumeric() throws Exception { + SemanticVersion.parse("0.x.0"); + } + + @Test(expected=SemanticVersion.InvalidVersionException.class) + public void patchVersionMustBeNumeric() throws Exception { + SemanticVersion.parse("0.0.x"); + } + + @Test + public void equalVersionsHaveEqualPrecedence() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.4-beta1"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1"); + assertEquals(0, sv1.comparePrecedence(sv2)); + + SemanticVersion sv3 = SemanticVersion.parse("2.3.4"); + SemanticVersion sv4 = SemanticVersion.parse("2.3.4"); + assertEquals(0, sv3.comparePrecedence(sv4)); + } + + @Test + public void lowerMajorVersionHasLowerPrecedence() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("1.3.4-beta1"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1"); + assertEquals(-1, sv1.comparePrecedence(sv2)); + assertEquals(1, sv2.comparePrecedence(sv1)); + } + + @Test + public void lowerMinorVersionHasLowerPrecedence() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.2.4-beta1"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1"); + assertEquals(-1, sv1.comparePrecedence(sv2)); + assertEquals(1, sv2.comparePrecedence(sv1)); + } + + @Test + public void lowerPatchVersionHasLowerPrecedence() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.3-beta1"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1"); + assertEquals(-1, sv1.comparePrecedence(sv2)); + assertEquals(1, sv2.comparePrecedence(sv1)); + } + + @Test + public void prereleaseVersionHasLowerPrecedenceThanRelease() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.4-beta1"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4"); + assertEquals(-1, sv1.comparePrecedence(sv2)); + assertEquals(1, sv2.comparePrecedence(sv1)); + 
} + + @Test + public void shorterSubsetOfPrereleaseIdentifiersHasLowerPrecedence() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.4-beta1"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1.rc1"); + assertEquals(-1, sv1.comparePrecedence(sv2)); + assertEquals(1, sv2.comparePrecedence(sv1)); + } + + @Test + public void numericPrereleaseIdentifiersAreSortedNumerically() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.4-beta1.3"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1.23"); + assertEquals(-1, sv1.comparePrecedence(sv2)); + assertEquals(1, sv2.comparePrecedence(sv1)); + } + + @Test + public void nonNumericPrereleaseIdentifiersAreSortedAsStrings() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.4-beta1.x3"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1.x23"); + assertEquals(1, sv1.comparePrecedence(sv2)); + assertEquals(-1, sv2.comparePrecedence(sv1)); + } + + @Test + public void numericPrereleaseIdentifiersAreLowerThanStrings() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.4-beta1.x.100"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1.3.100"); + assertEquals(1, sv1.comparePrecedence(sv2)); + assertEquals(-1, sv2.comparePrecedence(sv1)); + } + + @Test + public void buildIdentifierDoesNotAffectPrecedence() throws Exception { + SemanticVersion sv1 = SemanticVersion.parse("2.3.4-beta1+build1"); + SemanticVersion sv2 = SemanticVersion.parse("2.3.4-beta1+build2"); + assertEquals(0, sv1.comparePrecedence(sv2)); + assertEquals(0, sv2.comparePrecedence(sv1)); + } + + @Test + public void anyVersionIsGreaterThanNull() throws Exception { + SemanticVersion sv = SemanticVersion.parse("0.0.0"); + assertEquals(1, sv.comparePrecedence(null)); + assertEquals(1, sv.compareTo(null)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEventsTest.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEventsTest.java new file mode 100644 index 0000000..c6aa65b --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideDiagnosticEventsTest.java @@ -0,0 +1,514 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.internal.events.DiagnosticStore; +import com.launchdarkly.sdk.server.integrations.PollingDataSourceBuilder; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DiagnosticDescription; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import org.junit.Test; + +import java.net.URI; +import java.time.Duration; + +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestUtil.assertJsonEquals; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonEqualsValue; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonProperty; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonUndefined; +import static com.launchdarkly.testhelpers.JsonTestValue.jsonFromValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class ServerSideDiagnosticEventsTest { + + private static final URI CUSTOM_URI = URI.create("http://1.1.1.1"); + + @Test + public void sdkDataProperties() { + LDValue sdkData = makeSdkData(LDConfig.DEFAULT); + assertThat(jsonFromValue(sdkData), allOf( + jsonProperty("name", jsonEqualsValue("java-server-sdk")), + jsonProperty("version", jsonEqualsValue(Version.SDK_VERSION)), + 
jsonProperty("wrapperName", jsonUndefined()), + jsonProperty("wrapperVersion", jsonUndefined()) + )); + } + + @Test + public void sdkDataWrapperProperties() { + LDConfig config1 = new LDConfig.Builder() + .http(Components.httpConfiguration().wrapper("Scala", "0.1.0")) + .build(); + LDValue sdkData1 = makeSdkData(config1); + assertThat(jsonFromValue(sdkData1), allOf( + jsonProperty("wrapperName", jsonEqualsValue("Scala")), + jsonProperty("wrapperVersion", jsonEqualsValue("0.1.0")) + )); + + LDConfig config2 = new LDConfig.Builder() + .http(Components.httpConfiguration().wrapper("Scala", null)) + .build(); + LDValue sdkData2 = makeSdkData(config2); + assertThat(jsonFromValue(sdkData2), allOf( + jsonProperty("wrapperName", jsonEqualsValue("Scala")), + jsonProperty("wrapperVersion", jsonUndefined()) + )); + } + + @Test + public void sdkDataWrapperPropertiesUsingWrapperInfoOverridesHttpConfig() { + LDConfig config1 = new LDConfig.Builder() + .http(Components.httpConfiguration().wrapper("Scala", "0.1.0")) + .wrapper(Components.wrapperInfo().wrapperName("Clojure").wrapperVersion("0.2.0")) + .build(); + LDValue sdkData1 = makeSdkData(config1); + assertThat(jsonFromValue(sdkData1), allOf( + jsonProperty("wrapperName", jsonEqualsValue("Clojure")), + jsonProperty("wrapperVersion", jsonEqualsValue("0.2.0")) + )); + + LDConfig config2 = new LDConfig.Builder() + .http(Components.httpConfiguration().wrapper("Scala", null)) + .wrapper(Components.wrapperInfo().wrapperName("Clojure")) + .build(); + LDValue sdkData2 = makeSdkData(config2); + assertThat(jsonFromValue(sdkData2), allOf( + jsonProperty("wrapperName", jsonEqualsValue("Clojure")), + jsonProperty("wrapperVersion", jsonUndefined()) + )); + } + + @Test + public void sdkDataWrapperPropertiesUsingWrapperInfo() { + LDConfig config1 = new LDConfig.Builder() + .wrapper(Components.wrapperInfo().wrapperName("Clojure").wrapperVersion("0.2.0")) + .build(); + LDValue sdkData1 = makeSdkData(config1); + 
assertThat(jsonFromValue(sdkData1), allOf( + jsonProperty("wrapperName", jsonEqualsValue("Clojure")), + jsonProperty("wrapperVersion", jsonEqualsValue("0.2.0")) + )); + + LDConfig config2 = new LDConfig.Builder() + .wrapper(Components.wrapperInfo().wrapperName("Clojure")) + .build(); + LDValue sdkData2 = makeSdkData(config2); + assertThat(jsonFromValue(sdkData2), allOf( + jsonProperty("wrapperName", jsonEqualsValue("Clojure")), + jsonProperty("wrapperVersion", jsonUndefined()) + )); + } + + @Test + public void platformDataOsNames() { + String realOsName = System.getProperty("os.name"); + try { + System.setProperty("os.name", "Mac OS X"); + assertThat(jsonFromValue(makePlatformData()), + jsonProperty("osName", jsonEqualsValue("MacOS"))); + + System.setProperty("os.name", "Windows 10"); + assertThat(jsonFromValue(makePlatformData()), + jsonProperty("osName", jsonEqualsValue("Windows"))); + + System.setProperty("os.name", "Linux"); + assertThat(jsonFromValue(makePlatformData()), + jsonProperty("osName", jsonEqualsValue("Linux"))); + + System.clearProperty("os.name"); + assertThat(jsonFromValue(makePlatformData()), + jsonProperty("osName", jsonUndefined())); + } finally { + System.setProperty("os.name", realOsName); + } + } + + private ObjectBuilder expectedDefaultProperties() { + return expectedDefaultPropertiesWithoutStreaming() + .put("reconnectTimeMillis", 1_000); + } + + private ObjectBuilder expectedDefaultPropertiesWithoutStreaming() { + return LDValue.buildObject() + .put("allAttributesPrivate", false) + .put("connectTimeoutMillis", 2_000) + .put("customBaseURI", false) + .put("customEventsURI", false) + .put("customStreamURI", false) + .put("dataStoreType", "memory") + .put("diagnosticRecordingIntervalMillis", 900_000) + .put("eventsCapacity", 10_000) + .put("eventsFlushIntervalMillis",5_000) + .put("samplingInterval", 0) + .put("socketTimeoutMillis", 10_000) + .put("startWaitMillis", 5_000) + .put("streamingDisabled", false) + .put("userKeysCapacity", 1_000) 
+ .put("userKeysFlushIntervalMillis", 300_000) + .put("usingProxy", false) + .put("usingProxyAuthenticator", false) + .put("usingRelayDaemon", false); + } + + private static LDValue makeSdkData(LDConfig config) { + return makeDiagnosticInitEvent(config).get("sdk"); + } + + private static LDValue makePlatformData() { + return makeDiagnosticInitEvent(LDConfig.DEFAULT).get("platform"); + } + + private static LDValue makeConfigData(LDConfig config) { + return makeDiagnosticInitEvent(config).get("configuration"); + } + + private static LDValue makeDiagnosticInitEvent(LDConfig config) { + ClientContext context = clientContext("SDK_KEY", config); // the SDK key doesn't matter for these tests + DiagnosticStore diagnosticStore = new DiagnosticStore( + ServerSideDiagnosticEvents.getSdkDiagnosticParams(context, config)); + return diagnosticStore.getInitEvent().getJsonValue(); + } + + @Test + public void testDefaultDiagnosticConfiguration() { + LDConfig ldConfig = new LDConfig.Builder().build(); + LDValue diagnosticJson = makeConfigData(ldConfig); + LDValue expected = expectedDefaultProperties().build(); + + assertEquals(expected, diagnosticJson); + } + + @Test + public void testCustomDiagnosticConfigurationGeneralProperties() { + LDConfig ldConfig = new LDConfig.Builder() + .startWait(Duration.ofSeconds(10)) + .build(); + + LDValue diagnosticJson = makeConfigData(ldConfig); + LDValue expected = expectedDefaultProperties() + .put("startWaitMillis", 10_000) + .build(); + + assertJsonEquals(expected, diagnosticJson); + } + + @Test + public void testCustomDiagnosticConfigurationForServiceEndpoints() { + LDConfig ldConfig1 = new LDConfig.Builder() + .serviceEndpoints( + Components.serviceEndpoints() + .streaming(CUSTOM_URI) + .events(CUSTOM_URI) + // this shouldn't show up in diagnostics because we don't use the polling component + .polling(CUSTOM_URI) + ) + .build(); + LDValue expected1 = expectedDefaultProperties() + .put("customStreamURI", true) + .put("customEventsURI", true) 
+ .build(); + assertJsonEquals(expected1, makeConfigData(ldConfig1)); + + LDConfig ldConfig2 = new LDConfig.Builder() + .serviceEndpoints( + Components.serviceEndpoints() + .events(CUSTOM_URI) + .polling(CUSTOM_URI) + ) + .dataSource( + Components.pollingDataSource() + ) + .events(Components.sendEvents()) + .build(); + LDValue expected2 = expectedDefaultPropertiesWithoutStreaming() + .put("customBaseURI", true) + .put("customEventsURI", true) + .put("customStreamURI", false) + .put("pollingIntervalMillis", PollingDataSourceBuilder.DEFAULT_POLL_INTERVAL.toMillis()) + .put("streamingDisabled", true) + .build(); + assertJsonEquals(expected2, makeConfigData(ldConfig2)); + } + + @Test + public void testCustomDiagnosticConfigurationForStreaming() { + LDConfig ldConfig1 = new LDConfig.Builder() + .dataSource( + Components.streamingDataSource() + .initialReconnectDelay(Duration.ofSeconds(2)) + ) + .build(); + LDValue expected1 = expectedDefaultPropertiesWithoutStreaming() + .put("reconnectTimeMillis", 2_000) + .build(); + assertJsonEquals(expected1, makeConfigData(ldConfig1)); + + LDConfig ldConfig2 = new LDConfig.Builder() + .dataSource(Components.streamingDataSource()) // no custom base URIs + .build(); + LDValue expected2 = expectedDefaultProperties().build(); + assertEquals(expected2, makeConfigData(ldConfig2)); + } + + @Test + public void testCustomDiagnosticConfigurationForPolling() { + LDConfig ldConfig1 = new LDConfig.Builder() + .dataSource( + Components.pollingDataSource() + .pollInterval(Duration.ofSeconds(60)) + ) + .build(); + LDValue expected1 = expectedDefaultPropertiesWithoutStreaming() + .put("pollingIntervalMillis", 60_000) + .put("streamingDisabled", true) + .build(); + assertJsonEquals(expected1, makeConfigData(ldConfig1)); + + LDConfig ldConfig2 = new LDConfig.Builder() + .dataSource(Components.pollingDataSource()) // no custom base URI + .build(); + LDValue expected2 = expectedDefaultPropertiesWithoutStreaming() + .put("pollingIntervalMillis", 
PollingDataSourceBuilder.DEFAULT_POLL_INTERVAL.toMillis()) + .put("streamingDisabled", true) + .build(); + assertJsonEquals(expected2, makeConfigData(ldConfig2)); + } + + @Test + public void testCustomDiagnosticConfigurationForCustomDataStore() { + LDConfig ldConfig1 = new LDConfig.Builder() + .dataStore(new DataStoreFactoryWithDiagnosticDescription(LDValue.of("my-test-store"))) + .build(); + LDValue expected1 = expectedDefaultProperties().put("dataStoreType", "my-test-store").build(); + assertJsonEquals(expected1, makeConfigData(ldConfig1)); + + LDConfig ldConfig2 = new LDConfig.Builder() + .dataStore(new DataStoreFactoryWithoutDiagnosticDescription()) + .build(); + LDValue expected2 = expectedDefaultProperties().put("dataStoreType", "custom").build(); + assertJsonEquals(expected2, makeConfigData(ldConfig2)); + + LDConfig ldConfig3 = new LDConfig.Builder() + .dataStore(new DataStoreFactoryWithDiagnosticDescription(null)) + .build(); + LDValue expected3 = expectedDefaultProperties().put("dataStoreType", "custom").build(); + assertJsonEquals(expected3, makeConfigData(ldConfig3)); + + LDConfig ldConfig4 = new LDConfig.Builder() + .dataStore(new DataStoreFactoryWithDiagnosticDescription(LDValue.of(4))) + .build(); + LDValue expected4 = expectedDefaultProperties().put("dataStoreType", "custom").build(); + assertJsonEquals(expected4, makeConfigData(ldConfig4)); + } + + @Test + public void testCustomDiagnosticConfigurationForPersistentDataStore() { + LDConfig ldConfig1 = new LDConfig.Builder() + .dataStore(Components.persistentDataStore(new PersistentDataStoreFactoryWithComponentName())) + .build(); + + LDValue diagnosticJson1 = makeConfigData(ldConfig1); + LDValue expected1 = expectedDefaultProperties().put("dataStoreType", "my-test-store").build(); + + assertJsonEquals(expected1, diagnosticJson1); + + LDConfig ldConfig2 = new LDConfig.Builder() + .dataStore(Components.persistentDataStore(new PersistentDataStoreFactoryWithoutComponentName())) + .build(); + + LDValue 
diagnosticJson2 = makeConfigData(ldConfig2); + LDValue expected2 = expectedDefaultProperties().put("dataStoreType", "custom").build(); + + assertJsonEquals(expected2, diagnosticJson2); + } + + @Test + public void testCustomDiagnosticConfigurationForEvents() { + LDConfig ldConfig1 = new LDConfig.Builder() + .events( + Components.sendEvents() + .allAttributesPrivate(true) + .capacity(20_000) + .diagnosticRecordingInterval(Duration.ofSeconds(1_800)) + .flushInterval(Duration.ofSeconds(10)) + .userKeysCapacity(2_000) + .userKeysFlushInterval(Duration.ofSeconds(600)) + ) + .build(); + + LDValue diagnosticJson1 = makeConfigData(ldConfig1); + LDValue expected1 = expectedDefaultProperties() + .put("allAttributesPrivate", true) + .put("diagnosticRecordingIntervalMillis", 1_800_000) + .put("eventsCapacity", 20_000) + .put("eventsFlushIntervalMillis", 10_000) + .put("userKeysCapacity", 2_000) + .put("userKeysFlushIntervalMillis", 600_000) + .build(); + + assertJsonEquals(expected1, diagnosticJson1); + + LDConfig ldConfig2 = new LDConfig.Builder() + .events(Components.sendEvents()) // no custom base URI + .build(); + + LDValue diagnosticJson2 = makeConfigData(ldConfig2); + LDValue expected2 = expectedDefaultProperties().build(); + + assertJsonEquals(expected2, diagnosticJson2); + } + + @Test + public void testCustomDiagnosticConfigurationForDaemonMode() { + LDConfig ldConfig = new LDConfig.Builder() + .dataSource(Components.externalUpdatesOnly()) + .build(); + + LDValue diagnosticJson = makeConfigData(ldConfig); + LDValue expected = expectedDefaultPropertiesWithoutStreaming() + .put("usingRelayDaemon", true) + .build(); + + assertJsonEquals(expected, diagnosticJson); + } + + @Test + public void testCustomDiagnosticConfigurationHttpProperties() { + LDConfig ldConfig = new LDConfig.Builder() + .http( + Components.httpConfiguration() + .connectTimeout(Duration.ofSeconds(5)) + .socketTimeout(Duration.ofSeconds(20)) + .proxyHostAndPort("localhost", 1234) + 
.proxyAuth(Components.httpBasicAuthentication("username", "password")) + ) + .build(); + + LDValue diagnosticJson = makeConfigData(ldConfig); + LDValue expected = expectedDefaultProperties() + .put("connectTimeoutMillis", 5_000) + .put("socketTimeoutMillis", 20_000) + .put("usingProxy", true) + .put("usingProxyAuthenticator", true) + .build(); + + assertJsonEquals(expected, diagnosticJson); + } + + @Test + public void customComponentCannotInjectUnsupportedConfigProperty() { + String unsupportedPropertyName = "fake"; + LDValue description = LDValue.buildObject().put(unsupportedPropertyName, true).build(); + LDConfig config = new LDConfig.Builder() + .dataSource(new DataSourceFactoryWithDiagnosticDescription(description)) + .build(); + + LDValue diagnosticJson = makeConfigData(config); + + assertThat(jsonFromValue(diagnosticJson), jsonProperty(unsupportedPropertyName, jsonUndefined())); + } + + @Test + public void customComponentCannotInjectSupportedConfigPropertyWithWrongType() { + LDValue description = LDValue.buildObject().put("streamingDisabled", 3).build(); + LDConfig config = new LDConfig.Builder() + .dataSource(new DataSourceFactoryWithDiagnosticDescription(description)) + .build(); + + LDValue diagnosticJson = makeConfigData(config); + + assertThat(jsonFromValue(diagnosticJson), jsonProperty("streamingDisabled", jsonUndefined())); + } + + @Test + public void customComponentDescriptionOfUnsupportedTypeIsIgnored() { + LDConfig config1 = new LDConfig.Builder() + .dataSource(new DataSourceFactoryWithDiagnosticDescription(LDValue.of(3))) + .build(); + LDConfig config2 = new LDConfig.Builder() + .dataSource(new DataSourceFactoryWithoutDiagnosticDescription()) + .build(); + + LDValue diagnosticJson1 = makeConfigData(config1); + LDValue diagnosticJson2 = makeConfigData(config2); + + assertJsonEquals(diagnosticJson1, diagnosticJson2); + } + + private static class DataSourceFactoryWithDiagnosticDescription implements ComponentConfigurer, DiagnosticDescription { + 
private final LDValue value; + + DataSourceFactoryWithDiagnosticDescription(LDValue value) { + this.value = value; + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + return value; + } + + @Override + public DataSource build(ClientContext context) { + return null; + } + } + + private static class DataSourceFactoryWithoutDiagnosticDescription implements ComponentConfigurer { + @Override + public DataSource build(ClientContext context) { + return null; + } + } + + private static class DataStoreFactoryWithDiagnosticDescription implements ComponentConfigurer, DiagnosticDescription { + private final LDValue value; + + DataStoreFactoryWithDiagnosticDescription(LDValue value) { + this.value = value; + } + + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + return value; + } + + @Override + public DataStore build(ClientContext context) { + return null; + } + } + + private static class DataStoreFactoryWithoutDiagnosticDescription implements ComponentConfigurer { + @Override + public DataStore build(ClientContext context) { + return null; + } + } + + private static class PersistentDataStoreFactoryWithComponentName implements ComponentConfigurer, DiagnosticDescription { + @Override + public LDValue describeConfiguration(ClientContext clientContext) { + return LDValue.of("my-test-store"); + } + + @Override + public PersistentDataStore build(ClientContext context) { + return null; + } + } + + private static class PersistentDataStoreFactoryWithoutComponentName implements ComponentConfigurer { + @Override + public PersistentDataStore build(ClientContext context) { + return null; + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicatorTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicatorTest.java new file mode 100644 index 0000000..7c8e6f1 --- /dev/null +++ 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/ServerSideEventContextDeduplicatorTest.java @@ -0,0 +1,66 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.internal.events.EventContextDeduplicator; + +import org.junit.Test; + +import java.time.Duration; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +@SuppressWarnings("javadoc") +public class ServerSideEventContextDeduplicatorTest { + private static final Duration LONG_INTERVAL = Duration.ofHours(3); + + @Test + public void configuredFlushIntervalIsReturned() { + EventContextDeduplicator ecd = new ServerSideEventContextDeduplicator(1000, LONG_INTERVAL); + assertThat(ecd.getFlushInterval(), equalTo(LONG_INTERVAL.toMillis())); + } + + @Test + public void singleKindContextKeysAreDeduplicated() { + EventContextDeduplicator ecd = new ServerSideEventContextDeduplicator(1000, LONG_INTERVAL); + + assertThat(ecd.processContext(LDContext.create("a")), is(true)); + assertThat(ecd.processContext(LDContext.create("b")), is(true)); + assertThat(ecd.processContext(LDContext.create("a")), is(false)); + assertThat(ecd.processContext(LDContext.create("c")), is(true)); + assertThat(ecd.processContext(LDContext.create("c")), is(false)); + assertThat(ecd.processContext(LDContext.create("b")), is(false)); + } + + @Test + public void keysAreDisambiguatedByKind() { + EventContextDeduplicator ecd = new ServerSideEventContextDeduplicator(1000, LONG_INTERVAL); + ContextKind kind1 = ContextKind.of("kind1"), kind2 = ContextKind.of("kind2"); + + assertThat(ecd.processContext(LDContext.create(kind1, "a")), is(true)); + assertThat(ecd.processContext(LDContext.create(kind1, "b")), is(true)); + assertThat(ecd.processContext(LDContext.create(kind1, "a")), is(false)); + assertThat(ecd.processContext(LDContext.create(kind2, "a")), is(true)); 
+ assertThat(ecd.processContext(LDContext.create(kind2, "a")), is(false)); + } + + @Test + public void multiKindContextIsDisambiguatedFromSingleKinds() { + // This should work automatically because of the defined behavior of LDContext.fullyQualifiedKey() + EventContextDeduplicator ecd = new ServerSideEventContextDeduplicator(1000, LONG_INTERVAL); + ContextKind kind1 = ContextKind.of("kind1"), kind2 = ContextKind.of("kind2"); + + LDContext c1 = LDContext.create(kind1, "a"); + LDContext c2 = LDContext.create(kind2, "a"); + LDContext mc = LDContext.createMulti(c1, c2); + + assertThat(ecd.processContext(c1), is(true)); + assertThat(ecd.processContext(c2), is(true)); + assertThat(ecd.processContext(c1), is(false)); + assertThat(ecd.processContext(c2), is(false)); + assertThat(ecd.processContext(mc), is(true)); + assertThat(ecd.processContext(mc), is(false)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SimpleLRUCacheTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SimpleLRUCacheTest.java new file mode 100644 index 0000000..69cf609 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/SimpleLRUCacheTest.java @@ -0,0 +1,60 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.SimpleLRUCache; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class SimpleLRUCacheTest { + @Test + public void getReturnsNullForNeverSeenValue() { + SimpleLRUCache cache = new SimpleLRUCache<>(10); + assertNull(cache.get("a")); + } + + @Test + public void putReturnsNullForNeverSeenValue() { + SimpleLRUCache cache = new SimpleLRUCache<>(10); + assertNull(cache.put("a", "1")); + } + + @Test + public void putReturnsPreviousValueForAlreadySeenValue() { + SimpleLRUCache cache = new SimpleLRUCache<>(10); + cache.put("a", "1"); + assertEquals("1", cache.put("a", "2")); + } + + @Test + public void 
oldestValueIsDiscardedWhenCapacityIsExceeded() { + SimpleLRUCache cache = new SimpleLRUCache<>(2); + cache.put("a", "1"); + cache.put("b", "2"); + cache.put("c", "3"); + assertEquals("3", cache.get("c")); + assertEquals("2", cache.get("b")); + assertNull(cache.get("a")); + } + + @Test + public void reAddingValueMakesItNewAgain() { + SimpleLRUCache cache = new SimpleLRUCache<>(2); + cache.put("a", "1"); + cache.put("b", "2"); + cache.put("c", "3"); + cache.put("a", "1"); + assertEquals("3", cache.get("c")); + assertEquals("1", cache.get("a")); + assertNull(cache.get("b")); + } + + @Test + public void zeroLengthCacheTreatsValuesAsNew() { + SimpleLRUCache cache = new SimpleLRUCache<>(0); + cache.put("a", "1"); + assertNull(cache.put("a", "2")); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorEventsTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorEventsTest.java new file mode 100644 index 0000000..6bc69f7 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorEventsTest.java @@ -0,0 +1,130 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.StreamProcessorEvents.DeleteData; +import com.launchdarkly.sdk.server.StreamProcessorEvents.PatchData; +import com.launchdarkly.sdk.server.StreamProcessorEvents.PutData; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.SerializationException; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.JsonHelpers.serialize; +import 
static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.StreamProcessorEvents.parseDeleteData; +import static com.launchdarkly.sdk.server.StreamProcessorEvents.parsePatchData; +import static com.launchdarkly.sdk.server.StreamProcessorEvents.parsePutData; +import static com.launchdarkly.sdk.server.TestUtil.assertDataSetEquals; +import static com.launchdarkly.sdk.server.TestUtil.assertItemEquals; +import static com.launchdarkly.sdk.server.TestUtil.assertThrows; +import static com.launchdarkly.sdk.server.TestUtil.jsonReaderFrom; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class StreamProcessorEventsTest { + + @Test + public void parsingPutData() throws Exception { + FeatureFlag flag = flagBuilder("flag1").version(1000).build(); + Segment segment = segmentBuilder("segment1").version(1000).build(); + + String allDataJson = "{" + + "\"flags\": {\"flag1\":" + serialize(flag) + "}" + + ",\"segments\": {\"segment1\":" + serialize(segment) + "}}"; + FullDataSet expectedAllData = DataBuilder.forStandardTypes() + .addAny(FEATURES, flag).addAny(SEGMENTS, segment).build(); + String validInput = "{\"path\": \"/\", \"data\":" + allDataJson + "}"; + PutData validResult = parsePutData(jsonReaderFrom(validInput)); + assertThat(validResult.path, equalTo("/")); + assertDataSetEquals(expectedAllData, validResult.data); + + String inputWithoutPath = "{\"data\":" + allDataJson + "}"; + PutData resultWithoutPath = parsePutData(jsonReaderFrom(inputWithoutPath)); + assertThat(resultWithoutPath.path, nullValue()); + assertDataSetEquals(expectedAllData, validResult.data); + + String inputWithoutData = "{\"path\":\"/\"}"; + assertThrows(SerializationException.class, + () -> parsePutData(jsonReaderFrom(inputWithoutData))); + } + + @Test + public void 
parsingPatchData() throws Exception { + FeatureFlag flag = flagBuilder("flag1").version(1000).build(); + Segment segment = segmentBuilder("segment1").version(1000).build(); + String flagJson = serialize(flag), segmentJson = serialize(segment); + + String validFlagInput = "{\"path\":\"/flags/flag1\", \"data\":" + flagJson + "}"; + PatchData validFlagResult = parsePatchData(jsonReaderFrom(validFlagInput)); + assertThat(validFlagResult.kind, equalTo(FEATURES)); + assertThat(validFlagResult.key, equalTo(flag.getKey())); + assertItemEquals(flag, validFlagResult.item); + + String validSegmentInput = "{\"path\":\"/segments/segment1\", \"data\":" + segmentJson + "}"; + PatchData validSegmentResult = parsePatchData(jsonReaderFrom(validSegmentInput)); + assertThat(validSegmentResult.kind, equalTo(SEGMENTS)); + assertThat(validSegmentResult.key, equalTo(segment.getKey())); + assertItemEquals(segment, validSegmentResult.item); + + String validFlagInputWithDataBeforePath = "{\"data\":" + flagJson + ",\"path\":\"/flags/flag1\"}"; + PatchData validFlagResultWithDataBeforePath = parsePatchData( + jsonReaderFrom(validFlagInputWithDataBeforePath)); + assertThat(validFlagResultWithDataBeforePath.kind, equalTo(FEATURES)); + assertThat(validFlagResultWithDataBeforePath.key, equalTo(flag.getKey())); + assertItemEquals(flag, validFlagResultWithDataBeforePath.item); + + String validSegmentInputWithDataBeforePath = "{\"data\":" + segmentJson + ",\"path\":\"/segments/segment1\"}"; + PatchData validSegmentResultWithDataBeforePath = parsePatchData( + jsonReaderFrom(validSegmentInputWithDataBeforePath)); + assertThat(validSegmentResultWithDataBeforePath.kind, equalTo(SEGMENTS)); + assertThat(validSegmentResultWithDataBeforePath.key, equalTo(segment.getKey())); + assertItemEquals(segment, validSegmentResultWithDataBeforePath.item); + + String inputWithUnrecognizedPath = "{\"path\":\"/cats/lucy\", \"data\":" + flagJson + "}"; + PatchData resultWithUnrecognizedPath = parsePatchData( + 
jsonReaderFrom(inputWithUnrecognizedPath)); + assertThat(resultWithUnrecognizedPath.kind, nullValue()); + + String inputWithMissingPath = "{\"data\":" + flagJson + "}"; + assertThrows(SerializationException.class, + () -> parsePatchData(jsonReaderFrom(inputWithMissingPath))); + + String inputWithMissingData = "{\"path\":\"/flags/flag1\"}"; + assertThrows(SerializationException.class, + () -> parsePatchData(jsonReaderFrom(inputWithMissingData))); + } + + @Test + public void parsingDeleteData() { + String validFlagInput = "{\"path\":\"/flags/flag1\", \"version\": 3}"; + DeleteData validFlagResult = parseDeleteData(jsonReaderFrom(validFlagInput)); + assertThat(validFlagResult.kind, equalTo(FEATURES)); + assertThat(validFlagResult.key, equalTo("flag1")); + assertThat(validFlagResult.version, equalTo(3)); + + String validSegmentInput = "{\"path\":\"/segments/segment1\", \"version\": 4}"; + DeleteData validSegmentResult = parseDeleteData(jsonReaderFrom(validSegmentInput)); + assertThat(validSegmentResult.kind, equalTo(SEGMENTS)); + assertThat(validSegmentResult.key, equalTo("segment1")); + assertThat(validSegmentResult.version, equalTo(4)); + + String inputWithUnrecognizedPath = "{\"path\":\"/cats/macavity\", \"version\": 9}"; + DeleteData resultWithUnrecognizedPath = parseDeleteData(jsonReaderFrom(inputWithUnrecognizedPath)); + assertThat(resultWithUnrecognizedPath.kind, nullValue()); + + String inputWithMissingPath = "{\"version\": 1}"; + assertThrows(SerializationException.class, + () -> parseDeleteData(jsonReaderFrom(inputWithMissingPath))); + + String inputWithMissingVersion = "{\"path\": \"/flags/flag1\"}"; + assertThrows(SerializationException.class, + () -> parseDeleteData(jsonReaderFrom(inputWithMissingVersion))); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorTest.java new file mode 100644 index 0000000..884bc47 --- /dev/null +++ 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/StreamProcessorTest.java @@ -0,0 +1,877 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.eventsource.MessageEvent; +import com.launchdarkly.logging.LDLogLevel; +import com.launchdarkly.logging.LogCapture; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.DiagnosticStore; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.VersionedData; +import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.TestComponents.DelegatingDataStore; +import com.launchdarkly.sdk.server.TestComponents.MockDataSourceUpdates; +import com.launchdarkly.sdk.server.TestComponents.MockDataSourceUpdates.UpsertParams; +import com.launchdarkly.sdk.server.TestComponents.MockDataStoreStatusProvider; +import com.launchdarkly.sdk.server.integrations.StreamingDataSourceBuilder; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.Status; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; +import com.launchdarkly.testhelpers.ConcurrentHelpers; +import com.launchdarkly.testhelpers.httptest.Handler; +import com.launchdarkly.testhelpers.httptest.Handlers; +import com.launchdarkly.testhelpers.httptest.HttpServer; +import com.launchdarkly.testhelpers.httptest.RequestInfo; +import 
com.launchdarkly.testhelpers.httptest.SpecialHttpConfigurations; +import com.launchdarkly.testhelpers.tcptest.TcpHandler; +import com.launchdarkly.testhelpers.tcptest.TcpHandlers; +import com.launchdarkly.testhelpers.tcptest.TcpServer; + +import org.hamcrest.MatcherAssert; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.net.URI; +import java.time.Duration; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.ModelBuilders.segmentBuilder; +import static com.launchdarkly.sdk.server.TestComponents.basicDiagnosticStore; +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestComponents.dataSourceUpdates; +import static com.launchdarkly.sdk.server.TestUtil.requireDataSourceStatus; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertFutureIsCompleted; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; +import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class StreamProcessorTest extends BaseTest { + private static final String SDK_KEY = "sdk_key"; + private static final Duration BRIEF_RECONNECT_DELAY = Duration.ofMillis(10); + private static final String FEATURE1_KEY = "feature1"; + private static final int FEATURE1_VERSION = 11; + private static final DataModel.FeatureFlag FEATURE = flagBuilder(FEATURE1_KEY).version(FEATURE1_VERSION).build(); + private static final String SEGMENT1_KEY = "segment1"; + private static final int SEGMENT1_VERSION = 22; + private static final DataModel.Segment SEGMENT = segmentBuilder(SEGMENT1_KEY).version(SEGMENT1_VERSION).build(); + private static final String EMPTY_DATA_EVENT = makePutEvent(new DataBuilder().addAny(FEATURES).addAny(SEGMENTS)); + + private InMemoryDataStore dataStore; + private MockDataSourceUpdates dataSourceUpdates; + private MockDataStoreStatusProvider dataStoreStatusProvider; + + private static Handler streamResponse(String data) { + return Handlers.all( + Handlers.SSE.start(), + Handlers.SSE.event(data), + Handlers.SSE.leaveOpen() + ); + } + + private static Handler closableStreamResponse(String data, Semaphore closeSignal) { + return Handlers.all( + Handlers.SSE.start(), + Handlers.SSE.event(data), + Handlers.waitFor(closeSignal) + ); + } + + private static Handler streamThatSendsEventsAndThenStaysOpen(String... 
events) { + return Handlers.all( + Handlers.SSE.start(), + ctx -> { + for (String event: events) { + Handlers.SSE.event(event).apply(ctx); + } + Handlers.SSE.leaveOpen().apply(ctx); + } + ); + } + + private static Handler streamResponseFromQueue(BlockingQueue events) { + return Handlers.all( + Handlers.SSE.start(), + ctx -> { + while (true) { + try { + String event = events.take(); + Handlers.SSE.event(event).apply(ctx); + } catch (InterruptedException e) { + break; + } + } + } + ); + } + + private static String makeEvent(String type, String data) { + return "event: " + type + "\ndata: " + data; + } + + private static String makePutEvent(DataBuilder data) { + return makeEvent("put", "{\"data\":" + data.buildJson().toJsonString() + "}"); + } + + private static String makePatchEvent(String path, DataKind kind, VersionedData item) { + String json = kind.serialize(new ItemDescriptor(item.getVersion(), item)); + return makeEvent("patch", "{\"path\":\"" + path + "\",\"data\":" + json + "}"); + } + + private static String makeDeleteEvent(String path, int version) { + return makeEvent("delete", "{\"path\":\"" + path + "\",\"version\":" + version + "}"); + } + + @Before + public void setup() { + dataStore = new InMemoryDataStore(); + dataStoreStatusProvider = new MockDataStoreStatusProvider(); + dataSourceUpdates = TestComponents.dataSourceUpdates(dataStore, dataStoreStatusProvider); + } + + @Test + public void builderHasDefaultConfiguration() throws Exception { + ComponentConfigurer f = Components.streamingDataSource(); + try (StreamProcessor sp = (StreamProcessor)f.build(clientContext(SDK_KEY, baseConfig().build()) + .withDataSourceUpdateSink(dataSourceUpdates))) { + assertThat(sp.initialReconnectDelay, equalTo(StreamingDataSourceBuilder.DEFAULT_INITIAL_RECONNECT_DELAY)); + + String expected = StandardEndpoints.DEFAULT_STREAMING_BASE_URI.toString() + StandardEndpoints.STREAMING_REQUEST_PATH; + assertThat(sp.streamUri.toString(), equalTo(expected)); + } + } + + @Test + 
public void builderCanSpecifyConfiguration() throws Exception { + ComponentConfigurer f = Components.streamingDataSource() + .initialReconnectDelay(Duration.ofMillis(5555)) + .payloadFilter("myFilter"); + try (StreamProcessor sp = (StreamProcessor)f.build(clientContext(SDK_KEY, baseConfig().build()) + .withDataSourceUpdateSink(dataSourceUpdates(dataStore)))) { + assertThat(sp.initialReconnectDelay, equalTo(Duration.ofMillis(5555))); + assertThat(sp.streamUri.toString(), containsString("filter=myFilter")); + } + } + + @Test + public void emptyFilterIgnored() throws Exception { + ComponentConfigurer f = Components.streamingDataSource() + .initialReconnectDelay(Duration.ofMillis(5555)) + .payloadFilter(""); + try (StreamProcessor sp = (StreamProcessor)f.build(clientContext(SDK_KEY, baseConfig().build()) + .withDataSourceUpdateSink(dataSourceUpdates(dataStore)))) { + assertThat(sp.initialReconnectDelay, equalTo(Duration.ofMillis(5555))); + assertThat(sp.streamUri.toString(), not(containsString("filter"))); + } + } + + @Test + public void verifyStreamRequestProperties() throws Exception { + HttpConfiguration httpConfig = clientContext(SDK_KEY, baseConfig().build()).getHttp(); + + try (HttpServer server = HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + + RequestInfo req = server.getRecorder().requireRequest(); + assertThat(req.getMethod(), equalTo("GET")); + assertThat(req.getPath(), equalTo("/all")); + + for (Map.Entry kv: httpConfig.getDefaultHeaders()) { + assertThat(req.getHeader(kv.getKey()), equalTo(kv.getValue())); + } + assertThat(req.getHeader("Accept"), equalTo("text/event-stream")); + } + } + } + + @Test + public void streamBaseUriDoesNotNeedTrailingSlash() throws Exception { + try (HttpServer server = HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + URI baseUri = server.getUri(); + MatcherAssert.assertThat(baseUri.toString(), endsWith("/")); + URI 
trimmedUri = URI.create(server.getUri().toString().substring(0, server.getUri().toString().length() - 1)); + try (StreamProcessor sp = createStreamProcessor(null, trimmedUri)) { + sp.start(); + + RequestInfo req = server.getRecorder().requireRequest(); + assertThat(req.getPath(), equalTo("/all")); + } + } + } + + @Test + public void streamBaseUriCanHaveContextPath() throws Exception { + try (HttpServer server = HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + URI baseUri = server.getUri().resolve("/context/path"); + try (StreamProcessor sp = createStreamProcessor(null, baseUri)) { + sp.start(); + + RequestInfo req = server.getRecorder().requireRequest(); + assertThat(req.getPath(), equalTo("/context/path/all")); + } + } + } + + @Test + public void putCausesFeatureToBeStored() throws Exception { + FeatureFlag flag = flagBuilder(FEATURE1_KEY).version(FEATURE1_VERSION).build(); + DataBuilder data = new DataBuilder().addAny(FEATURES, flag).addAny(SEGMENTS); + Handler streamHandler = streamResponse(makePutEvent(data)); + + try (HttpServer server = HttpServer.start(streamHandler)) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + + dataSourceUpdates.awaitInit(); + assertFeatureInStore(flag); + } + } + } + + @Test + public void putCausesSegmentToBeStored() throws Exception { + Segment segment = ModelBuilders.segmentBuilder(SEGMENT1_KEY).version(SEGMENT1_VERSION).build(); + DataBuilder data = new DataBuilder().addAny(FEATURES).addAny(SEGMENTS, segment); + Handler streamHandler = streamResponse(makePutEvent(data)); + + try (HttpServer server = HttpServer.start(streamHandler)) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + + dataSourceUpdates.awaitInit(); + assertSegmentInStore(SEGMENT); + } + } + } + + @Test + public void storeNotInitializedByDefault() throws Exception { + try (HttpServer server = HttpServer.start(streamResponse(""))) { + try (StreamProcessor sp = 
createStreamProcessor(null, server.getUri())) { + sp.start(); + assertFalse(dataStore.isInitialized()); + } + } + } + + @Test + public void processorNotInitializedByDefault() throws Exception { + try (HttpServer server = HttpServer.start(streamResponse(""))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + assertFalse(sp.isInitialized()); + } + } + } + + @Test + public void futureIsNotSetByDefault() throws Exception { + try (HttpServer server = HttpServer.start(streamResponse(""))) { + try (StreamProcessor sp = createStreamProcessor(server.getUri())) { + Future future = sp.start(); + assertFalse(future.isDone()); + } + } + } + + @Test + public void putCausesStoreAndProcessorToBeInitialized() throws Exception { + try (HttpServer server = HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + Future future = sp.start(); + + dataSourceUpdates.awaitInit(); + assertFutureIsCompleted(future, 1, TimeUnit.SECONDS); + assertTrue(dataStore.isInitialized()); + assertTrue(sp.isInitialized()); + assertTrue(future.isDone()); + } + } + } + + @Test + public void patchUpdatesFeature() throws Exception { + doPatchSuccessTest(FEATURES, FEATURE, "/flags/" + FEATURE.getKey()); + } + + @Test + public void patchUpdatesSegment() throws Exception { + doPatchSuccessTest(SEGMENTS, SEGMENT, "/segments/" + SEGMENT.getKey()); + } + + private void doPatchSuccessTest(DataKind kind, VersionedData item, String path) throws Exception { + BlockingQueue events = new LinkedBlockingQueue<>(); + events.add(EMPTY_DATA_EVENT); + + try (HttpServer server = HttpServer.start(streamResponseFromQueue(events))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + dataSourceUpdates.awaitInit(); + + events.add(makePatchEvent(path, kind, item)); + UpsertParams gotUpsert = dataSourceUpdates.awaitUpsert(); + + assertThat(gotUpsert.kind, equalTo(kind)); + 
assertThat(gotUpsert.key, equalTo(item.getKey())); + assertThat(gotUpsert.item.getVersion(), equalTo(item.getVersion())); + + ItemDescriptor result = dataStore.get(kind, item.getKey()); + assertNotNull(result.getItem()); + assertEquals(item.getVersion(), result.getVersion()); + } + } + } + + @Test + public void deleteDeletesFeature() throws Exception { + doDeleteSuccessTest(FEATURES, FEATURE, "/flags/" + FEATURE.getKey()); + } + + @Test + public void deleteDeletesSegment() throws Exception { + doDeleteSuccessTest(SEGMENTS, SEGMENT, "/segments/" + SEGMENT.getKey()); + } + + private void doDeleteSuccessTest(DataKind kind, VersionedData item, String path) throws Exception { + BlockingQueue events = new LinkedBlockingQueue<>(); + events.add(EMPTY_DATA_EVENT); + + try (HttpServer server = HttpServer.start(streamResponseFromQueue(events))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + dataSourceUpdates.awaitInit(); + + dataStore.upsert(kind, item.getKey(), new ItemDescriptor(item.getVersion(), item)); + + events.add(makeDeleteEvent(path, item.getVersion() + 1)); + UpsertParams gotUpsert = dataSourceUpdates.awaitUpsert(); + + assertThat(gotUpsert.kind, equalTo(kind)); + assertThat(gotUpsert.key, equalTo(item.getKey())); + assertThat(gotUpsert.item.getVersion(), equalTo(item.getVersion() + 1)); + + assertEquals(ItemDescriptor.deletedItem(item.getVersion() + 1), dataStore.get(kind, item.getKey())); + } + } + } + + @Test + public void unknownEventTypeDoesNotCauseError() throws Exception { + verifyEventCausesNoStreamRestart("what", ""); + } + + @Test + public void streamWillReconnectAfterGeneralIOException() throws Exception { + Handler streamHandler = streamResponse(EMPTY_DATA_EVENT); + + try (HttpServer server = HttpServer.start(streamHandler)) { + TcpHandler errorThenSuccess = TcpHandlers.sequential( + TcpHandlers.noResponse(), // this will cause an IOException due to closing the connection without a response + 
TcpHandlers.forwardToPort(server.getPort()) + ); + try (TcpServer forwardingServer = TcpServer.start(errorThenSuccess)) { + try (StreamProcessor sp = createStreamProcessor(null, forwardingServer.getHttpUri())) { + startAndWait(sp); + + assertThat(server.getRecorder().count(), equalTo(1)); // the HTTP server doesn't see the initial request that the forwardingServer rejected + assertThat(dataSourceUpdates.getLastStatus().getLastError(), notNullValue()); + assertThat(dataSourceUpdates.getLastStatus().getLastError().getKind(), equalTo(ErrorKind.NETWORK_ERROR)); + } + } + } + } + + @Test + public void streamInitDiagnosticRecordedOnOpen() throws Exception { + DiagnosticStore acc = basicDiagnosticStore(); + long startTime = System.currentTimeMillis(); + + try (HttpServer server = HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri(), acc)) { + startAndWait(sp); + + long timeAfterOpen = System.currentTimeMillis(); + LDValue event = acc.createEventAndReset(0, 0).getJsonValue(); + LDValue streamInits = event.get("streamInits"); + assertEquals(1, streamInits.size()); + LDValue init = streamInits.get(0); + assertFalse(init.get("failed").booleanValue()); + assertThat(init.get("timestamp").longValue(), + allOf(greaterThanOrEqualTo(startTime), lessThanOrEqualTo(timeAfterOpen))); + assertThat(init.get("durationMillis").longValue(), lessThanOrEqualTo(timeAfterOpen - startTime)); + } + } + } + + @Test + public void streamInitDiagnosticRecordedOnErrorDuringInit() throws Exception { + DiagnosticStore acc = basicDiagnosticStore(); + long startTime = System.currentTimeMillis(); + + Handler errorHandler = Handlers.status(503); + Handler streamHandler = streamResponse(EMPTY_DATA_EVENT); + Handler errorThenSuccess = Handlers.sequential(errorHandler, streamHandler); + + try (HttpServer server = HttpServer.start(errorThenSuccess)) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri(), acc)) { + 
startAndWait(sp); + + long timeAfterOpen = System.currentTimeMillis(); + LDValue event = acc.createEventAndReset(0, 0).getJsonValue(); + + LDValue streamInits = event.get("streamInits"); + assertEquals(2, streamInits.size()); + LDValue init0 = streamInits.get(0); + assertTrue(init0.get("failed").booleanValue()); + assertThat(init0.get("timestamp").longValue(), + allOf(greaterThanOrEqualTo(startTime), lessThanOrEqualTo(timeAfterOpen))); + assertThat(init0.get("durationMillis").longValue(), lessThanOrEqualTo(timeAfterOpen - startTime)); + + LDValue init1 = streamInits.get(1); + assertFalse(init1.get("failed").booleanValue()); + assertThat(init1.get("timestamp").longValue(), + allOf(greaterThanOrEqualTo(init0.get("timestamp").longValue()), lessThanOrEqualTo(timeAfterOpen))); + } + } + } + + @Test + public void http400ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(400); + } + + @Test + public void http401ErrorIsUnrecoverable() throws Exception { + testUnrecoverableHttpError(401); + } + + @Test + public void http403ErrorIsUnrecoverable() throws Exception { + testUnrecoverableHttpError(403); + } + + @Test + public void http408ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(408); + } + + @Test + public void http429ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(429); + } + + @Test + public void http500ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(500); + } + + @Test + public void putEventWithInvalidJsonCausesStreamRestart() throws Exception { + verifyEventCausesStreamRestart("put", "{sorry", ErrorKind.INVALID_DATA); + } + + @Test + public void putEventWithWellFormedJsonButInvalidDataCausesStreamRestart() throws Exception { + verifyEventCausesStreamRestart("put", "{\"data\":{\"flags\":3}}", ErrorKind.INVALID_DATA); + } + + @Test + public void patchEventWithInvalidJsonCausesStreamRestart() throws Exception { + verifyEventCausesStreamRestart("patch", "{sorry", ErrorKind.INVALID_DATA); + } + + 
@Test + public void patchEventWithWellFormedJsonButInvalidDataCausesStreamRestart() throws Exception { + verifyEventCausesStreamRestart("patch", "{\"path\":\"/flags/flagkey\", \"data\":{\"rules\":3}}", ErrorKind.INVALID_DATA); + } + + @Test + public void patchEventWithInvalidPathCausesNoStreamRestart() throws Exception { + verifyEventCausesNoStreamRestart("patch", "{\"path\":\"/wrong\", \"data\":{\"key\":\"flagkey\"}}"); + } + + @Test + public void patchEventWithNullPathCausesStreamRestart() throws Exception { + verifyEventCausesStreamRestart("patch", "{\"path\":null, \"data\":{\"key\":\"flagkey\"}}", ErrorKind.INVALID_DATA); + } + + @Test + public void deleteEventWithInvalidJsonCausesStreamRestart() throws Exception { + verifyEventCausesStreamRestart("delete", "{sorry", ErrorKind.INVALID_DATA); + } + + @Test + public void deleteEventWithInvalidPathCausesNoStreamRestart() throws Exception { + verifyEventCausesNoStreamRestart("delete", "{\"path\":\"/wrong\", \"version\":1}"); + } + + @Test + public void indirectPatchEventWithInvalidPathDoesNotCauseStreamRestart() throws Exception { + verifyEventCausesNoStreamRestart("indirect/patch", "/wrong"); + } + + @Test + public void restartsStreamIfStoreNeedsRefresh() throws Exception { + try (HttpServer server = HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + startAndWait(sp); + dataSourceUpdates.awaitInit(); + server.getRecorder().requireRequest(); + + dataStoreStatusProvider.updateStatus(new DataStoreStatusProvider.Status(false, false)); + dataStoreStatusProvider.updateStatus(new DataStoreStatusProvider.Status(true, true)); + + dataSourceUpdates.awaitInit(); + server.getRecorder().requireRequest(); + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + } + + @Test + public void doesNotRestartStreamIfStoreHadOutageButDoesNotNeedRefresh() throws Exception { + try (HttpServer server = 
HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + startAndWait(sp); + dataSourceUpdates.awaitInit(); + server.getRecorder().requireRequest(); + + dataStoreStatusProvider.updateStatus(new DataStoreStatusProvider.Status(false, false)); + dataStoreStatusProvider.updateStatus(new DataStoreStatusProvider.Status(true, false)); + + server.getRecorder().requireNoRequests(100, TimeUnit.MILLISECONDS); + } + } + } + + private void verifyStoreErrorCausesStreamRestart(String eventName, String eventData) throws Exception { + AtomicInteger updateCount = new AtomicInteger(0); + Runnable preUpdateHook = () -> { + int count = updateCount.incrementAndGet(); + if (count == 2) { + // only fail on the 2nd update - the first is the one caused by the initial "put" in the test setup + throw new RuntimeException("sorry"); + } + }; + DelegatingDataStore delegatingStore = new DelegatingDataStore(dataStore, preUpdateHook); + dataStoreStatusProvider = new MockDataStoreStatusProvider(false); // false = the store does not provide status monitoring + dataSourceUpdates = TestComponents.dataSourceUpdates(delegatingStore, dataStoreStatusProvider); + + verifyEventCausesStreamRestart(eventName, eventData, ErrorKind.STORE_ERROR); + } + + @Test + public void storeFailureOnPutCausesStreamRestart() throws Exception { + verifyStoreErrorCausesStreamRestart("put", emptyPutEvent().getData()); + } + + @Test + public void storeFailureOnPatchCausesStreamRestart() throws Exception { + String patchData = "{\"path\":\"/flags/flagkey\",\"data\":{\"key\":\"flagkey\",\"version\":1}}"; + verifyStoreErrorCausesStreamRestart("patch", patchData); + } + + @Test + public void storeFailureOnDeleteCausesStreamRestart() throws Exception { + String deleteData = "{\"path\":\"/flags/flagkey\",\"version\":1}"; + verifyStoreErrorCausesStreamRestart("delete", deleteData); + } + + @Test + public void sseCommentIsIgnored() throws Exception { + 
BlockingQueue events = new LinkedBlockingQueue<>(); + events.add(EMPTY_DATA_EVENT); + + try (HttpServer server = HttpServer.start(streamResponseFromQueue(events))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + startAndWait(sp); + + events.add(": this is a comment"); + + // Do something after the comment, just to verify that the stream is still working + events.add(makePatchEvent("/flags/" + FEATURE.getKey(), FEATURES, FEATURE)); + dataSourceUpdates.awaitUpsert(); + } + assertThat(server.getRecorder().count(), equalTo(1)); // did not restart + assertThat(dataSourceUpdates.getLastStatus().getLastError(), nullValue()); + } + } + + private void verifyEventCausesNoStreamRestart(String eventName, String eventData) throws Exception { + BlockingQueue events = new LinkedBlockingQueue<>(); + events.add(EMPTY_DATA_EVENT); + + try (HttpServer server = HttpServer.start(streamResponseFromQueue(events))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + startAndWait(sp); + + events.add(makeEvent(eventName, eventData)); + + // Do something after the test event, just to verify that the stream is still working + events.add(makePatchEvent("/flags/" + FEATURE.getKey(), FEATURES, FEATURE)); + dataSourceUpdates.awaitUpsert(); + } + assertThat(server.getRecorder().count(), equalTo(1)); // did not restart + assertThat(dataSourceUpdates.getLastStatus().getLastError(), nullValue()); + } + } + + private void verifyEventCausesStreamRestart(String eventName, String eventData, ErrorKind expectedError) throws Exception { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + BlockingQueue events = new LinkedBlockingQueue<>(); + events.add(EMPTY_DATA_EVENT); + + Handler responses = Handlers.sequential( + streamResponseFromQueue(events), // use a queue for the first request so we can control it below + streamThatSendsEventsAndThenStaysOpen(EMPTY_DATA_EVENT) // second 
request just gets a "put" + ); + try (HttpServer server = HttpServer.start(responses)) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + dataSourceUpdates.awaitInit(); + server.getRecorder().requireRequest(); + + // first connection succeeds and gets the "put" + requireDataSourceStatus(statuses, State.VALID); + + // now, cause a problematic event to appear + events.add(makeEvent(eventName, eventData)); + + server.getRecorder().requireRequest(); + dataSourceUpdates.awaitInit(); + + Status status = requireDataSourceStatus(statuses, State.INTERRUPTED); + assertThat(status.getLastError(), notNullValue()); + assertThat(status.getLastError().getKind(), equalTo(expectedError)); + + requireDataSourceStatus(statuses, State.VALID); + } + } + } + + @Test + public void testSpecialHttpConfigurations() throws Exception { + Handler handler = streamResponse(EMPTY_DATA_EVENT); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.register(statuses::add); + + SpecialHttpConfigurations.testAll(handler, + (URI serverUri, SpecialHttpConfigurations.Params params) -> { + LDConfig config = baseConfig() + .http(TestUtil.makeHttpConfigurationFromTestParams(params)) + .build(); + + statuses.clear(); + + try (StreamProcessor sp = createStreamProcessor(config, serverUri)) { + sp.start(); + + Status status = ConcurrentHelpers.awaitValue(statuses, 1, TimeUnit.SECONDS); + if (status.getState() == State.VALID) { + return true; + } + assertNotNull(status.getLastError()); + assertEquals(ErrorKind.NETWORK_ERROR, status.getLastError().getKind()); + throw new IOException(status.getLastError().getMessage()); + } + }); + } + + @Test + public void closingStreamProcessorDoesNotLogNetworkError() throws Exception { + // This verifies that we're not generating misleading log output or status updates + // due to simply seeing a broken connection when we have already decided to shut down. 
+ BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + try (HttpServer server = HttpServer.start(streamResponse(EMPTY_DATA_EVENT))) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + dataSourceUpdates.awaitInit(); + requireDataSourceStatus(statuses, State.VALID); + + while (logCapture.awaitMessage(10) != null) {} // drain captured logs + + sp.close(); + + requireDataSourceStatus(statuses, State.OFF); // should not see INTERRUPTED + assertNoMoreValues(statuses, 100, TimeUnit.MILLISECONDS); + + assertThat(logCapture.requireMessage(10).getText(), startsWith("Closing LaunchDarkly")); + // There shouldn't be any other log output other than debugging + for (;;) { + LogCapture.Message message = logCapture.awaitMessage(10); + if (message == null) { + break; + } + assertThat(message.getLevel(), equalTo(LDLogLevel.DEBUG)); + } + } + } + } + + @Test + public void streamFailingWithIncompleteEventDoesNotLogJsonError() throws Exception { + String incompleteEvent = "event: put\ndata: {\"flags\":"; + Handler stream1 = Handlers.all( + Handlers.SSE.start(), + Handlers.writeChunkString(incompleteEvent) + ); + Handler stream2 = streamResponse(EMPTY_DATA_EVENT); + Handler stream1Then2 = Handlers.sequential(stream1, stream2); + + try (HttpServer server = HttpServer.start(stream1Then2)) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + sp.start(); + dataSourceUpdates.awaitInit(); + + assertThat(logCapture.awaitMessage(LDLogLevel.ERROR, 0), nullValue()); + } + } + } + + private void testUnrecoverableHttpError(int statusCode) throws Exception { + Handler errorResp = Handlers.status(statusCode); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + try (HttpServer server = HttpServer.start(errorResp)) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + 
Future initFuture = sp.start(); + assertFutureIsCompleted(initFuture, 2, TimeUnit.SECONDS); + + assertFalse(sp.isInitialized()); + + Status newStatus = requireDataSourceStatus(statuses, State.OFF); + assertEquals(ErrorKind.ERROR_RESPONSE, newStatus.getLastError().getKind()); + assertEquals(statusCode, newStatus.getLastError().getStatusCode()); + + server.getRecorder().requireRequest(); + server.getRecorder().requireNoRequests(50, TimeUnit.MILLISECONDS); + } + } + } + + private void testRecoverableHttpError(int statusCode) throws Exception { + Semaphore closeFirstStreamSignal = new Semaphore(0); + Handler errorResp = Handlers.status(statusCode); + Handler stream1Resp = closableStreamResponse(EMPTY_DATA_EVENT, closeFirstStreamSignal); + Handler stream2Resp = streamResponse(EMPTY_DATA_EVENT); + + // Set up the sequence of responses that we'll receive below. + Handler seriesOfResponses = Handlers.sequential(errorResp, stream1Resp, errorResp, stream2Resp); + + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.statusBroadcaster.register(statuses::add); + + try (HttpServer server = HttpServer.start(seriesOfResponses)) { + try (StreamProcessor sp = createStreamProcessor(null, server.getUri())) { + Future initFuture = sp.start(); + assertFutureIsCompleted(initFuture, 2, TimeUnit.SECONDS); + + assertTrue(sp.isInitialized()); + + // The first stream request receives an error response (errorResp). + Status failureStatus1 = requireDataSourceStatus(statuses, State.INITIALIZING); + assertEquals(ErrorKind.ERROR_RESPONSE, failureStatus1.getLastError().getKind()); + assertEquals(statusCode, failureStatus1.getLastError().getStatusCode()); + + // It tries to reconnect, and gets a valid response (stream1Resp). Now the stream is active. + Status successStatus1 = requireDataSourceStatus(statuses, State.VALID); + assertSame(failureStatus1.getLastError(), successStatus1.getLastError()); + + // Now we'll trigger a disconnection of the stream. 
The SDK detects that as a + // NETWORK_ERROR. The state changes to INTERRUPTED because it was previously connected. + closeFirstStreamSignal.release(); + Status failureStatus2 = requireDataSourceStatus(statuses, State.INTERRUPTED); + assertEquals(ErrorKind.NETWORK_ERROR, failureStatus2.getLastError().getKind()); + + // It tries to reconnect, and gets another errorResp. The state is still INTERRUPTED. + Status failureStatus3 = requireDataSourceStatus(statuses, State.INTERRUPTED); + assertEquals(ErrorKind.ERROR_RESPONSE, failureStatus3.getLastError().getKind()); + assertEquals(statusCode, failureStatus3.getLastError().getStatusCode()); + + // It tries again, and finally gets a valid response (stream2Resp). + Status successStatus2 = requireDataSourceStatus(statuses, State.VALID); + assertSame(failureStatus3.getLastError(), successStatus2.getLastError()); + } + } + } + + private StreamProcessor createStreamProcessor(URI streamUri) { + return createStreamProcessor(baseConfig().build(), streamUri, null); + } + + private StreamProcessor createStreamProcessor(LDConfig config, URI streamUri, DiagnosticStore acc) { + return new StreamProcessor( + ComponentsImpl.toHttpProperties(clientContext(SDK_KEY, config == null ? 
baseConfig().build() : config).getHttp()), + dataSourceUpdates, + Thread.MIN_PRIORITY, + acc, + streamUri, + null, + BRIEF_RECONNECT_DELAY, + testLogger + ); + } + + private StreamProcessor createStreamProcessor(LDConfig config, URI streamUri) { + return createStreamProcessor(config, streamUri, null); + } + + private static void startAndWait(StreamProcessor sp) { + Future ready = sp.start(); + try { + ready.get(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private MessageEvent emptyPutEvent() { + return new MessageEvent("{\"data\":{\"flags\":{},\"segments\":{}}}"); + } + + private void assertFeatureInStore(DataModel.FeatureFlag feature) { + assertEquals(feature.getVersion(), dataStore.get(FEATURES, feature.getKey()).getVersion()); + } + + private void assertSegmentInStore(DataModel.Segment segment) { + assertEquals(segment.getVersion(), dataStore.get(SEGMENTS, segment.getKey()).getVersion()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestComponents.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestComponents.java new file mode 100644 index 0000000..5719309 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestComponents.java @@ -0,0 +1,424 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.Logs; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.DiagnosticStore; +import com.launchdarkly.sdk.internal.events.Event; +import com.launchdarkly.sdk.internal.events.EventsConfiguration; +import com.launchdarkly.sdk.internal.events.DiagnosticStore.SdkDiagnosticParams; +import com.launchdarkly.sdk.internal.http.HttpProperties; +import 
com.launchdarkly.sdk.server.integrations.EventProcessorBuilder; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.interfaces.FlagChangeListener; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.sdk.server.subsystems.EventProcessor; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; + +@SuppressWarnings("javadoc") +public class 
TestComponents { + public static ScheduledExecutorService sharedExecutor = newSingleThreadScheduledExecutor( + new ThreadFactoryBuilder().setNameFormat("TestComponents-sharedExecutor-%d").build()); + + public static LDLogger nullLogger = LDLogger.withAdapter(Logs.none(), ""); + + public static DiagnosticStore basicDiagnosticStore() { + return new DiagnosticStore(new SdkDiagnosticParams("sdk_key", "sdk", "1.0.0", "Java", null, null, null)); + } + + public static ClientContextImpl clientContext(String sdkKey, LDConfig config) { + return ClientContextImpl.fromConfig(sdkKey, config, sharedExecutor); + } + + public static ClientContextImpl clientContext(String sdkKey, LDConfig config, + DataSourceUpdateSink dataSourceUpdateSink) { + return ClientContextImpl.fromConfig(sdkKey, config, sharedExecutor).withDataSourceUpdateSink(dataSourceUpdateSink); + } + + public static HttpConfiguration defaultHttpConfiguration() { + return clientContext("", LDConfig.DEFAULT).getHttp(); + } + + public static HttpProperties defaultHttpProperties() { + return ComponentsImpl.toHttpProperties(defaultHttpConfiguration()); + } + + public static DataStore dataStoreThatThrowsException(RuntimeException e) { + return new DataStoreThatThrowsException(e); + } + + public static MockDataSourceUpdates dataSourceUpdates(DataStore store) { + return dataSourceUpdates(store, null); + } + + public static MockDataSourceUpdates dataSourceUpdates(DataStore store, DataStoreStatusProvider dataStoreStatusProvider) { + return new MockDataSourceUpdates(store, dataStoreStatusProvider); + } + + static EventsConfiguration defaultEventsConfig() { + return makeEventsConfig(false, null); + } + + public static DataSource failedDataSource() { + return new DataSourceThatNeverInitializes(); + } + + public static DataStore inMemoryDataStore() { + return new InMemoryDataStore(); // this is for tests in other packages which can't see this concrete class + } + + public static DataStore initedDataStore() { + DataStore store = new 
InMemoryDataStore(); + store.init(new FullDataSet(null)); + return store; + } + + static EventsConfiguration makeEventsConfig(boolean allAttributesPrivate, + Collection privateAttributes) { + return new EventsConfiguration( + allAttributesPrivate, + 0, + null, + EventProcessorBuilder.DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL.toMillis(), + null, + null, + 0, + null, + EventProcessorBuilder.DEFAULT_FLUSH_INTERVAL.toMillis(), + false, + false, + privateAttributes + ); + } + + public static ComponentConfigurer specificComponent(final T instance) { + return context -> instance; + } + + public static class TestEventProcessor implements EventProcessor { + volatile List events = new ArrayList<>(); + volatile int flushCount; + + @Override + public void flush() { + flushCount++; + } + + @Override + public void close() throws IOException { + } + + @Override + public void recordEvaluationEvent(LDContext context, String flagKey, int flagVersion, int variation, LDValue value, + EvaluationReason reason, LDValue defaultValue, String prerequisiteOfFlagKey, boolean requireFullEvent, + Long debugEventsUntilDate, boolean excludeFromSummaries, + Long samplingRatio) { + events.add(new Event.FeatureRequest(System.currentTimeMillis(), flagKey, context, flagVersion, + variation, value, defaultValue, reason, prerequisiteOfFlagKey, requireFullEvent, debugEventsUntilDate, false)); + } + + @Override + public void recordIdentifyEvent(LDContext context) { + events.add(new Event.Identify(System.currentTimeMillis(), context)); + } + + + @Override + public void recordCustomEvent(LDContext context, String eventKey, LDValue data, Double metricValue) { + events.add(new Event.Custom(System.currentTimeMillis(), eventKey, context, data, metricValue)); + } + + @Override + public void recordMigrationEvent(MigrationOpTracker tracker) { + Optional event = tracker.createEvent(); + if(event.isPresent()) { + events.add(event.get()); + } + } + } + + private static class DataSourceThatNeverInitializes implements 
DataSource { + public Future start() { + return new CompletableFuture<>(); + } + + public boolean isInitialized() { + return false; + } + + public void close() throws IOException { + } + }; + + public static class MockDataSourceUpdates implements DataSourceUpdateSink { + public static class UpsertParams { + public final DataKind kind; + public final String key; + public final ItemDescriptor item; + + UpsertParams(DataKind kind, String key, ItemDescriptor item) { + super(); + this.kind = kind; + this.key = key; + this.item = item; + } + } + + private final DataSourceUpdatesImpl wrappedInstance; + private final DataStoreStatusProvider dataStoreStatusProvider; + public final EventBroadcasterImpl flagChangeEventBroadcaster; + public final EventBroadcasterImpl + statusBroadcaster; + public final BlockingQueue> receivedInits = new LinkedBlockingQueue<>(); + public final BlockingQueue receivedUpserts = new LinkedBlockingQueue<>(); + + public MockDataSourceUpdates(DataStore store, DataStoreStatusProvider dataStoreStatusProvider) { + this.dataStoreStatusProvider = dataStoreStatusProvider; + this.flagChangeEventBroadcaster = EventBroadcasterImpl.forFlagChangeEvents(sharedExecutor, nullLogger); + this.statusBroadcaster = EventBroadcasterImpl.forDataSourceStatus(sharedExecutor, nullLogger); + this.wrappedInstance = new DataSourceUpdatesImpl( + store, + dataStoreStatusProvider, + flagChangeEventBroadcaster, + statusBroadcaster, + sharedExecutor, + null, + nullLogger + ); + } + + @Override + public boolean init(FullDataSet allData) { + boolean result = wrappedInstance.init(allData); + receivedInits.add(allData); + return result; + } + + @Override + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + boolean result = wrappedInstance.upsert(kind, key, item); + receivedUpserts.add(new UpsertParams(kind, key, item)); + return result; + } + + @Override + public DataStoreStatusProvider getDataStoreStatusProvider() { + return dataStoreStatusProvider; + } + + 
@Override + public void updateStatus(State newState, ErrorInfo newError) { + wrappedInstance.updateStatus(newState, newError); + } + + public DataSourceStatusProvider.Status getLastStatus() { + return wrappedInstance.getLastStatus(); + } + + // this method is surfaced for use by tests in other packages that can't see the EventBroadcasterImpl class + public void register(DataSourceStatusProvider.StatusListener listener) { + statusBroadcaster.register(listener); + } + + public FullDataSet awaitInit() { + return awaitValue(receivedInits, 5, TimeUnit.SECONDS); + } + + public UpsertParams awaitUpsert() { + return awaitValue(receivedUpserts, 5, TimeUnit.SECONDS); + } + } + + public static class ContextCapturingFactory implements ComponentConfigurer { + public volatile ClientContext clientContext; + private final ComponentConfigurer wrappedFactory; + + public ContextCapturingFactory(ComponentConfigurer wrappedFactory) { + this.wrappedFactory = wrappedFactory; + } + + @Override + public T build(ClientContext context) { + this.clientContext = context; + return wrappedFactory.build(context); + } + } + + private static class DataStoreThatThrowsException implements DataStore { + private final RuntimeException e; + + DataStoreThatThrowsException(RuntimeException e) { + this.e = e; + } + + public void close() throws IOException { } + + public ItemDescriptor get(DataKind kind, String key) { + throw e; + } + + public KeyedItems getAll(DataKind kind) { + throw e; + } + + public void init(FullDataSet allData) { + throw e; + } + + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + throw e; + } + + public boolean isInitialized() { + return true; + } + + public boolean isStatusMonitoringEnabled() { + return false; + } + + public CacheStats getCacheStats() { + return null; + } + } + + public static class DelegatingDataStore implements DataStore { + private final DataStore store; + private final Runnable preUpdateHook; + + public DelegatingDataStore(DataStore 
store, Runnable preUpdateHook) { + this.store = store; + this.preUpdateHook = preUpdateHook; + } + + @Override + public void close() throws IOException { + store.close(); + } + + @Override + public void init(FullDataSet allData) { + if (preUpdateHook != null) { + preUpdateHook.run(); + } + store.init(allData); + } + + @Override + public ItemDescriptor get(DataKind kind, String key) { + return store.get(kind, key); + } + + @Override + public KeyedItems getAll(DataKind kind) { + return store.getAll(kind); + } + + @Override + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + if (preUpdateHook != null) { + preUpdateHook.run(); + } + return store.upsert(kind, key, item); + } + + @Override + public boolean isInitialized() { + return store.isInitialized(); + } + + @Override + public boolean isStatusMonitoringEnabled() { + return store.isStatusMonitoringEnabled(); + } + + @Override + public CacheStats getCacheStats() { + return store.getCacheStats(); + } + } + + public static class MockDataStoreStatusProvider implements DataStoreStatusProvider { + public final EventBroadcasterImpl statusBroadcaster; + private final AtomicReference lastStatus; + private final boolean statusMonitoringEnabled; + + public MockDataStoreStatusProvider() { + this(true); + } + + public MockDataStoreStatusProvider(boolean statusMonitoringEnabled) { + this.statusBroadcaster = EventBroadcasterImpl.forDataStoreStatus(sharedExecutor, nullLogger); + this.lastStatus = new AtomicReference<>(new DataStoreStatusProvider.Status(true, false)); + this.statusMonitoringEnabled = statusMonitoringEnabled; + } + + // visible for tests + public void updateStatus(DataStoreStatusProvider.Status newStatus) { + if (newStatus != null) { + DataStoreStatusProvider.Status oldStatus = lastStatus.getAndSet(newStatus); + if (!newStatus.equals(oldStatus)) { + statusBroadcaster.broadcast(newStatus); + } + } + } + + @Override + public Status getStatus() { + return lastStatus.get(); + } + + @Override + 
public void addStatusListener(StatusListener listener) { + statusBroadcaster.register(listener); + } + + @Override + public void removeStatusListener(StatusListener listener) { + statusBroadcaster.unregister(listener); + } + + @Override + public boolean isStatusMonitoringEnabled() { + return statusMonitoringEnabled; + } + + @Override + public CacheStats getCacheStats() { + return null; + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestUtil.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestUtil.java new file mode 100644 index 0000000..4752d91 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/TestUtil.java @@ -0,0 +1,257 @@ +package com.launchdarkly.sdk.server; + +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.gson.Gson; +import com.google.gson.stream.JsonReader; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel.FeatureFlag; +import com.launchdarkly.sdk.server.DataModel.Segment; +import com.launchdarkly.sdk.server.DataModel.VersionedData; +import com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.interfaces.FlagChangeEvent; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.testhelpers.Assertions; +import com.launchdarkly.testhelpers.JsonAssertions; +import com.launchdarkly.testhelpers.httptest.SpecialHttpConfigurations; + +import java.io.StringReader; +import java.time.Duration; +import java.util.HashSet; +import java.util.Set; 
+import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toDataMap; +import static com.launchdarkly.sdk.server.JsonHelpers.serialize; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.fail; + +@SuppressWarnings("javadoc") +public class TestUtil { + /** + * We should use this instead of JsonHelpers.gsonInstance() in any test code that might be run from + * outside of this project (for instance, from java-server-sdk-redis or other integrations), because + * in that context the SDK classes might be coming from the default jar distribution where Gson is + * shaded. Therefore, if a test method tries to call an SDK implementation method like gsonInstance() + * that returns a Gson type, or one that takes an argument of a Gson type, that might fail at runtime + * because the Gson type has been changed to a shaded version. 
+ */ + public static final Gson TEST_GSON_INSTANCE = new Gson(); + + public static String getSdkVersion() { + return Version.SDK_VERSION; + } + + public static void assertJsonEquals(LDValue expected, LDValue actual) { + // Gives a better failure diff than assertEquals + JsonAssertions.assertJsonEquals(expected.toJsonString(), actual.toJsonString()); + } + + public static void upsertFlag(DataStore store, FeatureFlag flag) { + store.upsert(FEATURES, flag.getKey(), new ItemDescriptor(flag.getVersion(), flag)); + } + + public static void upsertSegment(DataStore store, Segment segment) { + store.upsert(SEGMENTS, segment.getKey(), new ItemDescriptor(segment.getVersion(), segment)); + } + + public static DataSourceStatusProvider.Status requireDataSourceStatus(BlockingQueue statuses, Duration timeout) { + return awaitValue(statuses, timeout.toMillis(), TimeUnit.MILLISECONDS); + } + + public static DataSourceStatusProvider.Status requireDataSourceStatus(BlockingQueue statuses) { + return requireDataSourceStatus(statuses, Duration.ofSeconds(5)); + // Using a fairly long default timeout here because there can be unpredictable execution delays + // in CI. If there's a test where we specifically need to enforce a smaller timeout, we can set + // that explicitly on a per-call basis. 
+ } + + public static DataSourceStatusProvider.Status requireDataSourceStatus(BlockingQueue statuses, + DataSourceStatusProvider.State expectedState, Duration timeout) { + DataSourceStatusProvider.Status status = requireDataSourceStatus(statuses, timeout); + assertEquals(expectedState, status.getState()); + return status; + } + + public static DataSourceStatusProvider.Status requireDataSourceStatus(BlockingQueue statuses, + DataSourceStatusProvider.State expectedState) { + return requireDataSourceStatus(statuses, expectedState, Duration.ofSeconds(5)); + } + + public static DataSourceStatusProvider.Status requireDataSourceStatusEventually(BlockingQueue statuses, + DataSourceStatusProvider.State expectedState, DataSourceStatusProvider.State possibleStateBeforeThat) { + return Assertions.assertPolledFunctionReturnsValue(2, TimeUnit.SECONDS, 0, null, () -> { + DataSourceStatusProvider.Status status = requireDataSourceStatus(statuses); + if (status.getState() == expectedState) { + return status; + } + assertEquals(possibleStateBeforeThat, status.getState()); + return null; + }); + } + + public static void assertDataSetEquals(FullDataSet expected, FullDataSet actual) { + String expectedJson = TEST_GSON_INSTANCE.toJson(toDataMap(expected)); + String actualJson = TEST_GSON_INSTANCE.toJson(toDataMap(actual)); + JsonAssertions.assertJsonEquals(expectedJson, actualJson); + } + + public static void assertItemEquals(VersionedData expected, ItemDescriptor item) { + assertEquals(expected.getVersion(), item.getVersion()); + assertEquals(expected.getClass(), item.getItem().getClass()); + JsonAssertions.assertJsonEquals(serialize(expected), serialize(item.getItem())); + } + + public static String describeDataSet(FullDataSet data) { + return Joiner.on(", ").join( + Iterables.transform(data.getData(), entry -> { + DataKind kind = entry.getKey(); + return "{" + kind + ": [" + + Joiner.on(", ").join( + Iterables.transform(entry.getValue().getItems(), item -> + 
kind.serialize(item.getValue()) + ) + ) + + "]}"; + })); + } + + public static JsonReader jsonReaderFrom(String data) { + return new JsonReader(new StringReader(data)); + } + + public static interface ActionCanThrowAnyException { + void apply(T param) throws Exception; + } + + public static void expectEvents(BlockingQueue events, String... flagKeys) { + Set expectedChangedFlagKeys = ImmutableSet.copyOf(flagKeys); + Set actualChangedFlagKeys = new HashSet<>(); + for (int i = 0; i < expectedChangedFlagKeys.size(); i++) { + T e = awaitValue(events, 1, TimeUnit.SECONDS); + actualChangedFlagKeys.add(e.getKey()); + } + assertThat(actualChangedFlagKeys, equalTo(expectedChangedFlagKeys)); + assertNoMoreValues(events, 100, TimeUnit.MILLISECONDS); + } + + public static EvalResult simpleEvaluation(int variation, LDValue value) { + return EvalResult.of(value, variation, EvaluationReason.fallthrough()); + } + + public static void assertFullyEqual(T a, T b) { + assertEquals(a, b); + assertEquals(b, a); + assertEquals(a.hashCode(), b.hashCode()); + } + + public static void assertFullyUnequal(T a, T b) { + assertNotEquals(a, b); + assertNotEquals(b, a); + } + + public static void assertThrows(Class exceptionClass, Runnable r) { + try { + r.run(); + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e.getClass(), equalTo(exceptionClass)); + } + } + + public static HttpConfigurationBuilder makeHttpConfigurationFromTestParams( + SpecialHttpConfigurations.Params params) { + HttpConfigurationBuilder b = Components.httpConfiguration(); + if (params.getTlsConfig() != null) { + b.sslSocketFactory(params.getTlsConfig().getSocketFactory(), params.getTlsConfig().getTrustManager()); + } + if (params.getProxyHost() != null) { + b.proxyHostAndPort(params.getProxyHost(), params.getProxyPort()); + if (params.getProxyBasicAuthUser() != null) { + b.proxyAuth(Components.httpBasicAuthentication(params.getProxyBasicAuthUser(), params.getProxyBasicAuthPassword())); + } + } + if 
(params.getSocketFactory() != null) { + b.socketFactory(params.getSocketFactory()); + } + return b; + } + + public interface BuilderPropertyTester { + void assertDefault(TValue defaultValue); + void assertCanSet(TValue newValue); + void assertSetIsChangedTo(TValue attempted, TValue resulting); + } + + public static class BuilderTestUtil { + private final Supplier constructor; + final Function buildMethod; + + public BuilderTestUtil(Supplier constructor, + Function buildMethod) { + this.constructor = constructor; + this.buildMethod = buildMethod; + } + + public BuilderPropertyTester property( + Function getter, + BiConsumer setter) { + return new BuilderPropertyTestImpl(this, getter, setter); + } + + public TBuilder createBuilder() { + return constructor.get(); + } + } + + static class BuilderPropertyTestImpl + implements BuilderPropertyTester { + private final BuilderTestUtil owner; + private final Function getter; + private final BiConsumer setter; + + public BuilderPropertyTestImpl(BuilderTestUtil owner, + Function getter, + BiConsumer setter) { + this.owner = owner; + this.getter = getter; + this.setter = setter; + } + + public void assertDefault(TValue defaultValue) { + assertValue(owner.createBuilder(), defaultValue); + } + + public void assertCanSet(TValue newValue) { + assertSetIsChangedTo(newValue, newValue); + } + + public void assertSetIsChangedTo(TValue attempted, TValue resulting) { + TBuilder builder = owner.createBuilder(); + setter.accept(builder, attempted); + assertValue(builder, resulting); + } + + private void assertValue(TBuilder b, TValue expected) { + TBuilt built = owner.buildMethod.apply(b); + assertEquals(expected, getter.apply(built)); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/UtilTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/UtilTest.java new file mode 100644 index 0000000..ef7267e --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/UtilTest.java @@ 
-0,0 +1,79 @@ +package com.launchdarkly.sdk.server; + +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; +import com.launchdarkly.sdk.server.interfaces.HttpAuthentication; + +import org.junit.Test; + +import java.time.Duration; + +import static com.launchdarkly.sdk.server.Util.applicationTagHeader; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import okhttp3.Authenticator; +import okhttp3.Protocol; +import okhttp3.Request; +import okhttp3.Response; + +@SuppressWarnings("javadoc") +public class UtilTest extends BaseTest { + @Test + public void useOurBasicAuthenticatorAsOkhttpProxyAuthenticator() throws Exception { + HttpAuthentication ourAuth = Components.httpBasicAuthentication("user", "pass"); + Authenticator okhttpAuth = Util.okhttpAuthenticatorFromHttpAuthStrategy(ourAuth); + + Request originalRequest = new Request.Builder().url("http://proxy").build(); + Response resp1 = new Response.Builder() + .request(originalRequest) + .message("") + .protocol(Protocol.HTTP_1_1) + .header("Proxy-Authentication", "Basic realm=x") + .code(407) + .build(); + + Request newRequest = okhttpAuth.authenticate(null, resp1); + + assertEquals("Basic dXNlcjpwYXNz", newRequest.header("Proxy-Authorization")); + + // simulate the proxy rejecting us again + Response resp2 = new Response.Builder() + .request(newRequest) + .message("") + .protocol(Protocol.HTTP_1_1) + .header("Proxy-Authentication", "Basic realm=x") + .code(407) + .build(); + + assertNull(okhttpAuth.authenticate(null, resp2)); // null tells OkHttp to give up + } + + @Test + public void describeDuration() { + assertEquals("15 milliseconds", Util.describeDuration(Duration.ofMillis(15))); + assertEquals("1500 milliseconds", Util.describeDuration(Duration.ofMillis(1500))); + assertEquals("1 second", Util.describeDuration(Duration.ofMillis(1000))); + assertEquals("2 seconds", Util.describeDuration(Duration.ofMillis(2000))); + assertEquals("70 seconds", 
Util.describeDuration(Duration.ofMillis(70000))); + assertEquals("1 minute", Util.describeDuration(Duration.ofMillis(60000))); + assertEquals("2 minutes", Util.describeDuration(Duration.ofMillis(120000))); + } + + @Test + public void testApplicationTagHeader() { + assertEquals("", applicationTagHeader(new ApplicationInfo(null, null), testLogger)); + assertEquals("application-id/foo", applicationTagHeader(new ApplicationInfo("foo", null), testLogger)); + assertEquals("application-version/1.0.0", + applicationTagHeader(new ApplicationInfo(null, "1.0.0"), testLogger)); + assertEquals("application-id/foo application-version/1.0.0", + applicationTagHeader(new ApplicationInfo("foo", "1.0.0"), testLogger)); + // Values with invalid characters get discarded + assertEquals("", applicationTagHeader(new ApplicationInfo("invalid name", "lol!"), testLogger)); + // Values over 64 chars get discarded + assertEquals("", applicationTagHeader( + new ApplicationInfo("look-at-this-incredibly-long-application-id-like-wow-it-sure-is-verbose", null), + testLogger)); + // Empty values get discarded + assertEquals("", applicationTagHeader(new ApplicationInfo("", ""), testLogger)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilderTest.java new file mode 100644 index 0000000..9b9977e --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ApplicationInfoBuilderTest.java @@ -0,0 +1,27 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class ApplicationInfoBuilderTest { + @Test + public void infoBuilder() { + ApplicationInfo 
i1 = Components.applicationInfo() + .createApplicationInfo(); + assertNull(i1.getApplicationId()); + assertNull(i1.getApplicationVersion()); + + ApplicationInfo i2 = Components.applicationInfo() + .applicationId("authentication-service") + .applicationVersion("1.0.0") + .createApplicationInfo(); + assertEquals("authentication-service", i2.getApplicationId()); + assertEquals("1.0.0", i2.getApplicationVersion()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBase.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBase.java new file mode 100644 index 0000000..ce07b00 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBase.java @@ -0,0 +1,181 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.BaseTest; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.Membership; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.StoreMetadata; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.createMembershipFromSegmentRefs; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +/** + * A configurable test class for all implementations of {@link BigSegmentStore}. + *

    + * Each implementation of {@link BigSegmentStore} should define a test class that is a subclass of + * this class for their implementation type, and run it in the unit tests for their project. + *

    + * The tests are configured for the details specific to the implementation type by overriding the + * abstract methods {@link #makeStore(String)}, {@link #clearData(String)}, + * {@link #setMetadata(String, StoreMetadata)}, and {@link #setSegments(String, String, Iterable, Iterable)}. + */ +@SuppressWarnings("javadoc") +public abstract class BigSegmentStoreTestBase extends BaseTest { + private static final String prefix = "testprefix"; + private static final String fakeUserHash = "userhash"; + private static final String segmentRef1 = "key1", segmentRef2 = "key2", segmentRef3 = "key3"; + private static final String[] allSegmentRefs = {segmentRef1, segmentRef2, segmentRef3}; + + private ClientContext makeClientContext() { + return clientContext("", baseConfig().build()); + } + + private BigSegmentStore makeEmptyStore() throws Exception { + BigSegmentStore store = makeStore(prefix).build(makeClientContext()); + try { + clearData(prefix); + } catch (RuntimeException ex) { + store.close(); + throw ex; + } + return store; + } + + @Test + public void missingMetadata() throws Exception { + try (BigSegmentStore store = makeEmptyStore()) { + assertNull(store.getMetadata()); + } + } + + @Test + public void validMetadata() throws Exception { + try (BigSegmentStore store = makeEmptyStore()) { + StoreMetadata metadata = new StoreMetadata(System.currentTimeMillis()); + setMetadata(prefix, metadata); + + StoreMetadata result = store.getMetadata(); + assertNotNull(result); + assertEquals(metadata.getLastUpToDate(), result.getLastUpToDate()); + } + } + + @Test + public void membershipNotFound() throws Exception { + try (BigSegmentStore store = makeEmptyStore()) { + Membership membership = store.getMembership(fakeUserHash); + + // Either null or an empty membership is allowed + if (membership != null) { + assertEqualMembership(createMembershipFromSegmentRefs(null, null), membership); + } + } + } + + @Test + public void membershipFound() throws Exception { + List membershipsList 
= Arrays.asList( + new Memberships(Collections.singleton(segmentRef1), null), + new Memberships(Arrays.asList(segmentRef1, segmentRef2), null), + new Memberships(null, Collections.singleton(segmentRef1)), + new Memberships(null, Arrays.asList(segmentRef1, segmentRef2)), + new Memberships(Arrays.asList(segmentRef1, segmentRef2), Arrays.asList(segmentRef2, segmentRef3))); + + for (Memberships memberships : membershipsList) { + try (BigSegmentStore store = makeEmptyStore()) { + setSegments(prefix, fakeUserHash, memberships.inclusions, memberships.exclusions); + Membership membership = store.getMembership(fakeUserHash); + assertEqualMembership(createMembershipFromSegmentRefs(memberships.inclusions, memberships.exclusions), membership); + } + } + } + + private static class Memberships { + final Iterable inclusions; + final Iterable exclusions; + + Memberships(Iterable inclusions, Iterable exclusions) { + this.inclusions = inclusions == null ? Collections.emptyList() : inclusions; + this.exclusions = exclusions == null ? Collections.emptyList() : exclusions; + } + } + + private void assertEqualMembership(Membership expected, Membership actual) { + if (actual.getClass().getCanonicalName() + .startsWith("com.launchdarkly.sdk.server.interfaces.BigSegmentStoreTypes.MembershipBuilder")) { + // The store implementation is using our standard membership types, so we can rely on the + // standard equality test for those + assertEquals(expected, actual); + } else { + // The store implementation has implemented Membership some other way, so we have to check for + // the inclusion or exclusion of specific keys + for (String segmentRef : allSegmentRefs) { + Boolean expectedMembership = expected.checkMembership(segmentRef); + Boolean actualMembership = actual.checkMembership(segmentRef); + if (!Objects.equals(actualMembership, expectedMembership)) { + Assert.fail(String.format("expected membership for %s to be %s but was %s", + segmentRef, + expectedMembership == null ? 
"null" : expectedMembership.toString(), + actualMembership == null ? "null" : actualMembership.toString())); + } + } + } + } + + /** + * Test classes should override this method to return a configured factory for the subject + * implementation of {@link BigSegmentStore}. + *

    + * If the prefix string is {@code null} or the empty string, it should use the default prefix + * defined by the data store implementation. The factory must include any necessary configuration + * that may be appropriate for the test environment (for instance, pointing it to a database + * instance that has been set up for the tests). + * + * @param prefix the database prefix + * @return the configured factory + */ + protected abstract ComponentConfigurer makeStore(String prefix); + + /** + * Test classes should override this method to clear all data from the underlying data store for + * the specified prefix string. + * + * @param prefix the database prefix + */ + protected abstract void clearData(String prefix); + + /** + * Test classes should override this method to update the store metadata for the given prefix in + * the underlying data store. + * + * @param prefix the database prefix + * @param metadata the data to write to the store + */ + protected abstract void setMetadata(String prefix, StoreMetadata metadata); + + /** + * Test classes should override this method to update the store metadata for the given prefix in + * the underlying data store. 
+ * + * @param prefix the database prefix + * @param userHashKey the hashed user key + * @param includedSegmentRefs segment references to be included + * @param excludedSegmentRefs segment references to be excluded + */ + protected abstract void setSegments(String prefix, + String userHashKey, + Iterable includedSegmentRefs, + Iterable excludedSegmentRefs); +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBaseTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBaseTest.java new file mode 100644 index 0000000..a787803 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentStoreTestBaseTest.java @@ -0,0 +1,89 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.Membership; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.StoreMetadata; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.createMembershipFromSegmentRefs; + +@SuppressWarnings("javadoc") +public class BigSegmentStoreTestBaseTest extends BigSegmentStoreTestBase { + // This runs BigSegmentStoreTestBase against a mock store implementation that is known to behave + // as expected, to verify that the test suite logic has the correct expectations. 
+ + private static class DataSet { + StoreMetadata metadata = null; + Map memberships = new HashMap<>(); + } + + private final Map allData = new HashMap<>(); + + private DataSet getOrCreateDataSet(String prefix) { + allData.putIfAbsent(prefix, new DataSet()); + return allData.get(prefix); + } + + @Override + protected ComponentConfigurer makeStore(String prefix) { + return new MockStoreFactory(getOrCreateDataSet(prefix)); + } + + @Override + protected void clearData(String prefix) { + DataSet dataSet = getOrCreateDataSet(prefix); + dataSet.metadata = null; + dataSet.memberships.clear(); + } + + @Override + protected void setMetadata(String prefix, StoreMetadata metadata) { + DataSet dataSet = getOrCreateDataSet(prefix); + dataSet.metadata = metadata; + } + + @Override + protected void setSegments(String prefix, String userHashKey, Iterable includedSegmentRefs, Iterable excludedSegmentRefs) { + DataSet dataSet = getOrCreateDataSet(prefix); + dataSet.memberships.put(userHashKey, createMembershipFromSegmentRefs(includedSegmentRefs, excludedSegmentRefs)); + } + + private static final class MockStoreFactory implements ComponentConfigurer { + private final DataSet data; + + private MockStoreFactory(DataSet data) { + this.data = data; + } + + @Override + public BigSegmentStore build(ClientContext context) { + return new MockStore(data); + } + } + + private static final class MockStore implements BigSegmentStore { + private final DataSet data; + + private MockStore(DataSet data) { + this.data = data; + } + + @Override + public Membership getMembership(String userHash) { + return data.memberships.get(userHash); + } + + @Override + public StoreMetadata getMetadata() { + return data.metadata; + } + + @Override + public void close() throws IOException { } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilderTest.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilderTest.java new file mode 100644 index 0000000..ed8f5b7 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/BigSegmentsConfigurationBuilderTest.java @@ -0,0 +1,82 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.TestComponents; +import com.launchdarkly.sdk.server.TestUtil.BuilderPropertyTester; +import com.launchdarkly.sdk.server.TestUtil.BuilderTestUtil; +import com.launchdarkly.sdk.server.interfaces.BigSegmentsConfiguration; +import com.launchdarkly.sdk.server.subsystems.BigSegmentStore; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; + +import org.easymock.IMocksControl; +import org.junit.Test; + +import java.time.Duration; + +import static com.launchdarkly.sdk.server.TestComponents.specificComponent; +import static org.easymock.EasyMock.createStrictControl; +import static org.junit.Assert.assertSame; + +@SuppressWarnings("javadoc") +public class BigSegmentsConfigurationBuilderTest { + + private final BuilderTestUtil tester; + + public BigSegmentsConfigurationBuilderTest() { + tester = new BuilderTestUtil<>(() -> Components.bigSegments(null), + b -> b.build(null)); + } + + @Test + public void storeFactory() { + IMocksControl ctrl = createStrictControl(); + BigSegmentStore storeMock = ctrl.createMock(BigSegmentStore.class); + ComponentConfigurer storeFactory = specificComponent(storeMock); + + BigSegmentsConfigurationBuilder b = Components.bigSegments(storeFactory); + BigSegmentsConfiguration c = b.build(TestComponents.clientContext("", new LDConfig.Builder().build())); + + assertSame(storeMock, c.getStore()); + } + + @Test + public void userCacheSize() { + BuilderPropertyTester prop = tester.property(BigSegmentsConfiguration::getUserCacheSize, + 
BigSegmentsConfigurationBuilder::userCacheSize); + prop.assertDefault(BigSegmentsConfigurationBuilder.DEFAULT_USER_CACHE_SIZE); + prop.assertCanSet(500); + prop.assertCanSet(0); + prop.assertSetIsChangedTo(-1, 0); + } + + @Test + public void userCacheTime() { + BuilderPropertyTester prop = tester.property(BigSegmentsConfiguration::getUserCacheTime, + BigSegmentsConfigurationBuilder::userCacheTime); + prop.assertDefault(BigSegmentsConfigurationBuilder.DEFAULT_USER_CACHE_TIME); + prop.assertCanSet(Duration.ofSeconds(10)); + prop.assertSetIsChangedTo(null, BigSegmentsConfigurationBuilder.DEFAULT_USER_CACHE_TIME); + prop.assertSetIsChangedTo(Duration.ofSeconds(-1), BigSegmentsConfigurationBuilder.DEFAULT_USER_CACHE_TIME); + } + + @Test + public void statusPollInterval() { + BuilderPropertyTester prop = tester.property(BigSegmentsConfiguration::getStatusPollInterval, + BigSegmentsConfigurationBuilder::statusPollInterval); + prop.assertDefault(BigSegmentsConfigurationBuilder.DEFAULT_STATUS_POLL_INTERVAL); + prop.assertCanSet(Duration.ofSeconds(10)); + prop.assertSetIsChangedTo(null, BigSegmentsConfigurationBuilder.DEFAULT_STATUS_POLL_INTERVAL); + prop.assertSetIsChangedTo(Duration.ofSeconds(-1), BigSegmentsConfigurationBuilder.DEFAULT_STATUS_POLL_INTERVAL); + } + + @Test + public void staleAfter() { + BuilderPropertyTester prop = tester.property(BigSegmentsConfiguration::getStaleAfter, + BigSegmentsConfigurationBuilder::staleAfter); + prop.assertDefault(BigSegmentsConfigurationBuilder.DEFAULT_STALE_AFTER); + prop.assertCanSet(Duration.ofSeconds(10)); + prop.assertSetIsChangedTo(null, BigSegmentsConfigurationBuilder.DEFAULT_STALE_AFTER); + prop.assertSetIsChangedTo(Duration.ofSeconds(-1), BigSegmentsConfigurationBuilder.DEFAULT_STALE_AFTER); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ClientWithFileDataSourceTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ClientWithFileDataSourceTest.java new 
file mode 100644 index 0000000..a706fc3 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ClientWithFileDataSourceTest.java @@ -0,0 +1,48 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDClient; +import com.launchdarkly.sdk.server.LDConfig; + +import org.junit.Test; + +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FLAG_VALUE_1; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FLAG_VALUE_1_KEY; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FULL_FLAG_1_KEY; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FULL_FLAG_1_VALUE; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.resourceFilePath; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +@SuppressWarnings("javadoc") +public class ClientWithFileDataSourceTest { + private static final LDContext user = LDContext.create("userkey"); + + private LDClient makeClient() throws Exception { + FileDataSourceBuilder fdsb = FileData.dataSource() + .filePaths(resourceFilePath("all-properties.json")); + LDConfig config = new LDConfig.Builder() + .dataSource(fdsb) + .events(Components.noEvents()) + .build(); + return new LDClient("sdkKey", config); + } + + @Test + public void fullFlagDefinitionEvaluatesAsExpected() throws Exception { + try (LDClient client = makeClient()) { + assertThat(client.jsonValueVariation(FULL_FLAG_1_KEY, user, LDValue.of("default")), + equalTo(FULL_FLAG_1_VALUE)); + } + } + + @Test + public void simplifiedFlagEvaluatesAsExpected() throws Exception { + try (LDClient client = makeClient()) { + assertThat(client.jsonValueVariation(FLAG_VALUE_1_KEY, user, LDValue.of("default")), + 
equalTo(FLAG_VALUE_1)); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/DataLoaderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/DataLoaderTest.java new file mode 100644 index 0000000..f572d86 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/DataLoaderTest.java @@ -0,0 +1,195 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.integrations.FileDataSourceImpl.DataBuilder; +import com.launchdarkly.sdk.server.integrations.FileDataSourceImpl.DataLoader; +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FileDataException; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.testhelpers.JsonTestValue; + +import org.junit.Assert; +import org.junit.Test; + +import java.util.Map; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toDataMap; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FLAG_VALUE_1_KEY; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.resourceFilePath; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.resourceLocation; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonIncludes; +import static com.launchdarkly.testhelpers.JsonTestValue.jsonOf; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +@SuppressWarnings("javadoc") +public 
class DataLoaderTest { + private DataBuilder builder = new DataBuilder(FileData.DuplicateKeysHandling.FAIL); + + @Test + public void canLoadFromFilePath() throws Exception { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths(resourceFilePath("flag-only.json")).sources); + ds.load(builder); + assertDataHasItemsOfKind(FEATURES); + } + + @Test + public void canLoadFromClasspath() throws Exception { + DataLoader ds = new DataLoader(FileData.dataSource().classpathResources(resourceLocation("flag-only.json")).sources); + ds.load(builder); + assertDataHasItemsOfKind(FEATURES); + } + + + @Test + public void yamlFileIsAutoDetected() throws Exception { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths(resourceFilePath("flag-only.yml")).sources); + ds.load(builder); + assertDataHasItemsOfKind(FEATURES); + } + + @Test + public void jsonFileIsAutoDetected() throws Exception { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths(resourceFilePath("segment-only.json")).sources); + ds.load(builder); + assertDataHasItemsOfKind(SEGMENTS); + } + + @Test + public void canLoadMultipleFiles() throws Exception { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("flag-only.json"), + resourceFilePath("segment-only.yml") + ).sources); + ds.load(builder); + assertDataHasItemsOfKind(FEATURES); + assertDataHasItemsOfKind(SEGMENTS); + } + + @Test + public void flagValueIsConvertedToFlag() throws Exception { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths(resourceFilePath("value-only.json")).sources); + String expected = + "{\"key\":\"flag2\",\"on\":true,\"fallthrough\":{\"variation\":0},\"variations\":[\"value2\"]," + + "\"trackEvents\":false,\"deleted\":false,\"version\":1}"; + ds.load(builder); + assertThat(getItemAsJson(builder, FEATURES, FLAG_VALUE_1_KEY), jsonIncludes(expected)); + // Note, we're using jsonIncludes instead of jsonEquals because the version of the Java + // SDK we're building against 
may have more properties than it did when the test was written. + } + + @Test + public void duplicateFlagKeyInFlagsThrowsExceptionByDefault() throws Exception { + try { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("flag-only.json"), + resourceFilePath("flag-with-duplicate-key.json") + ).sources); + ds.load(builder); + } catch (FileDataException e) { + assertThat(e.getMessage(), containsString("key \"flag1\" was already defined")); + } + } + + @Test + public void duplicateFlagKeyInFlagsAndFlagValuesThrowsExceptionByDefault() throws Exception { + try { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("flag-only.json"), + resourceFilePath("value-with-duplicate-key.json") + ).sources); + ds.load(builder); + } catch (FileDataException e) { + assertThat(e.getMessage(), containsString("key \"flag1\" was already defined")); + } + } + + @Test + public void duplicateSegmentKeyThrowsExceptionByDefault() throws Exception { + try { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("segment-only.json"), + resourceFilePath("segment-with-duplicate-key.json") + ).sources); + ds.load(builder); + } catch (FileDataException e) { + assertThat(e.getMessage(), containsString("key \"seg1\" was already defined")); + } + } + + @Test + public void duplicateKeysCanBeAllowed() throws Exception { + DataBuilder data1 = new DataBuilder(FileData.DuplicateKeysHandling.IGNORE); + DataLoader loader1 = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("flag-only.json"), + resourceFilePath("flag-with-duplicate-key.json") + ).sources); + loader1.load(data1); + assertThat(getItemAsJson(data1, FEATURES, "flag1"), jsonIncludes("{\"on\":true}")); // value from first file + + DataBuilder data2 = new DataBuilder(FileData.DuplicateKeysHandling.IGNORE); + DataLoader loader2 = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("value-with-duplicate-key.json"), + 
resourceFilePath("flag-only.json") + ).sources); + loader2.load(data2); + assertThat(getItemAsJson(data2, FEATURES, "flag2"), + jsonIncludes("{\"variations\":[\"value2a\"]}")); // value from first file + + DataBuilder data3 = new DataBuilder(FileData.DuplicateKeysHandling.IGNORE); + DataLoader loader3 = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("segment-only.json"), + resourceFilePath("segment-with-duplicate-key.json") + ).sources); + loader3.load(data3); + assertThat(getItemAsJson(data3, SEGMENTS, "seg1"), + jsonIncludes("{\"included\":[\"user1\"]}")); // value from first file + } + + @Test + public void versionsAreIncrementedForEachLoad() throws Exception { + DataLoader ds = new DataLoader(FileData.dataSource().filePaths( + resourceFilePath("flag-only.json"), + resourceFilePath("segment-only.json"), + resourceFilePath("value-only.json") + ).sources); + + DataBuilder data1 = new DataBuilder(FileData.DuplicateKeysHandling.FAIL); + ds.load(data1); + assertVersionsMatch(data1.build(), 1); + + DataBuilder data2 = new DataBuilder(FileData.DuplicateKeysHandling.FAIL); + ds.load(data2); + assertVersionsMatch(data2.build(), 2); + } + + private void assertDataHasItemsOfKind(DataKind kind) { + Map items = toDataMap(builder.build()).get(kind); + if (items == null || items.size() == 0) { + Assert.fail("expected at least one item in \"" + kind.getName() + "\", received: " + builder.build()); + } + } + + private void assertVersionsMatch(FullDataSet data, int expectedVersion) { + for (Map.Entry> kv1: data.getData()) { + DataKind kind = kv1.getKey(); + for (Map.Entry kv2: kv1.getValue().getItems()) { + ItemDescriptor item = kv2.getValue(); + String jsonData = kind.serialize(item); + assertThat("descriptor version of " + kv2.getKey(), item.getVersion(), equalTo(expectedVersion)); + assertThat("version in data model object of " + kv2.getKey(), LDValue.parse(jsonData).get("version"), + equalTo(LDValue.of(expectedVersion))); + } + } + } + + private 
JsonTestValue getItemAsJson(DataBuilder builder, DataKind kind, String key) { + ItemDescriptor flag = toDataMap(builder.build()).get(kind).get(key); + return jsonOf(kind.serialize(flag)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilderTest.java new file mode 100644 index 0000000..29f6c1f --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/EventProcessorBuilderTest.java @@ -0,0 +1,115 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.EventSender; + +import org.junit.Test; + +import java.time.Duration; + +import static com.launchdarkly.sdk.server.Components.sendEvents; +import static com.launchdarkly.sdk.server.integrations.EventProcessorBuilder.DEFAULT_CAPACITY; +import static com.launchdarkly.sdk.server.integrations.EventProcessorBuilder.DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL; +import static com.launchdarkly.sdk.server.integrations.EventProcessorBuilder.DEFAULT_FLUSH_INTERVAL; +import static com.launchdarkly.sdk.server.integrations.EventProcessorBuilder.DEFAULT_USER_KEYS_CAPACITY; +import static com.launchdarkly.sdk.server.integrations.EventProcessorBuilder.DEFAULT_USER_KEYS_FLUSH_INTERVAL; +import static com.launchdarkly.sdk.server.integrations.EventProcessorBuilder.MIN_DIAGNOSTIC_RECORDING_INTERVAL; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +@SuppressWarnings("javadoc") +public class EventProcessorBuilderTest { + @Test + public void allAttributesPrivate() { + assertEquals(false, sendEvents().allAttributesPrivate); + 
+ assertEquals(true, sendEvents().allAttributesPrivate(true).allAttributesPrivate); + + assertEquals(false, sendEvents() + .allAttributesPrivate(true) + .allAttributesPrivate(false) + .allAttributesPrivate); + } + + @Test + public void capacity() { + assertEquals(DEFAULT_CAPACITY, sendEvents().capacity); + + assertEquals(200, sendEvents().capacity(200).capacity); + } + + @Test + public void diagnosticRecordingInterval() { + EventProcessorBuilder builder1 = sendEvents(); + assertEquals(DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL, builder1.diagnosticRecordingInterval); + + EventProcessorBuilder builder2 = sendEvents().diagnosticRecordingInterval(Duration.ofSeconds(120)); + assertEquals(Duration.ofSeconds(120), builder2.diagnosticRecordingInterval); + + EventProcessorBuilder builder3 = sendEvents() + .diagnosticRecordingInterval(Duration.ofSeconds(120)) + .diagnosticRecordingInterval(null); // null sets it back to the default + assertEquals(DEFAULT_DIAGNOSTIC_RECORDING_INTERVAL, builder3.diagnosticRecordingInterval); + + EventProcessorBuilder builder4 = sendEvents().diagnosticRecordingInterval(Duration.ofSeconds(10)); + assertEquals(MIN_DIAGNOSTIC_RECORDING_INTERVAL, builder4.diagnosticRecordingInterval); + + } + + @Test + public void eventSender() { + assertNull(sendEvents().eventSenderConfigurer); + + ComponentConfigurer f = (ctx) -> null; + assertSame(f, sendEvents().eventSender(f).eventSenderConfigurer); + + assertNull(sendEvents().eventSender(f).eventSender(null).eventSenderConfigurer); + } + + @Test + public void flushInterval() { + EventProcessorBuilder builder1 = Components.sendEvents(); + assertEquals(DEFAULT_FLUSH_INTERVAL, builder1.flushInterval); + + EventProcessorBuilder builder2 = Components.sendEvents().flushInterval(Duration.ofSeconds(120)); + assertEquals(Duration.ofSeconds(120), builder2.flushInterval); + + EventProcessorBuilder builder3 = Components.sendEvents() + .flushInterval(Duration.ofSeconds(120)) + .flushInterval(null); // null sets it back to the 
default + assertEquals(DEFAULT_FLUSH_INTERVAL, builder3.flushInterval); + } + + @Test + public void privateAttributes() { + assertNull(sendEvents().privateAttributes); + + assertEquals(ImmutableSet.of(AttributeRef.fromLiteral("email"), AttributeRef.fromPath("/address/street")), + sendEvents().privateAttributes("email", "/address/street").privateAttributes); + } + + @Test + public void userKeysCapacity() { + assertEquals(DEFAULT_USER_KEYS_CAPACITY, sendEvents().userKeysCapacity); + + assertEquals(44, sendEvents().userKeysCapacity(44).userKeysCapacity); + } + + @Test + public void usrKeysFlushInterval() { + EventProcessorBuilder builder1 = Components.sendEvents(); + assertEquals(DEFAULT_USER_KEYS_FLUSH_INTERVAL, builder1.userKeysFlushInterval); + + EventProcessorBuilder builder2 = Components.sendEvents().userKeysFlushInterval(Duration.ofSeconds(120)); + assertEquals(Duration.ofSeconds(120), builder2.userKeysFlushInterval); + + EventProcessorBuilder builder3 = Components.sendEvents() + .userKeysFlushInterval(Duration.ofSeconds(120)) + .userKeysFlushInterval(null); // null sets it back to the default + assertEquals(DEFAULT_USER_KEYS_FLUSH_INTERVAL, builder3.userKeysFlushInterval); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceAutoUpdateTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceAutoUpdateTest.java new file mode 100644 index 0000000..7dae5f8 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceAutoUpdateTest.java @@ -0,0 +1,138 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.BaseTest; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.TestComponents; +import com.launchdarkly.sdk.server.TestComponents.MockDataSourceUpdates; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import 
com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.testhelpers.TempDir; +import com.launchdarkly.testhelpers.TempFile; + +import org.junit.Test; + +import java.nio.file.Path; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toItemsMap; +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestComponents.inMemoryDataStore; +import static com.launchdarkly.sdk.server.TestUtil.requireDataSourceStatusEventually; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.ALL_FLAG_KEYS; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.getResourceContents; +import static com.launchdarkly.testhelpers.Assertions.assertPolledFunctionReturnsValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class FileDataSourceAutoUpdateTest extends BaseTest { + private final DataStore store; + private MockDataSourceUpdates dataSourceUpdates; + private final LDConfig config = baseConfig().build(); + + public FileDataSourceAutoUpdateTest() throws Exception { + store = inMemoryDataStore(); + dataSourceUpdates = TestComponents.dataSourceUpdates(store); + } + + private static FileDataSourceBuilder makeFactoryWithFile(Path path) { + return FileData.dataSource().filePaths(path); + } + + private DataSource makeDataSource(FileDataSourceBuilder builder) { + return builder.build(clientContext("", config, 
dataSourceUpdates)); + } + + @Test + public void modifiedFileIsNotReloadedIfAutoUpdateIsOff() throws Exception { + try (TempDir dir = TempDir.create()) { + try (TempFile file = dir.tempFile(".json")) { + file.setContents(getResourceContents("flag-only.json")); + FileDataSourceBuilder factory1 = makeFactoryWithFile(file.getPath()); + try (DataSource fp = makeDataSource(factory1)) { + fp.start(); + file.setContents(getResourceContents("segment-only.json")); + Thread.sleep(400); + assertThat(toItemsMap(store.getAll(FEATURES)).size(), equalTo(1)); + assertThat(toItemsMap(store.getAll(SEGMENTS)).size(), equalTo(0)); + } + } + } + } + + // Note that the auto-update tests may fail when run on a Mac, but succeed on Ubuntu. This is because on + // MacOS there is no native implementation of WatchService, and the default implementation is known + // to be extremely slow. See: https://stackoverflow.com/questions/9588737/is-java-7-watchservice-slow-for-anyone-else + @Test + public void modifiedFileIsReloadedIfAutoUpdateIsOn() throws Exception { + try (TempDir dir = TempDir.create()) { + try (TempFile file = dir.tempFile(".json")) { + FileDataSourceBuilder factory1 = makeFactoryWithFile(file.getPath()).autoUpdate(true); + file.setContents(getResourceContents("flag-only.json")); // this file has 1 flag + try (DataSource fp = makeDataSource(factory1)) { + fp.start(); + Thread.sleep(1000); + file.setContents(getResourceContents("all-properties.json")); // this file has all the flags + assertPolledFunctionReturnsValue(10, TimeUnit.SECONDS, 500, TimeUnit.MILLISECONDS, () -> { + if (toItemsMap(store.getAll(FEATURES)).size() == ALL_FLAG_KEYS.size()) { + // success - return a non-null value to make repeatWithTimeout end + return fp; + } + return null; + }); + } + } + } + } + + @Test + public void ifFilesAreBadAtStartTimeAutoUpdateCanStillLoadGoodDataLater() throws Exception { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.register(statuses::add); + + try 
(TempDir dir = TempDir.create()) { + try (TempFile file = dir.tempFile(".json")) { + file.setContents("not valid"); + FileDataSourceBuilder factory1 = makeFactoryWithFile(file.getPath()).autoUpdate(true); + try (DataSource fp = makeDataSource(factory1)) { + fp.start(); + Thread.sleep(1000); + file.setContents(getResourceContents("flag-only.json")); // this file has 1 flag + assertPolledFunctionReturnsValue(10, TimeUnit.SECONDS, 500, TimeUnit.MILLISECONDS, () -> { + if (toItemsMap(store.getAll(FEATURES)).size() > 0) { + // success - status is now VALID, after having first been INITIALIZING - can still see that an error occurred + DataSourceStatusProvider.Status status = requireDataSourceStatusEventually(statuses, + DataSourceStatusProvider.State.VALID, DataSourceStatusProvider.State.INITIALIZING); + assertNotNull(status.getLastError()); + assertEquals(DataSourceStatusProvider.ErrorKind.INVALID_DATA, status.getLastError().getKind()); + + return status; + } + return null; + }); + } + } + } + } + + @Test + public void autoUpdateDoesNothingForClasspathResource() throws Exception { + // This just verifies that we don't cause an exception by trying to start a FileWatcher for + // something that isn't a real file. 
+ FileDataSourceBuilder factory = FileData.dataSource() + .classpathResources(FileDataSourceTestData.resourceLocation("all-properties.json")) + .autoUpdate(true); + try (DataSource fp = makeDataSource(factory)) { + fp.start(); + assertTrue(fp.isInitialized()); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTest.java new file mode 100644 index 0000000..e0bc143 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTest.java @@ -0,0 +1,171 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.server.BaseTest; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.TestComponents; +import com.launchdarkly.sdk.server.TestComponents.MockDataSourceUpdates; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataStore; +import com.launchdarkly.testhelpers.TempFile; + +import org.junit.Test; + +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; + +import static com.google.common.collect.Iterables.size; +import static com.launchdarkly.sdk.server.DataModel.FEATURES; +import static com.launchdarkly.sdk.server.DataModel.SEGMENTS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toItemsMap; +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.sdk.server.TestComponents.inMemoryDataStore; +import static com.launchdarkly.sdk.server.TestUtil.requireDataSourceStatus; +import static 
com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.ALL_FLAG_KEYS; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.ALL_SEGMENT_KEYS; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.resourceFilePath; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class FileDataSourceTest extends BaseTest { + private static final Path badFilePath = Paths.get("no-such-file.json"); + + private final DataStore store; + private MockDataSourceUpdates dataSourceUpdates; + private final LDConfig config = baseConfig().build(); + + public FileDataSourceTest() throws Exception { + store = inMemoryDataStore(); + dataSourceUpdates = TestComponents.dataSourceUpdates(store); + } + + private static FileDataSourceBuilder makeFactoryWithFile(Path path) { + return FileData.dataSource().filePaths(path); + } + + private DataSource makeDataSource(FileDataSourceBuilder builder) { + return builder.build(clientContext("", config, dataSourceUpdates)); + } + + @Test + public void flagsAreNotLoadedUntilStart() throws Exception { + FileDataSourceBuilder factory = makeFactoryWithFile(resourceFilePath("all-properties.json")); + try (DataSource fp = makeDataSource(factory)) { + assertThat(store.isInitialized(), equalTo(false)); + assertThat(size(store.getAll(FEATURES).getItems()), equalTo(0)); + assertThat(size(store.getAll(SEGMENTS).getItems()), equalTo(0)); + } + } + + @Test + public void flagsAreLoadedOnStart() throws Exception { + FileDataSourceBuilder factory = makeFactoryWithFile(resourceFilePath("all-properties.json")); + try (DataSource fp = makeDataSource(factory)) { + verifySuccessfulStart(fp); + + assertThat(toItemsMap(store.getAll(FEATURES)).keySet(), equalTo(ALL_FLAG_KEYS)); + assertThat(toItemsMap(store.getAll(SEGMENTS)).keySet(), 
equalTo(ALL_SEGMENT_KEYS)); + } + } + + @Test + public void filePathsCanBeSpecifiedAsStrings() throws Exception { + FileDataSourceBuilder factory = FileData.dataSource().filePaths(resourceFilePath("all-properties.json").toString()); + try (DataSource fp = makeDataSource(factory)) { + verifySuccessfulStart(fp); + + assertThat(toItemsMap(store.getAll(FEATURES)).keySet(), equalTo(ALL_FLAG_KEYS)); + assertThat(toItemsMap(store.getAll(SEGMENTS)).keySet(), equalTo(ALL_SEGMENT_KEYS)); + } + } + + @Test + public void flagsAreLoadedOnStartFromYamlFile() throws Exception { + FileDataSourceBuilder factory = makeFactoryWithFile(resourceFilePath("all-properties.yml")); + try (DataSource fp = makeDataSource(factory)) { + verifySuccessfulStart(fp); + + assertThat(toItemsMap(store.getAll(FEATURES)).keySet(), equalTo(ALL_FLAG_KEYS)); + assertThat(toItemsMap(store.getAll(SEGMENTS)).keySet(), equalTo(ALL_SEGMENT_KEYS)); + } + } + + @Test + public void startSucceedsWithEmptyFile() throws Exception { + try (DataSource fp = makeDataSource(makeFactoryWithFile(resourceFilePath("no-data.json")))) { + verifySuccessfulStart(fp); + + assertThat(toItemsMap(store.getAll(FEATURES)).keySet(), equalTo(ImmutableSet.of())); + assertThat(toItemsMap(store.getAll(SEGMENTS)).keySet(), equalTo(ImmutableSet.of())); + } + } + + private void verifySuccessfulStart(DataSource fp) { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.register(statuses::add); + + Future future = fp.start(); + + assertThat(future.isDone(), equalTo(true)); + assertThat(store.isInitialized(), equalTo(true)); + requireDataSourceStatus(statuses, DataSourceStatusProvider.State.VALID); + } + + @Test + public void startFailsWithNonexistentFile() throws Exception { + try (DataSource fp = makeDataSource(makeFactoryWithFile(badFilePath))) { + verifyUnsuccessfulStart(fp); + } + } + + @Test + public void startFailsWithNonexistentClasspathResource() throws Exception { + FileDataSourceBuilder factory = 
FileData.dataSource().classpathResources("we-have-no-such-thing"); + try (DataSource fp = makeDataSource(factory)) { + verifyUnsuccessfulStart(fp); + } + } + + private void verifyUnsuccessfulStart(DataSource fp) { + BlockingQueue statuses = new LinkedBlockingQueue<>(); + dataSourceUpdates.register(statuses::add); + + Future future = fp.start(); + + assertThat(future.isDone(), equalTo(true)); + assertThat(store.isInitialized(), equalTo(false)); + DataSourceStatusProvider.Status status = requireDataSourceStatus(statuses, DataSourceStatusProvider.State.INITIALIZING); + assertEquals(DataSourceStatusProvider.ErrorKind.INVALID_DATA, status.getLastError().getKind()); + } + + @Test + public void instantiationOfArbitraryTypeIsNotAllowed() throws Exception { + // test for https://nvd.nist.gov/vuln/detail/CVE-2022-1471 - this test fails if we use the + // empty Yaml() constructor in FileDataSourceParsing + String className = SimulatedMaliciousType.class.getName(); + Class.forName(this.getClass().getName()); + Class.forName(className); + try (TempFile f = TempFile.create()) { + f.setContents("---\nbad_thing: !!" 
+ className + " [value]\n"); + try (DataSource fp = makeDataSource(FileData.dataSource().filePaths(f.getPath()))) { + verifyUnsuccessfulStart(fp); + assertThat(SimulatedMaliciousType.wasInstantiated, is(false)); + } + } + } + + public static class SimulatedMaliciousType { + static volatile boolean wasInstantiated = false; + + public SimulatedMaliciousType(String value) { + wasInstantiated = true; + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTestData.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTestData.java new file mode 100644 index 0000000..31c5061 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FileDataSourceTestData.java @@ -0,0 +1,50 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.LDValue; + +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Map; +import java.util.Set; + +@SuppressWarnings("javadoc") +public class FileDataSourceTestData { + // These should match the data in our test files + public static final String FULL_FLAG_1_KEY = "flag1"; + public static final LDValue FULL_FLAG_1 = + LDValue.parse("{\"key\":\"flag1\",\"on\":true,\"fallthrough\":{\"variation\":2},\"variations\":[\"fall\",\"off\",\"on\"]}"); + public static final LDValue FULL_FLAG_1_VALUE = LDValue.of("on"); + public static final Map FULL_FLAGS = + ImmutableMap.of(FULL_FLAG_1_KEY, FULL_FLAG_1); + + public static final String FLAG_VALUE_1_KEY = "flag2"; + public static final LDValue FLAG_VALUE_1 = LDValue.of("value2"); + public static final Map FLAG_VALUES = + ImmutableMap.of(FLAG_VALUE_1_KEY, FLAG_VALUE_1); + + public static final String FULL_SEGMENT_1_KEY = "seg1"; + public static final LDValue 
FULL_SEGMENT_1 = LDValue.parse("{\"key\":\"seg1\",\"included\":[\"user1\"]}"); + public static final Map FULL_SEGMENTS = + ImmutableMap.of(FULL_SEGMENT_1_KEY, FULL_SEGMENT_1); + + public static final Set ALL_FLAG_KEYS = ImmutableSet.of(FULL_FLAG_1_KEY, FLAG_VALUE_1_KEY); + public static final Set ALL_SEGMENT_KEYS = ImmutableSet.of(FULL_SEGMENT_1_KEY); + + public static Path resourceFilePath(String filename) throws URISyntaxException { + URL resource = FileDataSourceTestData.class.getClassLoader().getResource(resourceLocation(filename)); + return Paths.get(resource.toURI()); + } + + public static String resourceLocation(String filename) throws URISyntaxException { + return "filesource/" + filename; + } + + public static String getResourceContents(String filename) throws Exception { + return new String(Files.readAllBytes(resourceFilePath(filename))); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserJsonTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserJsonTest.java new file mode 100644 index 0000000..45fe4cd --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserJsonTest.java @@ -0,0 +1,10 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.JsonFlagFileParser; + +@SuppressWarnings("javadoc") +public class FlagFileParserJsonTest extends FlagFileParserTestBase { + public FlagFileParserJsonTest() { + super(new JsonFlagFileParser(), ".json"); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserTestBase.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserTestBase.java new file mode 100644 index 0000000..1829cde --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserTestBase.java @@ -0,0 +1,88 @@ +package 
com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FileDataException; +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FlagFileParser; +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.FlagFileRep; + +import org.junit.Test; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.net.URISyntaxException; + +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FLAG_VALUES; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FULL_FLAGS; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.FULL_SEGMENTS; +import static com.launchdarkly.sdk.server.integrations.FileDataSourceTestData.resourceFilePath; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.fail; + +@SuppressWarnings("javadoc") +public abstract class FlagFileParserTestBase { + private final FlagFileParser parser; + private final String fileExtension; + + protected FlagFileParserTestBase(FlagFileParser parser, String fileExtension) { + this.parser = parser; + this.fileExtension = fileExtension; + } + + @Test + public void canParseFileWithAllProperties() throws Exception { + try (FileInputStream input = openFile("all-properties")) { + FlagFileRep data = parser.parse(input); + assertThat(data.flags, equalTo(FULL_FLAGS)); + assertThat(data.flagValues, equalTo(FLAG_VALUES)); + assertThat(data.segments, equalTo(FULL_SEGMENTS)); + } + } + + @Test + public void canParseFileWithOnlyFullFlag() throws Exception { + try (FileInputStream input = openFile("flag-only")) { + FlagFileRep data = parser.parse(input); + assertThat(data.flags, equalTo(FULL_FLAGS)); + assertThat(data.flagValues, nullValue()); + assertThat(data.segments, nullValue()); + 
} + } + + @Test + public void canParseFileWithOnlyFlagValue() throws Exception { + try (FileInputStream input = openFile("value-only")) { + FlagFileRep data = parser.parse(input); + assertThat(data.flags, nullValue()); + assertThat(data.flagValues, equalTo(FLAG_VALUES)); + assertThat(data.segments, nullValue()); + } + } + + @Test + public void canParseFileWithOnlySegment() throws Exception { + try (FileInputStream input = openFile("segment-only")) { + FlagFileRep data = parser.parse(input); + assertThat(data.flags, nullValue()); + assertThat(data.flagValues, nullValue()); + assertThat(data.segments, equalTo(FULL_SEGMENTS)); + } + } + + @Test + public void throwsExpectedErrorForBadFile() throws Exception { + try (FileInputStream input = openFile("malformed")) { + try { + parser.parse(input); + fail("expected exception"); + } catch (FileDataException e) { + assertThat(e.getDescription(), not(nullValue())); + } + } + } + + private FileInputStream openFile(String name) throws URISyntaxException, FileNotFoundException { + return new FileInputStream(resourceFilePath(name + fileExtension).toFile()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserYamlTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserYamlTest.java new file mode 100644 index 0000000..ce9100c --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/FlagFileParserYamlTest.java @@ -0,0 +1,10 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.integrations.FileDataSourceParsing.YamlFlagFileParser; + +@SuppressWarnings("javadoc") +public class FlagFileParserYamlTest extends FlagFileParserTestBase { + public FlagFileParserYamlTest() { + super(new YamlFlagFileParser(), ".yml"); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HookConfigurationBuilderTest.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HookConfigurationBuilderTest.java new file mode 100644 index 0000000..d0571fc --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HookConfigurationBuilderTest.java @@ -0,0 +1,30 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.subsystems.HookConfiguration; +import org.junit.Test; + +import java.util.Arrays; + +import static org.easymock.EasyMock.mock; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertSame; + +public class HookConfigurationBuilderTest { + + @Test + public void emptyHooksAsDefault() { + HookConfiguration configuration = Components.hooks().build(); + assertEquals(0, configuration.getHooks().size()); + } + + @Test + public void canSetHooks() { + Hook hookA = mock(Hook.class); + Hook hookB = mock(Hook.class); + HookConfiguration configuration = Components.hooks().setHooks(Arrays.asList(hookA, hookB)).build(); + assertEquals(2, configuration.getHooks().size()); + assertSame(hookA, configuration.getHooks().get(0)); + assertSame(hookB, configuration.getHooks().get(1)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilderTest.java new file mode 100644 index 0000000..7eeea05 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/HttpConfigurationBuilderTest.java @@ -0,0 +1,234 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.HttpConfiguration; + +import 
org.junit.Test; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.Proxy; +import java.net.Socket; +import java.net.UnknownHostException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.time.Duration; + +import javax.net.SocketFactory; +import javax.net.ssl.SSLSocketFactory; +import javax.net.ssl.X509TrustManager; + +import static com.launchdarkly.sdk.server.TestUtil.getSdkVersion; +import static com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder.DEFAULT_CONNECT_TIMEOUT; +import static com.launchdarkly.sdk.server.integrations.HttpConfigurationBuilder.DEFAULT_SOCKET_TIMEOUT; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +@SuppressWarnings("javadoc") +public class HttpConfigurationBuilderTest { + private static final String SDK_KEY = "sdk-key"; + private static final ClientContext BASIC_CONTEXT = new ClientContext(SDK_KEY); + + private static ImmutableMap.Builder buildBasicHeaders() { + return ImmutableMap.builder() + .put("Authorization", SDK_KEY) + .put("User-Agent", "JavaClient/" + getSdkVersion()); + } + + @Test + public void testDefaults() { + HttpConfiguration hc = Components.httpConfiguration().build(BASIC_CONTEXT); + assertEquals(DEFAULT_CONNECT_TIMEOUT, hc.getConnectTimeout()); + assertNull(hc.getProxy()); + assertNull(hc.getProxyAuthentication()); + assertEquals(DEFAULT_SOCKET_TIMEOUT, hc.getSocketTimeout()); + assertNull(hc.getSocketFactory()); + assertNull(hc.getSslSocketFactory()); + assertNull(hc.getTrustManager()); + assertEquals(buildBasicHeaders().build(), ImmutableMap.copyOf(hc.getDefaultHeaders())); + } + + @Test + public void testCanSetCustomHeaders() { + HttpConfiguration hc = Components.httpConfiguration() + .addCustomHeader("X-LaunchDarkly-Test-Label", "my-cool-label") + 
.addCustomHeader("X-Header-Message", "Java FTW") + .addCustomHeader("Authorization", "I can override this") + .addCustomHeader("User-Agent", "This too") + .build(BASIC_CONTEXT); + + ImmutableMap expectedHeaders = ImmutableMap.builder() + .put("X-LaunchDarkly-Test-Label", "my-cool-label") + .put("X-Header-Message", "Java FTW") + .put("Authorization", "I can override this") + .put("User-Agent", "This too") + .build(); + + assertEquals(expectedHeaders, ImmutableMap.copyOf(hc.getDefaultHeaders())); + } + + @Test + public void testConnectTimeout() { + HttpConfiguration hc = Components.httpConfiguration() + .connectTimeout(Duration.ofMillis(999)) + .build(BASIC_CONTEXT); + assertEquals(999, hc.getConnectTimeout().toMillis()); + + HttpConfiguration hc2 = Components.httpConfiguration() + .connectTimeout(Duration.ofMillis(999)) + .connectTimeout(null) + .build(BASIC_CONTEXT); + assertEquals(DEFAULT_CONNECT_TIMEOUT, hc2.getConnectTimeout()); +} + + @Test + public void testProxy() { + HttpConfiguration hc = Components.httpConfiguration() + .proxyHostAndPort("my-proxy", 1234) + .build(BASIC_CONTEXT); + assertEquals(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("my-proxy", 1234)), hc.getProxy()); + assertNull(hc.getProxyAuthentication()); + } + + @Test + public void testProxyBasicAuth() { + HttpConfiguration hc = Components.httpConfiguration() + .proxyHostAndPort("my-proxy", 1234) + .proxyAuth(Components.httpBasicAuthentication("user", "pass")) + .build(BASIC_CONTEXT); + assertEquals(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("my-proxy", 1234)), hc.getProxy()); + assertNotNull(hc.getProxyAuthentication()); + assertEquals("Basic dXNlcjpwYXNz", hc.getProxyAuthentication().provideAuthorization(null)); + } + + @Test + public void testSocketTimeout() { + HttpConfiguration hc1 = Components.httpConfiguration() + .socketTimeout(Duration.ofMillis(999)) + .build(BASIC_CONTEXT); + assertEquals(999, hc1.getSocketTimeout().toMillis()); + + HttpConfiguration hc2 = 
Components.httpConfiguration() + .socketTimeout(Duration.ofMillis(999)) + .socketTimeout(null) + .build(BASIC_CONTEXT); + assertEquals(DEFAULT_SOCKET_TIMEOUT, hc2.getSocketTimeout()); + } + + @Test + public void testSocketFactory() { + SocketFactory sf = new StubSocketFactory(); + HttpConfiguration hc = Components.httpConfiguration() + .socketFactory(sf) + .build(BASIC_CONTEXT); + assertSame(sf, hc.getSocketFactory()); + } + + @Test + public void testSslOptions() { + SSLSocketFactory sf = new StubSSLSocketFactory(); + X509TrustManager tm = new StubX509TrustManager(); + HttpConfiguration hc = Components.httpConfiguration() + .sslSocketFactory(sf, tm) + .build(BASIC_CONTEXT); + assertSame(sf, hc.getSslSocketFactory()); + assertSame(tm, hc.getTrustManager()); + } + + @Test + public void testWrapperNameOnly() { + HttpConfiguration hc = Components.httpConfiguration() + .wrapper("Scala", null) + .build(BASIC_CONTEXT); + assertEquals("Scala", ImmutableMap.copyOf(hc.getDefaultHeaders()).get("X-LaunchDarkly-Wrapper")); + } + + @Test + public void testWrapperWithVersion() { + HttpConfiguration hc = Components.httpConfiguration() + .wrapper("Scala", "0.1.0") + .build(BASIC_CONTEXT); + assertEquals("Scala/0.1.0", ImmutableMap.copyOf(hc.getDefaultHeaders()).get("X-LaunchDarkly-Wrapper")); + } + + @Test + public void testApplicationTags() { + ApplicationInfo info = new ApplicationInfo("authentication-service", "1.0.0"); + ClientContext contextWithTags = new ClientContext(SDK_KEY, info, null, null, false, null, 0, null); + HttpConfiguration hc = Components.httpConfiguration() + .build(contextWithTags); + assertEquals("application-id/authentication-service application-version/1.0.0", ImmutableMap.copyOf(hc.getDefaultHeaders()).get("X-LaunchDarkly-Tags")); + } + + public static class StubSocketFactory extends SocketFactory { + public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort) + throws IOException { + return null; + } + + public 
Socket createSocket(String host, int port, InetAddress localHost, int localPort) + throws IOException, UnknownHostException { + return null; + } + + public Socket createSocket(InetAddress host, int port) throws IOException { + return null; + } + + public Socket createSocket(String host, int port) throws IOException, UnknownHostException { + return null; + } + + public Socket createSocket(Socket s, String host, int port, boolean autoClose) throws IOException { + return null; + } + } + + public static class StubSSLSocketFactory extends SSLSocketFactory { + public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort) + throws IOException { + return null; + } + + public Socket createSocket(String host, int port, InetAddress localHost, int localPort) + throws IOException, UnknownHostException { + return null; + } + + public Socket createSocket(InetAddress host, int port) throws IOException { + return null; + } + + public Socket createSocket(String host, int port) throws IOException, UnknownHostException { + return null; + } + + public String[] getSupportedCipherSuites() { + return null; + } + + public String[] getDefaultCipherSuites() { + return null; + } + + public Socket createSocket(Socket s, String host, int port, boolean autoClose) throws IOException { + return null; + } + } + + public static class StubX509TrustManager implements X509TrustManager { + public X509Certificate[] getAcceptedIssuers() { + return null; + } + + public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {} + + public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {} + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilderTest.java new file mode 100644 index 0000000..e90d5fd --- /dev/null +++ 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/LoggingConfigurationBuilderTest.java @@ -0,0 +1,89 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.logging.LDLogLevel; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LDSLF4J; +import com.launchdarkly.logging.LogCapture; +import com.launchdarkly.logging.Logs; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.LoggingConfiguration; + +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.time.Duration; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; +import static org.hamcrest.Matchers.startsWith; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class LoggingConfigurationBuilderTest { + private static final String SDK_KEY = "sdk-key"; + private static final ClientContext BASIC_CONTEXT = new ClientContext(SDK_KEY); + + @Test + public void testDefaults() { + LoggingConfiguration c = Components.logging().build(BASIC_CONTEXT); + assertEquals(LoggingConfigurationBuilder.DEFAULT_LOG_DATA_SOURCE_OUTAGE_AS_ERROR_AFTER, + c.getLogDataSourceOutageAsErrorAfter()); + } + + @Test + public void logDataSourceOutageAsErrorAfter() { + LoggingConfiguration c1 = Components.logging() + .logDataSourceOutageAsErrorAfter(Duration.ofMinutes(9)) + .build(BASIC_CONTEXT); + assertEquals(Duration.ofMinutes(9), c1.getLogDataSourceOutageAsErrorAfter()); + + LoggingConfiguration c2 = Components.logging() + .logDataSourceOutageAsErrorAfter(null) + .build(BASIC_CONTEXT); + assertNull(c2.getLogDataSourceOutageAsErrorAfter()); + } + + @Test + public void defaultLogAdapterIsNotSLF4J() { + LoggingConfiguration c = 
Components.logging() + .build(BASIC_CONTEXT); + assertThat(c.getLogAdapter().getClass().getCanonicalName(), + not(startsWith("com.launchdarkly.logging.LDSLF4J"))); + // Note that we're checking the class name here rather than comparing directly to + // LDSLF4J.adapter(), because calling that method isn't safe if you don't have + // SLF4J in the classpath. + } + + @Test + public void canSetLogAdapterAndLevel() { + LogCapture logSink = Logs.capture(); + LoggingConfiguration c = Components.logging() + .adapter(logSink) + .level(LDLogLevel.WARN) + .build(BASIC_CONTEXT); + LDLogger logger = LDLogger.withAdapter(c.getLogAdapter(), ""); + logger.debug("message 1"); + logger.info("message 2"); + logger.warn("message 3"); + logger.error("message 4"); + assertThat(logSink.getMessageStrings(), contains("WARN:message 3", "ERROR:message 4")); + } + + @Test + public void defaultLevelIsInfo() { + LogCapture logSink = Logs.capture(); + LoggingConfiguration c = Components.logging() + .adapter(logSink) + .build(BASIC_CONTEXT); + LDLogger logger = LDLogger.withAdapter(c.getLogAdapter(), ""); + logger.debug("message 1"); + logger.info("message 2"); + logger.warn("message 3"); + logger.error("message 4"); + assertThat(logSink.getMessageStrings(), contains("INFO:message 2", "WARN:message 3", "ERROR:message 4")); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/MockPersistentDataStore.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/MockPersistentDataStore.java new file mode 100644 index 0000000..30a0252 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/MockPersistentDataStore.java @@ -0,0 +1,167 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableList; +import com.launchdarkly.sdk.server.DataStoreTestTypes.TestItem; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; +import 
com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor; + +import java.io.IOException; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +@SuppressWarnings("javadoc") +public final class MockPersistentDataStore implements PersistentDataStore { + public static final class MockDatabaseInstance { + Map>> dataByPrefix = new HashMap<>(); + Map initedByPrefix = new HashMap<>(); + } + + public final Map> data; + public final AtomicBoolean inited; + public final AtomicInteger initedCount = new AtomicInteger(0); + public volatile int initedQueryCount; + public volatile int getQueryCount; + public volatile boolean persistOnlyAsString; + public volatile boolean unavailable; + public volatile RuntimeException fakeError; + public volatile Runnable updateHook; + + public MockPersistentDataStore() { + this.data = new HashMap<>(); + this.inited = new AtomicBoolean(); + } + + public MockPersistentDataStore(MockDatabaseInstance sharedData, String prefix) { + synchronized (sharedData) { + if (sharedData.dataByPrefix.containsKey(prefix)) { + this.data = sharedData.dataByPrefix.get(prefix); + this.inited = sharedData.initedByPrefix.get(prefix); + } else { + this.data = new HashMap<>(); + this.inited = new AtomicBoolean(); + sharedData.dataByPrefix.put(prefix, this.data); + sharedData.initedByPrefix.put(prefix, this.inited); + } + } + } + + @Override + public void close() throws IOException { + } + + @Override + public SerializedItemDescriptor get(DataKind kind, String key) { + getQueryCount++; + maybeThrow(); + if (data.containsKey(kind)) { + SerializedItemDescriptor item = data.get(kind).get(key); + if (item 
!= null) { + if (persistOnlyAsString) { + // This simulates the kind of store implementation that can't track metadata separately + return new SerializedItemDescriptor(0, false, item.getSerializedItem()); + } else { + return item; + } + } + } + return null; + } + + @Override + public KeyedItems getAll(DataKind kind) { + maybeThrow(); + return data.containsKey(kind) ? new KeyedItems<>(ImmutableList.copyOf(data.get(kind).entrySet())) : new KeyedItems<>(null); + } + + @Override + public void init(FullDataSet allData) { + initedCount.incrementAndGet(); + maybeThrow(); + data.clear(); + for (Map.Entry> entry: allData.getData()) { + DataKind kind = entry.getKey(); + HashMap items = new LinkedHashMap<>(); + for (Map.Entry e: entry.getValue().getItems()) { + items.put(e.getKey(), storableItem(kind, e.getValue())); + } + data.put(kind, items); + } + inited.set(true); + } + + @Override + public boolean upsert(DataKind kind, String key, SerializedItemDescriptor item) { + maybeThrow(); + if (updateHook != null) { + updateHook.run(); + } + if (!data.containsKey(kind)) { + data.put(kind, new HashMap<>()); + } + Map items = data.get(kind); + SerializedItemDescriptor oldItem = items.get(key); + if (oldItem != null) { + // If persistOnlyAsString is true, simulate the kind of implementation where we can't see the + // version as a separate attribute in the database and must deserialize the item to get it. + int oldVersion = persistOnlyAsString ? 
+ kind.deserialize(oldItem.getSerializedItem()).getVersion() : + oldItem.getVersion(); + if (oldVersion >= item.getVersion()) { + return false; + } + } + items.put(key, storableItem(kind, item)); + return true; + } + + @Override + public boolean isInitialized() { + maybeThrow(); + initedQueryCount++; + return inited.get(); + } + + @Override + public boolean isStoreAvailable() { + return !unavailable; + } + + public void forceSet(DataKind kind, TestItem item) { + forceSet(kind, item.key, item.toSerializedItemDescriptor()); + } + + public void forceSet(DataKind kind, String key, SerializedItemDescriptor item) { + if (!data.containsKey(kind)) { + data.put(kind, new HashMap<>()); + } + Map items = data.get(kind); + items.put(key, storableItem(kind, item)); + } + + public void forceRemove(DataKind kind, String key) { + if (data.containsKey(kind)) { + data.get(kind).remove(key); + } + } + + private SerializedItemDescriptor storableItem(DataKind kind, SerializedItemDescriptor item) { + if (item.isDeleted() && !persistOnlyAsString) { + // This simulates the kind of store implementation that *can* track metadata separately, so we don't + // have to persist the placeholder string for deleted items + return new SerializedItemDescriptor(item.getVersion(), true, null); + } + return item; + } + + private void maybeThrow() { + if (fakeError != null) { + throw fakeError; + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilderTest.java new file mode 100644 index 0000000..b12db52 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreBuilderTest.java @@ -0,0 +1,64 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder.StaleValuesPolicy; +import 
com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import org.junit.Test; + +import java.time.Duration; + +import static com.launchdarkly.sdk.server.Components.persistentDataStore; +import static com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder.DEFAULT_CACHE_TTL; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class PersistentDataStoreBuilderTest { + private static final ComponentConfigurer factory = context -> null; + + @Test + public void factory() { + assertSame(factory, persistentDataStore(factory).persistentDataStoreConfigurer); + } + + @Test + public void cacheTime() { + assertEquals(DEFAULT_CACHE_TTL, persistentDataStore(factory).cacheTime); + + assertEquals(Duration.ofMinutes(3), persistentDataStore(factory).cacheTime(Duration.ofMinutes(3)).cacheTime); + + assertEquals(Duration.ofMillis(3), persistentDataStore(factory).cacheMillis(3).cacheTime); + + assertEquals(Duration.ofSeconds(3), persistentDataStore(factory).cacheSeconds(3).cacheTime); + + assertEquals(DEFAULT_CACHE_TTL, + persistentDataStore(factory).cacheTime(Duration.ofMinutes(3)).cacheTime(null).cacheTime); + + assertEquals(Duration.ZERO, persistentDataStore(factory).noCaching().cacheTime); + + assertEquals(Duration.ofMillis(-1), persistentDataStore(factory).cacheForever().cacheTime); + } + + @Test + public void staleValuesPolicy() { + assertEquals(StaleValuesPolicy.EVICT, persistentDataStore(factory).staleValuesPolicy); + + assertEquals(StaleValuesPolicy.REFRESH, + persistentDataStore(factory).staleValuesPolicy(StaleValuesPolicy.REFRESH).staleValuesPolicy); + + assertEquals(StaleValuesPolicy.EVICT, + persistentDataStore(factory).staleValuesPolicy(StaleValuesPolicy.REFRESH).staleValuesPolicy(null).staleValuesPolicy); + } + + @Test + 
public void recordCacheStats() { + assertFalse(persistentDataStore(factory).recordCacheStats); + + assertTrue(persistentDataStore(factory).recordCacheStats(true).recordCacheStats); + + assertFalse(persistentDataStore(factory).recordCacheStats(true).recordCacheStats(false).recordCacheStats); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreGenericTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreGenericTest.java new file mode 100644 index 0000000..b31df60 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreGenericTest.java @@ -0,0 +1,74 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableList; +import com.launchdarkly.sdk.server.TestComponents; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +/** + * This verifies that PersistentDataStoreTestBase behaves as expected as long as the PersistentDataStore + * implementation behaves as expected. Since there aren't any actual database integrations built into the + * SDK project, and PersistentDataStoreTestBase will be used by external projects like java-server-sdk-redis, + * we want to make sure the test logic is correct regardless of database implementation details. + * + * PersistentDataStore implementations may be able to persist the version and deleted state as metadata + * separate from the serialized item string; or they may not, in which case a little extra parsing is + * necessary. MockPersistentDataStore is able to simulate both of these scenarios, and we test both here. 
+ */ +@SuppressWarnings("javadoc") +@RunWith(Parameterized.class) +public class PersistentDataStoreGenericTest extends PersistentDataStoreTestBase { + private final MockPersistentDataStore.MockDatabaseInstance sharedData = new MockPersistentDataStore.MockDatabaseInstance(); + private final TestMode testMode; + + static class TestMode { + final boolean persistOnlyAsString; + + TestMode(boolean persistOnlyAsString) { + this.persistOnlyAsString = persistOnlyAsString; + } + + @Override + public String toString() { + return "TestMode(" + (persistOnlyAsString ? "persistOnlyAsString" : "persistWithMetadata") + ")"; + } + } + + @Parameters(name="{0}") + public static Iterable data() { + return ImmutableList.of( + new TestMode(false), + new TestMode(true) + ); + } + + public PersistentDataStoreGenericTest(TestMode testMode) { + this.testMode = testMode; + } + + @Override + protected ComponentConfigurer buildStore(String prefix) { + MockPersistentDataStore store = new MockPersistentDataStore(sharedData, prefix); + store.persistOnlyAsString = testMode.persistOnlyAsString; + return TestComponents.specificComponent(store); + } + + @Override + protected void clearAllData() { + synchronized (sharedData) { + for (String prefix: sharedData.dataByPrefix.keySet()) { + sharedData.dataByPrefix.get(prefix).clear(); + } + } + } + + @Override + protected boolean setUpdateHook(MockPersistentDataStore storeUnderTest, Runnable hook) { + storeUnderTest.updateHook = hook; + return true; + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreTestBase.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreTestBase.java new file mode 100644 index 0000000..9889fe8 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PersistentDataStoreTestBase.java @@ -0,0 +1,378 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.BaseTest; 
+import com.launchdarkly.sdk.server.DataStoreTestTypes.DataBuilder; +import com.launchdarkly.sdk.server.DataStoreTestTypes.TestItem; +import com.launchdarkly.sdk.server.subsystems.ClientContext; +import com.launchdarkly.sdk.server.subsystems.ComponentConfigurer; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.PersistentDataStore; + +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.Test; + +import java.util.Map; + +import static com.launchdarkly.sdk.server.DataStoreTestTypes.OTHER_TEST_ITEMS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.TEST_ITEMS; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toItemsMap; +import static com.launchdarkly.sdk.server.DataStoreTestTypes.toSerialized; +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeTrue; + +/** + * Similar to FeatureStoreTestBase, but exercises only the underlying database implementation of a persistent + * data store. The caching behavior, which is entirely implemented by CachingStoreWrapper, is covered by + * CachingStoreWrapperTest. 
+ * + * @param the type of the store this base test will use + */ +@SuppressWarnings("javadoc") +public abstract class PersistentDataStoreTestBase extends BaseTest { + protected T store; + + protected TestItem item1 = new TestItem("key1", "first", 10); + + protected TestItem item2 = new TestItem("key2", "second", 10); + + protected TestItem otherItem1 = new TestItem("key1", "other-first", 11); + + private ClientContext makeClientContext() { + return clientContext("", baseConfig().build()); + } + + @SuppressWarnings("unchecked") + private T makeConfiguredStore() { + return (T)buildStore(null).build(makeClientContext()); + } + + @SuppressWarnings("unchecked") + private T makeConfiguredStoreWithPrefix(String prefix) { + ComponentConfigurer builder = buildStore(prefix); + if (builder == null) { + return null; + } + return (T)builder.build(makeClientContext()); + } + + /** + * Test subclasses should override this method to prepare an instance of the data store class. + * They are allowed to return null if {@code prefix} is non-null and they do not support prefixes. + * + * @param prefix a database prefix or null + * @return a factory for creating the data store + */ + protected ComponentConfigurer buildStore(String prefix) { + return null; + } + + /** + * Test classes should override this to clear all data from the underlying database. + */ + protected abstract void clearAllData(); + + /** + * Test classes should override this (and return true) if it is possible to instrument the feature + * store to execute the specified Runnable during an upsert operation, for concurrent modification tests. + */ + protected boolean setUpdateHook(T storeUnderTest, Runnable hook) { + return false; + } + + private void assertEqualsSerializedItem(TestItem item, SerializedItemDescriptor serializedItemDesc) { + // This allows for the fact that a PersistentDataStore may not be able to get the item version without + // deserializing it, so we allow the version to be zero. 
+ assertEquals(item.toSerializedItemDescriptor().getSerializedItem(), serializedItemDesc.getSerializedItem()); + if (serializedItemDesc.getVersion() != 0) { + assertEquals(item.version, serializedItemDesc.getVersion()); + } + } + + private void assertEqualsDeletedItem(SerializedItemDescriptor expected, SerializedItemDescriptor serializedItemDesc) { + // As above, the PersistentDataStore may not have separate access to the version and deleted state; + // PersistentDataStoreWrapper compensates for this when it deserializes the item. + if (serializedItemDesc.getSerializedItem() == null) { + assertTrue(serializedItemDesc.isDeleted()); + assertEquals(expected.getVersion(), serializedItemDesc.getVersion()); + } else { + ItemDescriptor itemDesc = TEST_ITEMS.deserialize(serializedItemDesc.getSerializedItem()); + assertEquals(ItemDescriptor.deletedItem(expected.getVersion()), itemDesc); + } + } + + @Before + public void setup() { + store = makeConfiguredStore(); + } + + @After + public void teardown() throws Exception { + if (store != null) { + store.close(); + } + } + + @Test + public void storeNotInitializedBeforeInit() { + clearAllData(); + assertFalse(store.isInitialized()); + } + + @Test + public void storeInitializedAfterInit() { + store.init(new DataBuilder().buildSerialized()); + assertTrue(store.isInitialized()); + } + + @Test + public void initCompletelyReplacesPreviousData() { + clearAllData(); + + FullDataSet allData = + new DataBuilder().add(TEST_ITEMS, item1, item2).add(OTHER_TEST_ITEMS, otherItem1).buildSerialized(); + store.init(allData); + + TestItem item2v2 = item2.withVersion(item2.version + 1); + allData = new DataBuilder().add(TEST_ITEMS, item2v2).add(OTHER_TEST_ITEMS).buildSerialized(); + store.init(allData); + + assertNull(store.get(TEST_ITEMS, item1.key)); + assertEqualsSerializedItem(item2v2, store.get(TEST_ITEMS, item2.key)); + assertNull(store.get(OTHER_TEST_ITEMS, otherItem1.key)); + } + + @Test + public void 
oneInstanceCanDetectIfAnotherInstanceHasInitializedTheStore() { + clearAllData(); + T store2 = makeConfiguredStore(); + + assertFalse(store.isInitialized()); + + store2.init(new DataBuilder().add(TEST_ITEMS, item1).buildSerialized()); + + assertTrue(store.isInitialized()); + } + + @Test + public void oneInstanceCanDetectIfAnotherInstanceHasInitializedTheStoreEvenIfEmpty() { + clearAllData(); + T store2 = makeConfiguredStore(); + + assertFalse(store.isInitialized()); + + store2.init(new DataBuilder().buildSerialized()); + + assertTrue(store.isInitialized()); + } + + @Test + public void getExistingItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + assertEqualsSerializedItem(item1, store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void getNonexistingItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + assertNull(store.get(TEST_ITEMS, "biz")); + } + + @Test + public void getAll() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).add(OTHER_TEST_ITEMS, otherItem1).buildSerialized()); + Map items = toItemsMap(store.getAll(TEST_ITEMS)); + assertEquals(2, items.size()); + assertEqualsSerializedItem(item1, items.get(item1.key)); + assertEqualsSerializedItem(item2, items.get(item2.key)); + } + + @Test + public void getAllWithDeletedItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + SerializedItemDescriptor deletedItem = toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(item1.version + 1)); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + Map items = toItemsMap(store.getAll(TEST_ITEMS)); + assertEquals(2, items.size()); + assertEqualsSerializedItem(item2, items.get(item2.key)); + assertEqualsDeletedItem(deletedItem, items.get(item1.key)); + } + + @Test + public void upsertWithNewerVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + TestItem newVer = item1.withVersion(item1.version + 
1).withName("modified"); + store.upsert(TEST_ITEMS, item1.key, newVer.toSerializedItemDescriptor()); + assertEqualsSerializedItem(newVer, store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void upsertWithOlderVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + TestItem oldVer = item1.withVersion(item1.version - 1).withName("modified"); + store.upsert(TEST_ITEMS, item1.key, oldVer.toSerializedItemDescriptor()); + assertEqualsSerializedItem(item1, store.get(TEST_ITEMS, oldVer.key)); + } + + @Test + public void upsertNewItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + TestItem newItem = new TestItem("new-name", "new-key", 99); + store.upsert(TEST_ITEMS, newItem.key, newItem.toSerializedItemDescriptor()); + assertEqualsSerializedItem(newItem, store.get(TEST_ITEMS, newItem.key)); + } + + @Test + public void deleteWithNewerVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + SerializedItemDescriptor deletedItem = toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(item1.version + 1)); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + assertEqualsDeletedItem(deletedItem, store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void deleteWithOlderVersion() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + SerializedItemDescriptor deletedItem = toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(item1.version - 1)); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + assertEqualsSerializedItem(item1, store.get(TEST_ITEMS, item1.key)); + } + + @Test + public void deleteUnknownItem() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + SerializedItemDescriptor deletedItem = toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(11)); + store.upsert(TEST_ITEMS, "deleted-key", deletedItem); + assertEqualsDeletedItem(deletedItem, store.get(TEST_ITEMS, "deleted-key")); + } + + 
@Test + public void upsertOlderVersionAfterDelete() { + store.init(new DataBuilder().add(TEST_ITEMS, item1, item2).buildSerialized()); + SerializedItemDescriptor deletedItem = toSerialized(TEST_ITEMS, ItemDescriptor.deletedItem(item1.version + 1)); + store.upsert(TEST_ITEMS, item1.key, deletedItem); + store.upsert(TEST_ITEMS, item1.key, item1.toSerializedItemDescriptor()); + assertEqualsDeletedItem(deletedItem, store.get(TEST_ITEMS, item1.key)); + } + + // The following two tests verify that the update version checking logic works correctly when + // another client instance is modifying the same data. They will run only if the test class + // supports setUpdateHook(). + + @Test + public void handlesUpsertRaceConditionAgainstExternalClientWithLowerVersion() throws Exception { + final T store2 = makeConfiguredStore(); + + int startVersion = 1; + final int store2VersionStart = 2; + final int store2VersionEnd = 4; + int store1VersionEnd = 10; + + final TestItem startItem = new TestItem("me", "foo", startVersion); + + Runnable concurrentModifier = new Runnable() { + int versionCounter = store2VersionStart; + public void run() { + if (versionCounter <= store2VersionEnd) { + store2.upsert(TEST_ITEMS, startItem.key, startItem.withVersion(versionCounter).toSerializedItemDescriptor()); + versionCounter++; + } + } + }; + + try { + assumeTrue(setUpdateHook(store, concurrentModifier)); + + store.init(new DataBuilder().add(TEST_ITEMS, startItem).buildSerialized()); + + TestItem store1End = startItem.withVersion(store1VersionEnd); + store.upsert(TEST_ITEMS, startItem.key, store1End.toSerializedItemDescriptor()); + + SerializedItemDescriptor result = store.get(TEST_ITEMS, startItem.key); + assertEqualsSerializedItem(startItem.withVersion(store1VersionEnd), result); + } finally { + store2.close(); + } + } + + @Test + public void handlesUpsertRaceConditionAgainstExternalClientWithHigherVersion() throws Exception { + final T store2 = makeConfiguredStore(); + + int startVersion = 1; + 
final int store2Version = 3; + int store1VersionEnd = 2; + + final TestItem startItem = new TestItem("me", "foo", startVersion); + + Runnable concurrentModifier = new Runnable() { + public void run() { + store2.upsert(TEST_ITEMS, startItem.key, startItem.withVersion(store2Version).toSerializedItemDescriptor()); + } + }; + + try { + assumeTrue(setUpdateHook(store, concurrentModifier)); + + store.init(new DataBuilder().add(TEST_ITEMS, startItem).buildSerialized()); + + TestItem store1End = startItem.withVersion(store1VersionEnd); + store.upsert(TEST_ITEMS, startItem.key, store1End.toSerializedItemDescriptor()); + + SerializedItemDescriptor result = store.get(TEST_ITEMS, startItem.key); + assertEqualsSerializedItem(startItem.withVersion(store2Version), result); + } finally { + store2.close(); + } + } + + @Test + public void storesWithDifferentPrefixAreIndependent() throws Exception { + T store1 = makeConfiguredStoreWithPrefix("aaa"); + Assume.assumeNotNull(store1); + T store2 = makeConfiguredStoreWithPrefix("bbb"); + clearAllData(); + + try { + assertFalse(store1.isInitialized()); + assertFalse(store2.isInitialized()); + + TestItem item1a = new TestItem("a1", "flag-a", 1); + TestItem item1b = new TestItem("b", "flag-b", 1); + TestItem item2a = new TestItem("a2", "flag-a", 2); + TestItem item2c = new TestItem("c", "flag-c", 2); + + store1.init(new DataBuilder().add(TEST_ITEMS, item1a, item1b).buildSerialized()); + assertTrue(store1.isInitialized()); + assertFalse(store2.isInitialized()); + + store2.init(new DataBuilder().add(TEST_ITEMS, item2a, item2c).buildSerialized()); + assertTrue(store1.isInitialized()); + assertTrue(store2.isInitialized()); + + Map items1 = toItemsMap(store1.getAll(TEST_ITEMS)); + Map items2 = toItemsMap(store2.getAll(TEST_ITEMS)); + assertEquals(2, items1.size()); + assertEquals(2, items2.size()); + assertEqualsSerializedItem(item1a, items1.get(item1a.key)); + assertEqualsSerializedItem(item1b, items1.get(item1b.key)); + 
assertEqualsSerializedItem(item2a, items2.get(item2a.key)); + assertEqualsSerializedItem(item2c, items2.get(item2c.key)); + + assertEqualsSerializedItem(item1a, store1.get(TEST_ITEMS, item1a.key)); + assertEqualsSerializedItem(item1b, store1.get(TEST_ITEMS, item1b.key)); + assertEqualsSerializedItem(item2a, store2.get(TEST_ITEMS, item2a.key)); + assertEqualsSerializedItem(item2c, store2.get(TEST_ITEMS, item2c.key)); + } finally { + store1.close(); + store2.close(); + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilderTest.java new file mode 100644 index 0000000..2de3e12 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/PollingDataSourceBuilderTest.java @@ -0,0 +1,37 @@ +package com.launchdarkly.sdk.server.integrations; + +import org.junit.Test; + +import java.time.Duration; + +import static com.launchdarkly.sdk.server.Components.pollingDataSource; +import static com.launchdarkly.sdk.server.integrations.PollingDataSourceBuilder.DEFAULT_POLL_INTERVAL; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class PollingDataSourceBuilderTest { + @Test + public void pollInterval() { + assertEquals(DEFAULT_POLL_INTERVAL, pollingDataSource().pollInterval); + + assertEquals(Duration.ofMinutes(7), + pollingDataSource().pollInterval(Duration.ofMinutes(7)).pollInterval); + + assertEquals(DEFAULT_POLL_INTERVAL, + pollingDataSource().pollInterval(Duration.ofMinutes(7)).pollInterval(null).pollInterval); + + assertEquals(DEFAULT_POLL_INTERVAL, + pollingDataSource().pollInterval(Duration.ofMillis(1)).pollInterval); + } + + @Test + public void testPayloadFilter() { + assertEquals(null, pollingDataSource().payloadFilter); + + assertEquals("aFilter", + pollingDataSource().payloadFilter("aFilter").payloadFilter); + + assertEquals(null, + 
pollingDataSource().payloadFilter("aFilter").payloadFilter(null).payloadFilter); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilderTest.java new file mode 100644 index 0000000..44f0492 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/ServiceEndpointsBuilderTest.java @@ -0,0 +1,84 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.interfaces.ServiceEndpoints; + +import java.net.URI; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class ServiceEndpointsBuilderTest { + @Test + public void usesAllDefaultUrisIfNoneAreOverridden() { + ServiceEndpoints se = Components.serviceEndpoints().createServiceEndpoints(); + assertEquals(URI.create("https://stream.launchdarkly.com"), se.getStreamingBaseUri()); + assertEquals(URI.create("https://app.launchdarkly.com"), se.getPollingBaseUri()); + assertEquals(URI.create("https://events.launchdarkly.com"), se.getEventsBaseUri()); + } + + @Test + public void canSetAllUrisToCustomValues() { + URI su = URI.create("https://my-streaming"); + URI pu = URI.create("https://my-polling"); + URI eu = URI.create("https://my-events"); + ServiceEndpoints se = Components.serviceEndpoints() + .streaming(su) + .polling(pu) + .events(eu) + .createServiceEndpoints(); + assertEquals(su, se.getStreamingBaseUri()); + assertEquals(pu, se.getPollingBaseUri()); + assertEquals(eu, se.getEventsBaseUri()); + } + + @Test + public void ifCustomUrisAreSetAnyUnsetOnesDefaultToNull() { + URI su = URI.create("https://my-streaming"); + URI pu = URI.create("https://my-polling"); + URI eu = URI.create("https://my-events"); + ServiceEndpoints se1 = 
Components.serviceEndpoints().streaming(su).createServiceEndpoints(); + assertEquals(su, se1.getStreamingBaseUri()); + assertNull(se1.getPollingBaseUri()); + assertNull(se1.getEventsBaseUri()); + + ServiceEndpoints se2 = Components.serviceEndpoints().polling(pu).createServiceEndpoints(); + assertNull(se2.getStreamingBaseUri()); + assertEquals(pu, se2.getPollingBaseUri()); + assertNull(se2.getEventsBaseUri()); + + ServiceEndpoints se3 = Components.serviceEndpoints().events(eu).createServiceEndpoints(); + assertNull(se3.getStreamingBaseUri()); + assertNull(se3.getPollingBaseUri()); + assertEquals(eu, se3.getEventsBaseUri()); + } + + @Test + public void settingRelayProxyUriSetsAllUris() { + URI customRelay = URI.create("http://my-relay"); + ServiceEndpoints se = Components.serviceEndpoints().relayProxy(customRelay).createServiceEndpoints(); + assertEquals(customRelay, se.getStreamingBaseUri()); + assertEquals(customRelay, se.getPollingBaseUri()); + assertEquals(customRelay, se.getEventsBaseUri()); + } + + @Test + public void stringSettersAreEquivalentToUriSetters() { + String su = "https://my-streaming"; + String pu = "https://my-polling"; + String eu = "https://my-events"; + ServiceEndpoints se1 = Components.serviceEndpoints().streaming(su).polling(pu).events(eu).createServiceEndpoints(); + assertEquals(URI.create(su), se1.getStreamingBaseUri()); + assertEquals(URI.create(pu), se1.getPollingBaseUri()); + assertEquals(URI.create(eu), se1.getEventsBaseUri()); + + String ru = "http://my-relay"; + ServiceEndpoints se2 = Components.serviceEndpoints().relayProxy(ru).createServiceEndpoints(); + assertEquals(URI.create(ru), se2.getStreamingBaseUri()); + assertEquals(URI.create(ru), se2.getPollingBaseUri()); + assertEquals(URI.create(ru), se2.getEventsBaseUri()); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilderTest.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilderTest.java new file mode 100644 index 0000000..71396a4 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/StreamingDataSourceBuilderTest.java @@ -0,0 +1,34 @@ +package com.launchdarkly.sdk.server.integrations; + +import org.junit.Test; + +import java.time.Duration; + +import static com.launchdarkly.sdk.server.Components.streamingDataSource; +import static com.launchdarkly.sdk.server.integrations.StreamingDataSourceBuilder.DEFAULT_INITIAL_RECONNECT_DELAY; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class StreamingDataSourceBuilderTest { + @Test + public void initialReconnectDelay() { + assertEquals(DEFAULT_INITIAL_RECONNECT_DELAY, streamingDataSource().initialReconnectDelay); + + assertEquals(Duration.ofMillis(222), + streamingDataSource().initialReconnectDelay(Duration.ofMillis(222)).initialReconnectDelay); + + assertEquals(DEFAULT_INITIAL_RECONNECT_DELAY, + streamingDataSource().initialReconnectDelay(Duration.ofMillis(222)).initialReconnectDelay(null).initialReconnectDelay); + } + + @Test + public void testPayloadFilter() { + assertEquals(null, streamingDataSource().payloadFilter); + + assertEquals("aFilter", + streamingDataSource().payloadFilter("aFilter").payloadFilter); + + assertEquals(null, + streamingDataSource().payloadFilter("aFilter").payloadFilter(null).payloadFilter); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataTest.java new file mode 100644 index 0000000..77e194d --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataTest.java @@ -0,0 +1,465 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.ContextKind; +import 
com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.DataModel; +import com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.ModelBuilders; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider; +import com.launchdarkly.sdk.server.subsystems.DataSource; +import com.launchdarkly.sdk.server.subsystems.DataSourceUpdateSink; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; + +import org.junit.Test; + +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.function.Function; + +import static com.google.common.collect.Iterables.get; +import static com.launchdarkly.sdk.server.ModelBuilders.flagBuilder; +import static com.launchdarkly.sdk.server.TestComponents.clientContext; +import static com.launchdarkly.testhelpers.JsonAssertions.assertJsonEquals; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; + +@SuppressWarnings("javadoc") +public class TestDataTest { + private static final LDValue[] THREE_STRING_VALUES = + new LDValue[] { LDValue.of("red"), LDValue.of("green"), LDValue.of("blue") }; + + private CapturingDataSourceUpdates updates = new CapturingDataSourceUpdates(); + + // Test implementation note: We're using the ModelBuilders test helpers to build the expected + // flag JSON. 
However, we have to use them in a slightly different way than we do in other tests + // (for instance, writing out an expected clause as a JSON literal), because specific data model + // classes like FeatureFlag and Clause aren't visible from the integrations package. + + @Test + public void initializesWithEmptyData() throws Exception { + TestData td = TestData.dataSource(); + DataSource ds = td.build(clientContext("", new LDConfig.Builder().build(), updates)); + Future started = ds.start(); + + assertThat(started.isDone(), is(true)); + assertThat(updates.valid, is(true)); + + assertThat(updates.inits.size(), equalTo(1)); + FullDataSet data = updates.inits.take(); + assertThat(data.getData(), iterableWithSize(1)); + assertThat(get(data.getData(), 0).getKey(), equalTo(DataModel.FEATURES)); + assertThat(get(data.getData(), 0).getValue().getItems(), emptyIterable()); + } + + @Test + public void initializesWithFlags() throws Exception { + TestData td = TestData.dataSource(); + + td.update(td.flag("flag1").on(true)) + .update(td.flag("flag2").on(false)); + + DataSource ds = td.build(clientContext("", new LDConfig.Builder().build(), updates)); + Future started = ds.start(); + + assertThat(started.isDone(), is(true)); + assertThat(updates.valid, is(true)); + + assertThat(updates.inits.size(), equalTo(1)); + FullDataSet data = updates.inits.take(); + assertThat(data.getData(), iterableWithSize(1)); + assertThat(get(data.getData(), 0).getKey(), equalTo(DataModel.FEATURES)); + assertThat(get(data.getData(), 0).getValue().getItems(), iterableWithSize(2)); + + ModelBuilders.FlagBuilder expectedFlag1 = flagBuilder("flag1").version(1).salt("") + .on(true).offVariation(1).fallthroughVariation(0).variations(true, false); + ModelBuilders.FlagBuilder expectedFlag2 = flagBuilder("flag2").version(1).salt("") + .on(false).offVariation(1).fallthroughVariation(0).variations(true, false); + + Map flags = ImmutableMap.copyOf(get(data.getData(), 0).getValue().getItems()); + ItemDescriptor 
flag1 = flags.get("flag1"); + ItemDescriptor flag2 = flags.get("flag2"); + assertThat(flag1, not(nullValue())); + assertThat(flag2, not(nullValue())); + + assertJsonEquals(flagJson(expectedFlag1, 1), flagJson(flag1)); + assertJsonEquals(flagJson(expectedFlag2, 1), flagJson(flag2)); + } + + @Test + public void addsFlag() throws Exception { + TestData td = TestData.dataSource(); + DataSource ds = td.build(clientContext("", new LDConfig.Builder().build(), updates)); + Future started = ds.start(); + + assertThat(started.isDone(), is(true)); + assertThat(updates.valid, is(true)); + + td.update(td.flag("flag1").on(true)); + + ModelBuilders.FlagBuilder expectedFlag = flagBuilder("flag1").version(1).salt("") + .on(true).offVariation(1).fallthroughVariation(0).variations(true, false); + + assertThat(updates.upserts.size(), equalTo(1)); + UpsertParams up = updates.upserts.take(); + assertThat(up.kind, is(DataModel.FEATURES)); + assertThat(up.key, equalTo("flag1")); + ItemDescriptor flag1 = up.item; + + assertJsonEquals(flagJson(expectedFlag, 2), flagJson(flag1)); + } + + @Test + public void updatesFlag() throws Exception { + TestData td = TestData.dataSource(); + td.update(td.flag("flag1") + .on(false) + .variationForUser("a", true) + .ifMatch("name", LDValue.of("Lucy")).thenReturn(true)); + // Here we're verifying that the original targets & rules are copied over if we didn't change them + + ModelBuilders.FlagBuilder expectedFlag = flagBuilder("flag1").version(1).salt("") + .on(false).offVariation(1).fallthroughVariation(0).variations(true, false) + .addTarget(0, "a").addContextTarget(ContextKind.DEFAULT, 0) + .addRule("rule0", 0, "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Lucy\"]}"); + + DataSource ds = td.build(clientContext("", new LDConfig.Builder().build(), updates)); + Future started = ds.start(); + + assertThat(started.isDone(), is(true)); + assertThat(updates.valid, is(true)); + + td.update(td.flag("flag1").on(true)); + + 
assertThat(updates.upserts.size(), equalTo(1)); + UpsertParams up = updates.upserts.take(); + assertThat(up.kind, is(DataModel.FEATURES)); + assertThat(up.key, equalTo("flag1")); + ItemDescriptor flag1 = up.item; + + expectedFlag.on(true).version(2); + assertJsonEquals(flagJson(expectedFlag, 2), flagJson(flag1)); + } + + @Test + public void flagConfigSimpleBoolean() throws Exception { + Function expectedBooleanFlag = fb -> + fb.on(true).variations(true, false).offVariation(1).fallthroughVariation(0); + + verifyFlag(f -> f, expectedBooleanFlag); + verifyFlag(f -> f.booleanFlag(), expectedBooleanFlag); // already the default + verifyFlag(f -> f.on(true), expectedBooleanFlag); // already the default + verifyFlag(f -> f.on(false), fb -> expectedBooleanFlag.apply(fb).on(false)); + verifyFlag(f -> f.variationForAll(false), fb -> expectedBooleanFlag.apply(fb).fallthroughVariation(1)); + verifyFlag(f -> f.variationForAll(true), expectedBooleanFlag); // already the default + verifyFlag(f -> f.fallthroughVariation(true).offVariation(false), expectedBooleanFlag); // already the default + + verifyFlag( + f -> f.fallthroughVariation(false).offVariation(true), + fb -> expectedBooleanFlag.apply(fb).fallthroughVariation(1).offVariation(0) + ); + } + + @Test + public void usingBooleanConfigMethodsForcesFlagToBeBoolean() throws Exception { + Function expectedBooleanFlag = fb -> + fb.on(true).variations(true, false).offVariation(1).fallthroughVariation(0); + + verifyFlag( + f -> f.variations(LDValue.of(1), LDValue.of(2)).booleanFlag(), + expectedBooleanFlag + ); + verifyFlag( + f -> f.variations(LDValue.of(true), LDValue.of(2)).booleanFlag(), + expectedBooleanFlag + ); + verifyFlag( + f -> f.booleanFlag(), + expectedBooleanFlag + ); + } + + @Test + public void flagConfigStringVariations() throws Exception { + verifyFlag( + f -> f.variations(THREE_STRING_VALUES).offVariation(0).fallthroughVariation(2), + fb -> fb.variations("red", "green", 
"blue").on(true).offVariation(0).fallthroughVariation(2) + ); + } + + @Test + public void flagConfigSamplingRatio() throws Exception { + verifyFlag( + f -> f.samplingRatio(2).on(false), + fb -> fb.samplingRatio(2).fallthroughVariation(0).variations(true,false).offVariation(1) + ); + } + + @Test + public void flagConfigMigrationCheckRatio() throws Exception { + verifyFlag( + f -> f.migrationCheckRatio(2).on(false), + fb -> fb.migration(new ModelBuilders.MigrationBuilder().checkRatio(2).build()) + .fallthroughVariation(0).variations(true,false).offVariation(1) + ); + } + + @Test + public void userTargets() throws Exception { + Function expectedBooleanFlag = fb -> + fb.variations(true, false).on(true).offVariation(1).fallthroughVariation(0); + + verifyFlag( + f -> f.variationForUser("a", true).variationForUser("b", true), + fb -> expectedBooleanFlag.apply(fb).addTarget(0, "a", "b") + .addContextTarget(ContextKind.DEFAULT, 0) + ); + verifyFlag( + f -> f.variationForUser("a", true).variationForUser("a", true), + fb -> expectedBooleanFlag.apply(fb).addTarget(0, "a") + .addContextTarget(ContextKind.DEFAULT, 0) + ); + verifyFlag( + f -> f.variationForUser("a", true).variationForUser("a", false), + fb -> expectedBooleanFlag.apply(fb).addTarget(1, "a") + .addContextTarget(ContextKind.DEFAULT, 1) + ); + verifyFlag( + f -> f.variationForUser("a", false).variationForUser("b", true).variationForUser("c", false), + fb -> expectedBooleanFlag.apply(fb).addTarget(0, "b").addTarget(1, "a", "c") + .addContextTarget(ContextKind.DEFAULT, 0).addContextTarget(ContextKind.DEFAULT, 1) + ); + verifyFlag( + f -> f.variationForUser("a", true).variationForUser("b", true).variationForUser("a", false), + fb -> expectedBooleanFlag.apply(fb).addTarget(0, "b").addTarget(1, "a") + .addContextTarget(ContextKind.DEFAULT, 0).addContextTarget(ContextKind.DEFAULT, 1) + ); + + Function expectedStringFlag = fb -> + fb.variations("red", "green", "blue").on(true).offVariation(0).fallthroughVariation(2); + + 
verifyFlag( + f -> f.variations(THREE_STRING_VALUES).offVariation(0).fallthroughVariation(2) + .variationForUser("a", 2).variationForUser("b", 2), + fb -> expectedStringFlag.apply(fb).addTarget(2, "a", "b") + .addContextTarget(ContextKind.DEFAULT, 2) + ); + verifyFlag( + f -> f.variations(THREE_STRING_VALUES).offVariation(0).fallthroughVariation(2) + .variationForUser("a", 2).variationForUser("b", 1).variationForUser("c", 2), + fb -> expectedStringFlag.apply(fb).addTarget(1, "b").addTarget(2, "a", "c") + .addContextTarget(ContextKind.DEFAULT, 1).addContextTarget(ContextKind.DEFAULT, 2) + ); + + // clear previously set targets + verifyFlag( + f -> f.variationForUser("a", true).clearTargets(), + expectedBooleanFlag + ); + } + + @Test + public void contextTargets() throws Exception { + ContextKind kind1 = ContextKind.of("org"), kind2 = ContextKind.of("other"); + + Function expectedBooleanFlag = fb -> + fb.variations(true, false).on(true).offVariation(1).fallthroughVariation(0); + + verifyFlag( + f -> f.variationForKey(kind1, "a", true).variationForKey(kind1, "b", true), + fb -> expectedBooleanFlag.apply(fb).addContextTarget(kind1, 0, "a", "b") + ); + verifyFlag( + f -> f.variationForKey(kind1, "a", true).variationForKey(kind2, "a", true), + fb -> expectedBooleanFlag.apply(fb).addContextTarget(kind1, 0, "a").addContextTarget(kind2, 0, "a") + ); + verifyFlag( + f -> f.variationForKey(kind1, "a", true).variationForKey(kind1, "a", true), + fb -> expectedBooleanFlag.apply(fb).addContextTarget(kind1, 0, "a") + ); + verifyFlag( + f -> f.variationForKey(kind1, "a", true).variationForKey(kind1, "a", false), + fb -> expectedBooleanFlag.apply(fb).addContextTarget(kind1, 1, "a") + ); + + Function expectedStringFlag = fb -> + fb.variations("red", "green", "blue").on(true).offVariation(0).fallthroughVariation(2); + + verifyFlag( + f -> f.variations(THREE_STRING_VALUES).offVariation(0).fallthroughVariation(2) + .variationForKey(kind1, "a", 2).variationForKey(kind1, "b", 2), + fb -> 
expectedStringFlag.apply(fb).addContextTarget(kind1, 2, "a", "b") + ); + + // clear previously set targets + verifyFlag( + f -> f.variationForKey(kind1, "a", true).clearTargets(), + expectedBooleanFlag + ); + } + + @Test + public void flagRules() throws Exception { + Function expectedBooleanFlag = fb -> + fb.variations(true, false).on(true).offVariation(1).fallthroughVariation(0); + + // match that returns variation 0/true + Function matchReturnsVariation0 = fb -> + expectedBooleanFlag.apply(fb).addRule("rule0", 0, + "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Lucy\"]}"); + + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")).thenReturn(true), + matchReturnsVariation0 + ); + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")).thenReturn(0), + matchReturnsVariation0 + ); + + // match that returns variation 1/false + Function matchReturnsVariation1 = fb -> + expectedBooleanFlag.apply(fb).addRule("rule0", 1, + "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Lucy\"]}"); + + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")).thenReturn(false), + matchReturnsVariation1 + ); + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")).thenReturn(1), + matchReturnsVariation1 + ); + + // negated match + verifyFlag( + f -> f.ifNotMatch("name", LDValue.of("Lucy")).thenReturn(true), + fb -> expectedBooleanFlag.apply(fb).addRule("rule0", 0, + "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Lucy\"],\"negate\":true}") + ); + + // context kinds + verifyFlag( + f -> f.ifMatch(ContextKind.of("org"), "name", LDValue.of("Catco")).thenReturn(true), + fb -> expectedBooleanFlag.apply(fb).addRule("rule0", 0, + "{\"contextKind\":\"org\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Catco\"]}") + ); + verifyFlag( + f -> f.ifNotMatch(ContextKind.of("org"), "name", LDValue.of("Catco")).thenReturn(true), + fb -> expectedBooleanFlag.apply(fb).addRule("rule0", 0, + 
"{\"contextKind\":\"org\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Catco\"],\"negate\":true}") + ); + + // multiple clauses + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")) + .andMatch("country", LDValue.of("gb")) + .thenReturn(true), + fb -> expectedBooleanFlag.apply(fb).addRule("rule0", 0, + "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Lucy\"]}", + "{\"contextKind\":\"user\",\"attribute\":\"country\",\"op\":\"in\",\"values\":[\"gb\"]}") + ); + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")) + .andMatch("country", LDValue.of("gb")) + .thenReturn(true), + fb -> expectedBooleanFlag.apply(fb).addRule("rule0", 0, + "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Lucy\"]}", + "{\"contextKind\":\"user\",\"attribute\":\"country\",\"op\":\"in\",\"values\":[\"gb\"]}") + ); + + // multiple rules + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")).thenReturn(true) + .ifMatch("name", LDValue.of("Mina")).thenReturn(false), + fb -> expectedBooleanFlag.apply(fb) + .addRule("rule0", 0, "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Lucy\"]}") + .addRule("rule1", 1, "{\"contextKind\":\"user\",\"attribute\":\"name\",\"op\":\"in\",\"values\":[\"Mina\"]}") + ); + + // clear previously set rules + verifyFlag( + f -> f.ifMatch("name", LDValue.of("Lucy")).thenReturn(true).clearRules(), + expectedBooleanFlag + ); + } + + private void verifyFlag( + Function configureFlag, + Function configureExpectedFlag + ) throws Exception { + ModelBuilders.FlagBuilder expectedFlag = flagBuilder("flagkey").version(1).salt(""); + expectedFlag = configureExpectedFlag.apply(expectedFlag); + + TestData td = TestData.dataSource(); + + DataSource ds = td.build(clientContext("", new LDConfig.Builder().build(), updates)); + ds.start(); + + td.update(configureFlag.apply(td.flag("flagkey"))); + + assertThat(updates.upserts.size(), equalTo(1)); + UpsertParams up = 
updates.upserts.take(); + ItemDescriptor flag = up.item; + assertJsonEquals(flagJson(expectedFlag, 1), flagJson(flag)); + } + + private static String flagJson(ModelBuilders.FlagBuilder flagBuilder, int version) { + return DataModel.FEATURES.serialize(new ItemDescriptor(version, flagBuilder.build())); + } + + private static String flagJson(ItemDescriptor flag) { + return DataModel.FEATURES.serialize(flag); + } + + private static class UpsertParams { + final DataKind kind; + final String key; + final ItemDescriptor item; + + UpsertParams(DataKind kind, String key, ItemDescriptor item) { + this.kind = kind; + this.key = key; + this.item = item; + } + } + + private static class CapturingDataSourceUpdates implements DataSourceUpdateSink { + BlockingQueue> inits = new LinkedBlockingQueue<>(); + BlockingQueue upserts = new LinkedBlockingQueue<>(); + boolean valid; + + @Override + public boolean init(FullDataSet allData) { + inits.add(allData); + return true; + } + + @Override + public boolean upsert(DataKind kind, String key, ItemDescriptor item) { + upserts.add(new UpsertParams(kind, key, item)); + return true; + } + + @Override + public DataStoreStatusProvider getDataStoreStatusProvider() { + return null; + } + + @Override + public void updateStatus(State newState, ErrorInfo newError) { + valid = newState == State.VALID; + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataWithClientTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataWithClientTest.java new file mode 100644 index 0000000..e063524 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/TestDataWithClientTest.java @@ -0,0 +1,126 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.LDClient; +import 
com.launchdarkly.sdk.server.LDConfig; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +@SuppressWarnings("javadoc") +public class TestDataWithClientTest { + private static final String SDK_KEY = "sdk-key"; + + private TestData td = TestData.dataSource(); + private LDConfig config = new LDConfig.Builder() + .dataSource(td) + .events(Components.noEvents()) + .build(); + + @Test + public void initializesWithEmptyData() throws Exception { + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.isInitialized(), is(true)); + } + } + + @Test + public void initializesWithFlag() throws Exception { + td.update(td.flag("flag").on(true)); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.boolVariation("flag", LDContext.create("user"), false), is(true)); + } + } + + @Test + public void updatesFlag() throws Exception { + td.update(td.flag("flag").on(false)); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.boolVariation("flag", LDContext.create("user"), false), is(false)); + + td.update(td.flag("flag").on(true)); + + assertThat(client.boolVariation("flag", LDContext.create("user"), false), is(true)); + } + } + + @Test + public void usesTargets() throws Exception { + td.update(td.flag("flag").fallthroughVariation(false).variationForUser("user1", true)); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.boolVariation("flag", LDContext.create("user1"), false), is(true)); + assertThat(client.boolVariation("flag", LDContext.create("user2"), false), is(false)); + } + } + + @Test + public void usesRules() throws Exception { + td.update(td.flag("flag").fallthroughVariation(false) + 
.ifMatch("name", LDValue.of("Lucy")).thenReturn(true) + .ifMatch("name", LDValue.of("Mina")).thenReturn(true)); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.boolVariation("flag", LDContext.builder("user1").name("Lucy").build(), false), is(true)); + assertThat(client.boolVariation("flag", LDContext.builder("user2").name("Mina").build(), false), is(true)); + assertThat(client.boolVariation("flag", LDContext.builder("user3").name("Quincy").build(), false), is(false)); + } + } + + @Test + public void nonBooleanFlags() throws Exception { + td.update(td.flag("flag").variations(LDValue.of("red"), LDValue.of("green"), LDValue.of("blue")) + .offVariation(0).fallthroughVariation(2) + .variationForUser("user1", 1) + .ifMatch("name", LDValue.of("Mina")).thenReturn(1)); + + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.stringVariation("flag", LDContext.builder("user1").name("Lucy").build(), ""), equalTo("green")); + assertThat(client.stringVariation("flag", LDContext.builder("user2").name("Mina").build(), ""), equalTo("green")); + assertThat(client.stringVariation("flag", LDContext.builder("user3").name("Quincy").build(), ""), equalTo("blue")); + + td.update(td.flag("flag").on(false)); + + assertThat(client.stringVariation("flag", LDContext.builder("user1").name("Lucy").build(), ""), equalTo("red")); + } + } + + @Test + public void canUpdateStatus() throws Exception { + try (LDClient client = new LDClient(SDK_KEY, config)) { + assertThat(client.getDataSourceStatusProvider().getStatus().getState(), equalTo(State.VALID)); + + ErrorInfo ei = ErrorInfo.fromHttpError(500); + td.updateStatus(State.INTERRUPTED, ei); + + assertThat(client.getDataSourceStatusProvider().getStatus().getState(), equalTo(State.INTERRUPTED)); + assertThat(client.getDataSourceStatusProvider().getStatus().getLastError(), equalTo(ei)); + } + } + + @Test + public void dataSourcePropagatesToMultipleClients() throws Exception { + 
td.update(td.flag("flag").on(true)); + + try (LDClient client1 = new LDClient(SDK_KEY, config)) { + try (LDClient client2 = new LDClient(SDK_KEY, config)) { + assertThat(client1.boolVariation("flag", LDContext.create("user"), false), is(true)); + assertThat(client2.boolVariation("flag", LDContext.create("user"), false), is(true)); + + td.update(td.flag("flag").on(false)); + + assertThat(client1.boolVariation("flag", LDContext.create("user"), false), is(false)); + assertThat(client2.boolVariation("flag", LDContext.create("user"), false), is(false)); + } + } + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilderTest.java new file mode 100644 index 0000000..53b4548 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/WrapperInfoBuilderTest.java @@ -0,0 +1,31 @@ +package com.launchdarkly.sdk.server.integrations; + +import com.launchdarkly.sdk.server.Components; +import com.launchdarkly.sdk.server.interfaces.ApplicationInfo; + +import com.launchdarkly.sdk.server.interfaces.WrapperInfo; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +@SuppressWarnings("javadoc") +public class WrapperInfoBuilderTest { + @Test + public void theDefaultInstanceContainsNullValues() { + WrapperInfo defaultInstance = Components.wrapperInfo() + .build(); + assertNull(defaultInstance.getWrapperName()); + assertNull(defaultInstance.getWrapperVersion()); + } + + @Test + public void setValuesAreReflectedInBuiltInstance() { + WrapperInfo clojureWrapper = Components.wrapperInfo() + .wrapperName("Clojure") + .wrapperVersion("0.0.1") + .build(); + assertEquals("Clojure", clojureWrapper.getWrapperName()); + assertEquals("0.0.1", clojureWrapper.getWrapperVersion()); + } +} diff --git 
a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/package-info.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/package-info.java new file mode 100644 index 0000000..320ffae --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/integrations/package-info.java @@ -0,0 +1,4 @@ +/** + * Test classes and methods for testing general SDK integration functionality. + */ +package com.launchdarkly.sdk.server.integrations; diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/BigSegmentMembershipBuilderTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/BigSegmentMembershipBuilderTest.java new file mode 100644 index 0000000..ea0941e --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/BigSegmentMembershipBuilderTest.java @@ -0,0 +1,125 @@ +package com.launchdarkly.sdk.server.interfaces; + +import static com.launchdarkly.sdk.server.TestUtil.assertFullyEqual; +import static com.launchdarkly.sdk.server.TestUtil.assertFullyUnequal; +import static com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.createMembershipFromSegmentRefs; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import com.launchdarkly.sdk.server.subsystems.BigSegmentStoreTypes.Membership; + +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; + +@SuppressWarnings("javadoc") +public class BigSegmentMembershipBuilderTest { + + // MembershipBuilder is private to BigSegmentStoreTypes, we test it through + // createMembershipFromSegmentRefs + + @Test + public void empty() { + Membership m0 = createMembershipFromSegmentRefs(null, null); + Membership m1 = createMembershipFromSegmentRefs(Collections.emptyList(), null); + Membership m2 = 
createMembershipFromSegmentRefs(null, Collections.emptyList()); + + assertSame(m0, m1); + assertSame(m0, m2); + assertFullyEqual(m0, m1); + + assertNull(m0.checkMembership("arbitrary")); + } + + @Test + public void singleInclusion() { + Membership m0 = createMembershipFromSegmentRefs(Collections.singleton("key1"), null); + Membership m1 = createMembershipFromSegmentRefs(Collections.singleton("key1"), null); + + assertNotSame(m0, m1); + assertFullyEqual(m0, m1); + + assertTrue(m0.checkMembership("key1")); + assertNull(m0.checkMembership("key2")); + + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, Collections.singleton("key1"))); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(Collections.singleton("key2"), null)); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, null)); + } + + @Test + public void multipleInclusions() { + Membership m0 = createMembershipFromSegmentRefs(Arrays.asList("key1", "key2"), null); + Membership m1 = createMembershipFromSegmentRefs(Arrays.asList("key2", "key1"), null); + + assertNotSame(m0, m1); + assertFullyEqual(m0, m1); + + assertTrue(m0.checkMembership("key1")); + assertTrue(m0.checkMembership("key2")); + assertNull(m0.checkMembership("key3")); + + assertFullyUnequal(m0, createMembershipFromSegmentRefs(Arrays.asList("key1", "key2"), Collections.singleton("key3"))); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(Arrays.asList("key1", "key3"), null)); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(Collections.singleton("key1"), null)); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, null)); + } + + @Test + public void singleExclusion() { + Membership m0 = createMembershipFromSegmentRefs(null, Collections.singleton("key1")); + Membership m1 = createMembershipFromSegmentRefs(null, Collections.singleton("key1")); + + assertNotSame(m0, m1); + assertFullyEqual(m0, m1); + + assertFalse(m0.checkMembership("key1")); + assertNull(m0.checkMembership("key2")); + + 
assertFullyUnequal(m0, createMembershipFromSegmentRefs(Collections.singleton("key1"), null)); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, Collections.singleton("key2"))); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, null)); + } + + @Test + public void multipleExclusions() { + Membership m0 = createMembershipFromSegmentRefs(null, Arrays.asList("key1", "key2")); + Membership m1 = createMembershipFromSegmentRefs(null, Arrays.asList("key2", "key1")); + + assertNotSame(m0, m1); + assertFullyEqual(m0, m1); + + assertFalse(m0.checkMembership("key1")); + assertFalse(m0.checkMembership("key2")); + assertNull(m0.checkMembership("key3")); + + assertFullyUnequal(m0, createMembershipFromSegmentRefs(Collections.singleton("key3"), Arrays.asList("key1", "key2"))); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, Arrays.asList("key1", "key3"))); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, Collections.singleton("key1"))); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, null)); + } + + @Test + public void inclusionsAndExclusions() { + // key1 is included; key2 is included and excluded, therefore it's included; key3 is excluded + Membership m0 = createMembershipFromSegmentRefs(Arrays.asList("key1", "key2"), Arrays.asList("key2", "key3")); + Membership m1 = createMembershipFromSegmentRefs(Arrays.asList("key2", "key1"), Arrays.asList("key3", "key2")); + + assertNotSame(m0, m1); + assertFullyEqual(m0, m1); + + assertTrue(m0.checkMembership("key1")); + assertTrue(m0.checkMembership("key2")); + assertFalse(m0.checkMembership("key3")); + assertNull(m0.checkMembership("key4")); + + assertFullyUnequal(m0, createMembershipFromSegmentRefs(Arrays.asList("key1", "key2"), Arrays.asList("key2", "key3", "key4"))); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(Arrays.asList("key1", "key2", "key3"), Arrays.asList("key2", "key3"))); + assertFullyUnequal(m0, 
createMembershipFromSegmentRefs(Collections.singleton("key1"), null)); + assertFullyUnequal(m0, createMembershipFromSegmentRefs(null, null)); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProviderTypesTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProviderTypesTest.java new file mode 100644 index 0000000..2fbf731 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataSourceStatusProviderTypesTest.java @@ -0,0 +1,110 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorInfo; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.ErrorKind; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.State; +import com.launchdarkly.sdk.server.interfaces.DataSourceStatusProvider.Status; +import com.launchdarkly.testhelpers.TypeBehavior; + +import org.junit.Test; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +@SuppressWarnings("javadoc") +public class DataSourceStatusProviderTypesTest { + @Test + public void statusProperties() { + Instant time = Instant.ofEpochMilli(10000); + ErrorInfo e = ErrorInfo.fromHttpError(401); + Status s = new Status(State.VALID, time, e); + assertThat(s.getState(), equalTo(State.VALID)); + assertThat(s.getStateSince(), equalTo(time)); + assertThat(s.getLastError(), sameInstance(e)); + } + + @Test + public void statusEquality() { + List<TypeBehavior.ValueFactory<Status>> allPermutations = new ArrayList<>(); + for (State state: State.values()) { + for (Instant time: new Instant[] { Instant.ofEpochMilli(1000), Instant.ofEpochMilli(2000) }) { + for (ErrorInfo 
e: new ErrorInfo[] { null, ErrorInfo.fromHttpError(400), ErrorInfo.fromHttpError(401) }) { + allPermutations.add(() -> new Status(state, time, e)); + } + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void statusStringRepresentation() { + Status s1 = new Status(State.VALID, Instant.now(), null); + assertThat(s1.toString(), equalTo("Status(VALID," + s1.getStateSince() + ",null)")); + + Status s2 = new Status(State.VALID, Instant.now(), ErrorInfo.fromHttpError(401)); + assertThat(s2.toString(), equalTo("Status(VALID," + s2.getStateSince() + "," + s2.getLastError() + ")")); + } + + @Test + public void errorInfoProperties() { + Instant time = Instant.ofEpochMilli(10000); + ErrorInfo e1 = new ErrorInfo(ErrorKind.ERROR_RESPONSE, 401, "nope", time); + assertThat(e1.getKind(), equalTo(ErrorKind.ERROR_RESPONSE)); + assertThat(e1.getStatusCode(), equalTo(401)); + assertThat(e1.getMessage(), equalTo("nope")); + assertThat(e1.getTime(), equalTo(time)); + + ErrorInfo e2 = ErrorInfo.fromHttpError(401); + assertThat(e2.getKind(), equalTo(ErrorKind.ERROR_RESPONSE)); + assertThat(e2.getStatusCode(), equalTo(401)); + assertThat(e2.getMessage(), nullValue()); + assertThat(e2.getTime(), not(nullValue())); + + Exception ex = new Exception("sorry"); + ErrorInfo e3 = ErrorInfo.fromException(ErrorKind.UNKNOWN, ex); + assertThat(e3.getKind(), equalTo(ErrorKind.UNKNOWN)); + assertThat(e3.getStatusCode(), equalTo(0)); + assertThat(e3.getMessage(), equalTo(ex.toString())); + assertThat(e3.getTime(), not(nullValue())); + } + + @Test + public void errorInfoEquality() { + List<TypeBehavior.ValueFactory<ErrorInfo>> allPermutations = new ArrayList<>(); + for (ErrorKind kind: ErrorKind.values()) { + for (int statusCode: new int[] { 0, 1 }) { + for (String message: new String[] { null, "a", "b" }) { + for (Instant time: new Instant[] { Instant.ofEpochMilli(1000), Instant.ofEpochMilli(2000) }) { + allPermutations.add(() -> new ErrorInfo(kind, statusCode, message, time)); + } + } + } + } + 
TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void errorStringRepresentation() { + ErrorInfo e1 = new ErrorInfo(ErrorKind.ERROR_RESPONSE, 401, null, Instant.now()); + assertThat(e1.toString(), equalTo("ERROR_RESPONSE(401)@" + e1.getTime())); + + ErrorInfo e2 = new ErrorInfo(ErrorKind.ERROR_RESPONSE, 401, "nope", Instant.now()); + assertThat(e2.toString(), equalTo("ERROR_RESPONSE(401,nope)@" + e2.getTime())); + + ErrorInfo e3 = new ErrorInfo(ErrorKind.NETWORK_ERROR, 0, "hello", Instant.now()); + assertThat(e3.toString(), equalTo("NETWORK_ERROR(hello)@" + e3.getTime())); + + ErrorInfo e4 = new ErrorInfo(ErrorKind.STORE_ERROR, 0, null, Instant.now()); + assertThat(e4.toString(), equalTo("STORE_ERROR@" + e4.getTime())); + + ErrorInfo e5 = new ErrorInfo(ErrorKind.UNKNOWN, 0, null, null); + assertThat(e5.toString(), equalTo("UNKNOWN")); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProviderTypesTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProviderTypesTest.java new file mode 100644 index 0000000..4cdd368 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreStatusProviderTypesTest.java @@ -0,0 +1,79 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.CacheStats; +import com.launchdarkly.sdk.server.interfaces.DataStoreStatusProvider.Status; +import com.launchdarkly.testhelpers.TypeBehavior; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +@SuppressWarnings("javadoc") +public class DataStoreStatusProviderTypesTest { + @Test + public void statusProperties() { + Status s1 = new Status(true, false); + assertThat(s1.isAvailable(), equalTo(true)); + assertThat(s1.isRefreshNeeded(), 
equalTo(false)); + + Status s2 = new Status(false, true); + assertThat(s2.isAvailable(), equalTo(false)); + assertThat(s2.isRefreshNeeded(), equalTo(true)); + } + + @Test + public void statusEquality() { + List<TypeBehavior.ValueFactory<Status>> allPermutations = new ArrayList<>(); + allPermutations.add(() -> new Status(false, false)); + allPermutations.add(() -> new Status(false, true)); + allPermutations.add(() -> new Status(true, false)); + allPermutations.add(() -> new Status(true, true)); + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void statusStringRepresentation() { + assertThat(new Status(true, false).toString(), equalTo("Status(true,false)")); + } + + @Test + public void cacheStatsProperties() { + CacheStats stats = new CacheStats(1, 2, 3, 4, 5, 6); + assertThat(stats.getHitCount(), equalTo(1L)); + assertThat(stats.getMissCount(), equalTo(2L)); + assertThat(stats.getLoadSuccessCount(), equalTo(3L)); + assertThat(stats.getLoadExceptionCount(), equalTo(4L)); + assertThat(stats.getTotalLoadTime(), equalTo(5L)); + assertThat(stats.getEvictionCount(), equalTo(6L)); + } + + @Test + public void cacheStatsEquality() { + List<TypeBehavior.ValueFactory<CacheStats>> allPermutations = new ArrayList<>(); + int[] values = new int[] { 0, 1, 2 }; + for (int hit: values) { + for (int miss: values) { + for (int loadSuccess: values) { + for (int loadException: values) { + for (int totalLoad: values) { + for (int eviction: values) { + allPermutations.add(() -> new CacheStats(hit, miss, loadSuccess, loadException, totalLoad, eviction)); + } + } + } + } + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void cacheStatsStringRepresentation() { + CacheStats stats = new CacheStats(1, 2, 3, 4, 5, 6); + assertThat(stats.toString(), equalTo("{hit=1, miss=2, loadSuccess=3, loadException=4, totalLoadTime=5, evictionCount=6}")); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreTypesTest.java 
b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreTypesTest.java new file mode 100644 index 0000000..b51ce32 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/DataStoreTypesTest.java @@ -0,0 +1,164 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSortedMap; +import com.launchdarkly.sdk.server.DataModel; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.DataKind; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.FullDataSet; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.ItemDescriptor; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.KeyedItems; +import com.launchdarkly.sdk.server.subsystems.DataStoreTypes.SerializedItemDescriptor; +import com.launchdarkly.testhelpers.TypeBehavior; + +import org.junit.Test; + +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +@SuppressWarnings("javadoc") +public class DataStoreTypesTest { + @Test + public void dataKindProperties() { + Function<ItemDescriptor, String> serializer = item -> "version=" + item.getVersion(); + Function<String, ItemDescriptor> deserializer = s -> new ItemDescriptor(0, s); + + DataKind k = new DataKind("foo", serializer, deserializer); + + assertThat(k.getName(), equalTo("foo")); + assertThat(k.serialize(new ItemDescriptor(9, null)), equalTo("version=9")); + assertThat(k.deserialize("x"), equalTo(new ItemDescriptor(0, "x"))); + + assertThat(k.toString(), equalTo("DataKind(foo)")); + } + + @Test + public void itemDescriptorProperties() { + Object o = new Object(); + 
ItemDescriptor i1 = new ItemDescriptor(1, o); + assertThat(i1.getVersion(), equalTo(1)); + assertThat(i1.getItem(), sameInstance(o)); + + ItemDescriptor i2 = ItemDescriptor.deletedItem(2); + assertThat(i2.getVersion(), equalTo(2)); + assertThat(i2.getItem(), nullValue()); + } + + @Test + public void itemDescriptorEquality() { + List<TypeBehavior.ValueFactory<ItemDescriptor>> allPermutations = new ArrayList<>(); + for (int version: new int[] { 1, 2 }) { + for (Object item: new Object[] { null, "a", "b" }) { + allPermutations.add(() -> new ItemDescriptor(version, item)); + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void itemDescriptorStringRepresentation() { + assertThat(new ItemDescriptor(1, "a").toString(), equalTo("ItemDescriptor(1,a)")); + assertThat(new ItemDescriptor(2, null).toString(), equalTo("ItemDescriptor(2,null)")); + } + + @Test + public void serializedItemDescriptorProperties() { + SerializedItemDescriptor si1 = new SerializedItemDescriptor(1, false, "x"); + assertThat(si1.getVersion(), equalTo(1)); + assertThat(si1.isDeleted(), equalTo(false)); + assertThat(si1.getSerializedItem(), equalTo("x")); + + SerializedItemDescriptor si2 = new SerializedItemDescriptor(2, true, null); + assertThat(si2.getVersion(), equalTo(2)); + assertThat(si2.isDeleted(), equalTo(true)); + assertThat(si2.getSerializedItem(), nullValue()); + } + + @Test + public void serializedItemDescriptorEquality() { + List<TypeBehavior.ValueFactory<SerializedItemDescriptor>> allPermutations = new ArrayList<>(); + for (int version: new int[] { 1, 2 }) { + for (boolean deleted: new boolean[] { true, false }) { + for (String item: new String[] { null, "a", "b" }) { + allPermutations.add(() -> new SerializedItemDescriptor(version, deleted, item)); + } + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @Test + public void serializedItemDescriptorStringRepresentation() { + assertThat(new SerializedItemDescriptor(1, false, "a").toString(), equalTo("SerializedItemDescriptor(1,false,a)")); + assertThat(new 
SerializedItemDescriptor(2, true, null).toString(), equalTo("SerializedItemDescriptor(2,true,null)")); + } + + @SuppressWarnings("unchecked") + @Test + public void keyedItemsProperties() { + ItemDescriptor item1 = new ItemDescriptor(1, "a"); + ItemDescriptor item2 = new ItemDescriptor(2, "b"); + + KeyedItems<ItemDescriptor> items = new KeyedItems<>(ImmutableSortedMap.of("key1", item1, "key2", item2).entrySet()); + + assertThat(items.getItems(), contains( + new AbstractMap.SimpleEntry<>("key1", item1), + new AbstractMap.SimpleEntry<>("key2", item2) + )); + + KeyedItems<ItemDescriptor> emptyItems = new KeyedItems<>(null); + + assertThat(emptyItems.getItems(), emptyIterable()); + } + + @Test + public void keyedItemsEquality() { + List<TypeBehavior.ValueFactory<KeyedItems<ItemDescriptor>>> allPermutations = new ArrayList<>(); + for (String key: new String[] { "key1", "key2"}) { + for (int version: new int[] { 1, 2 }) { + for (String data: new String[] { null, "a", "b" }) { + allPermutations.add(() -> new KeyedItems<>(ImmutableMap.of(key, new ItemDescriptor(version, data)).entrySet())); + } + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } + + @SuppressWarnings("unchecked") + @Test + public void fullDataSetProperties() { + ItemDescriptor item1 = new ItemDescriptor(1, "a"); + KeyedItems<ItemDescriptor> items = new KeyedItems<>(ImmutableMap.of("key1", item1).entrySet()); + FullDataSet<ItemDescriptor> data = new FullDataSet<>(ImmutableMap.of(DataModel.FEATURES, items).entrySet()); + + assertThat(data.getData(), contains( + new AbstractMap.SimpleEntry<>(DataModel.FEATURES, items) + )); + + FullDataSet<ItemDescriptor> emptyData = new FullDataSet<>(null); + + assertThat(emptyData.getData(), emptyIterable()); + } + + @Test + public void fullDataSetEquality() { + List<TypeBehavior.ValueFactory<FullDataSet<ItemDescriptor>>> allPermutations = new ArrayList<>(); + for (DataKind kind: new DataKind[] { DataModel.FEATURES, DataModel.SEGMENTS }) { + for (int version: new int[] { 1, 2 }) { + allPermutations.add(() -> new FullDataSet<>( + ImmutableMap.of(kind, + new KeyedItems<>(ImmutableMap.of("key", new ItemDescriptor(version, "a")).entrySet()) + 
).entrySet())); + } + } + TypeBehavior.checkEqualsAndHashCode(allPermutations); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/HttpAuthenticationTypesTest.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/HttpAuthenticationTypesTest.java new file mode 100644 index 0000000..fe71a75 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/HttpAuthenticationTypesTest.java @@ -0,0 +1,18 @@ +package com.launchdarkly.sdk.server.interfaces; + +import com.launchdarkly.sdk.server.interfaces.HttpAuthentication.Challenge; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +@SuppressWarnings("javadoc") +public class HttpAuthenticationTypesTest { + @Test + public void challengeProperties() { + Challenge c = new Challenge("Basic", "realm"); + assertThat(c.getScheme(), equalTo("Basic")); + assertThat(c.getRealm(), equalTo("realm")); + } +} diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/package-info.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/package-info.java new file mode 100644 index 0000000..fa170e4 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/interfaces/package-info.java @@ -0,0 +1,4 @@ +/** + * Test classes and methods for testing general SDK functionality. + */ +package com.launchdarkly.sdk.server.interfaces; diff --git a/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/package-info.java b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/package-info.java new file mode 100644 index 0000000..b22b716 --- /dev/null +++ b/lib/sdk/server/src/test/java/com/launchdarkly/sdk/server/package-info.java @@ -0,0 +1,4 @@ +/** + * Test classes and methods for testing general SDK functionality. 
+ */ +package com.launchdarkly.sdk.server; diff --git a/lib/sdk/server/src/test/resources/filesource/all-properties.json b/lib/sdk/server/src/test/resources/filesource/all-properties.json new file mode 100644 index 0000000..bdcefaa --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/all-properties.json @@ -0,0 +1,21 @@ +{ + "flags": { + "flag1": { + "key": "flag1", + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] + } + }, + "flagValues": { + "flag2": "value2" + }, + "segments": { + "seg1": { + "key": "seg1", + "included": ["user1"] + } + } +} diff --git a/lib/sdk/server/src/test/resources/filesource/all-properties.yml b/lib/sdk/server/src/test/resources/filesource/all-properties.yml new file mode 100644 index 0000000..ce02346 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/all-properties.yml @@ -0,0 +1,17 @@ +--- +flags: + flag1: + key: flag1 + "on": true + fallthrough: + variation: 2 + variations: + - fall + - "off" + - "on" +flagValues: + flag2: value2 +segments: + seg1: + key: seg1 + included: ["user1"] diff --git a/lib/sdk/server/src/test/resources/filesource/flag-only.json b/lib/sdk/server/src/test/resources/filesource/flag-only.json new file mode 100644 index 0000000..f0c076b --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/flag-only.json @@ -0,0 +1,12 @@ +{ + "flags": { + "flag1": { + "key": "flag1", + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/resources/filesource/flag-only.yml b/lib/sdk/server/src/test/resources/filesource/flag-only.yml new file mode 100644 index 0000000..b71a399 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/flag-only.yml @@ -0,0 +1,11 @@ +--- +flags: + flag1: + key: flag1 + "on": true + fallthrough: + variation: 2 + variations: + - fall + - "off" + - "on" diff --git 
a/lib/sdk/server/src/test/resources/filesource/flag-with-duplicate-key.json b/lib/sdk/server/src/test/resources/filesource/flag-with-duplicate-key.json new file mode 100644 index 0000000..b6a1ae0 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/flag-with-duplicate-key.json @@ -0,0 +1,12 @@ +{ + "flags": { + "another": { + "key": "another", + "on": true + }, + "flag1": { + "key": "flag1", + "on": false + } + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/resources/filesource/malformed.json b/lib/sdk/server/src/test/resources/filesource/malformed.json new file mode 100644 index 0000000..98232c6 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/malformed.json @@ -0,0 +1 @@ +{ diff --git a/lib/sdk/server/src/test/resources/filesource/malformed.yml b/lib/sdk/server/src/test/resources/filesource/malformed.yml new file mode 100644 index 0000000..c04a34e --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/malformed.yml @@ -0,0 +1,2 @@ +- a +b: ~ diff --git a/lib/sdk/server/src/test/resources/filesource/no-data.json b/lib/sdk/server/src/test/resources/filesource/no-data.json new file mode 100644 index 0000000..e69de29 diff --git a/lib/sdk/server/src/test/resources/filesource/segment-only.json b/lib/sdk/server/src/test/resources/filesource/segment-only.json new file mode 100644 index 0000000..da134d5 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/segment-only.json @@ -0,0 +1,8 @@ +{ + "segments": { + "seg1": { + "key": "seg1", + "included": ["user1"] + } + } +} diff --git a/lib/sdk/server/src/test/resources/filesource/segment-only.yml b/lib/sdk/server/src/test/resources/filesource/segment-only.yml new file mode 100644 index 0000000..cfbab40 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/segment-only.yml @@ -0,0 +1,5 @@ +--- +segments: + seg1: + key: seg1 + included: ["user1"] diff --git a/lib/sdk/server/src/test/resources/filesource/segment-with-duplicate-key.json 
b/lib/sdk/server/src/test/resources/filesource/segment-with-duplicate-key.json new file mode 100644 index 0000000..a71b5e6 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/segment-with-duplicate-key.json @@ -0,0 +1,12 @@ +{ + "segments": { + "another": { + "key": "another", + "included": [] + }, + "seg1": { + "key": "seg1", + "included": ["user1a"] + } + } +} diff --git a/lib/sdk/server/src/test/resources/filesource/value-only.json b/lib/sdk/server/src/test/resources/filesource/value-only.json new file mode 100644 index 0000000..4b6444e --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/value-only.json @@ -0,0 +1,6 @@ + +{ + "flagValues": { + "flag2": "value2" + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/resources/filesource/value-only.yml b/lib/sdk/server/src/test/resources/filesource/value-only.yml new file mode 100644 index 0000000..821e256 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/value-only.yml @@ -0,0 +1,3 @@ +--- +flagValues: + flag2: value2 diff --git a/lib/sdk/server/src/test/resources/filesource/value-with-duplicate-key.json b/lib/sdk/server/src/test/resources/filesource/value-with-duplicate-key.json new file mode 100644 index 0000000..abd6dc7 --- /dev/null +++ b/lib/sdk/server/src/test/resources/filesource/value-with-duplicate-key.json @@ -0,0 +1,6 @@ +{ + "flagValues": { + "flag1": "value1", + "flag2": "value2a" + } +} \ No newline at end of file diff --git a/lib/sdk/server/src/test/resources/logback.xml b/lib/sdk/server/src/test/resources/logback.xml new file mode 100644 index 0000000..f27284e --- /dev/null +++ b/lib/sdk/server/src/test/resources/logback.xml @@ -0,0 +1,16 @@ + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36}:%line - %msg%n + + + + + + + + + + diff --git a/lib/shared/common/gradle.properties b/lib/shared/common/gradle.properties index edcd2c6..7fd8369 100644 --- a/lib/shared/common/gradle.properties +++ b/lib/shared/common/gradle.properties @@ -1,4 
+1,7 @@ +#x-release-please-start-version version=2.1.1 +#x-release-please-end + # The following empty ossrh properties are used by LaunchDarkly's internal integration testing framework # and should not be needed for typical development purposes (including by third-party developers). ossrhUsername= diff --git a/lib/shared/internal/gradle.properties b/lib/shared/internal/gradle.properties index 3fd260b..f893bbc 100644 --- a/lib/shared/internal/gradle.properties +++ b/lib/shared/internal/gradle.properties @@ -1,4 +1,7 @@ +#x-release-please-start-version version=1.3.0 +#x-release-please-end + # The following empty ossrh properties are used by LaunchDarkly's internal integration testing framework # and should not be needed for typical development purposes (including by third-party developers). ossrhUsername= diff --git a/release-please-config.json b/release-please-config.json index c76408f..0b476be 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -26,6 +26,15 @@ "extra-files": [ "gradle.properties" ] + }, + "lib/sdk/server": { + "release-type": "simple", + "bump-minor-pre-major": true, + "include-v-in-tag": false, + "extra-files": [ + "gradle.properties", + "src/main/java/com/launchdarkly/sdk/server/Version.java" + ] } } }