diff --git a/.github/ISSUE_TEMPLATE/package-sdk-common--bug_report.md b/.github/ISSUE_TEMPLATE/package-sdk-common--bug_report.md index 4cf1e86..d698897 100644 --- a/.github/ISSUE_TEMPLATE/package-sdk-common--bug_report.md +++ b/.github/ISSUE_TEMPLATE/package-sdk-common--bug_report.md @@ -1,20 +1,36 @@ --- -name: Feature request for the java-sdk-common package -about: Suggest an idea for this project +name: 'Bug report for the java-sdk-common package' +about: Create a report to help us improve title: '' -labels: 'package: java-sdk-common, enhancement' +labels: 'package: java-sdk-common, bug' assignees: '' - --- -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I would love to see the SDK [...does something new...] +**Is this a support request?** +This issue tracker is maintained by LaunchDarkly SDK developers and is intended for feedback on the code in this library. If you're not sure whether the problem you are having is specifically related to this library, or to the LaunchDarkly service overall, it may be more appropriate to contact the LaunchDarkly support team; they can help to investigate the problem and will consult the SDK team if necessary. You can submit a support request by going [here](https://support.launchdarkly.com/) and clicking "submit a request", or by emailing support@launchdarkly.com. + +Note that issues filed on this issue tracker are publicly accessible. Do not provide any private account information on your issues. If your problem is specific to your account, you should submit a support request as described above. + +**Describe the bug** +A clear and concise description of what the bug is. + +**To reproduce** +Steps to reproduce the behavior. + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Logs** +If applicable, add any log output related to your problem. + +**SDK version** +The version of this SDK that you are using. -**Describe the solution you'd like** -A clear and concise description of what you want to happen. +**Language version, developer tools** +For instance, Go 1.11 or Ruby 2.5.3. If you are using a language that requires a separate compiler, such as C, please include the name and version of the compiler too. -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. +**OS/platform** +For instance, Ubuntu 16.04, Windows 10, or Android 4.0.3. If your code is running in a browser, please also include the browser type and version. **Additional context** -Add any other context about the feature request here. \ No newline at end of file +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/package-sdk-common--feature_request.md b/.github/ISSUE_TEMPLATE/package-sdk-common--feature_request.md new file mode 100644 index 0000000..4cf1e86 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/package-sdk-common--feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request for the java-sdk-common package +about: Suggest an idea for this project +title: '' +labels: 'package: java-sdk-common, enhancement' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I would love to see the SDK [...does something new...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. 
+ +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context about the feature request here. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/package-sdk-commonl--bug_report.md b/.github/ISSUE_TEMPLATE/package-sdk-internal--bug_report.md similarity index 94% rename from .github/ISSUE_TEMPLATE/package-sdk-commonl--bug_report.md rename to .github/ISSUE_TEMPLATE/package-sdk-internal--bug_report.md index d698897..ff0f432 100644 --- a/.github/ISSUE_TEMPLATE/package-sdk-commonl--bug_report.md +++ b/.github/ISSUE_TEMPLATE/package-sdk-internal--bug_report.md @@ -1,8 +1,8 @@ --- -name: 'Bug report for the java-sdk-common package' +name: 'Bug report for the java-sdk-internal package' about: Create a report to help us improve title: '' -labels: 'package: java-sdk-common, bug' +labels: 'package: java-sdk-internal, bug' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/package-sdk-internal--feature_request.md b/.github/ISSUE_TEMPLATE/package-sdk-internal--feature_request.md new file mode 100644 index 0000000..5ed3aa5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/package-sdk-internal--feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request for the java-sdk-internal package +about: Suggest an idea for this project +title: '' +labels: 'package: java-sdk-internal, enhancement' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I would love to see the SDK [...does something new...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context about the feature request here. \ No newline at end of file diff --git a/.github/workflows/java-sdk-internal.yml b/.github/workflows/java-sdk-internal.yml new file mode 100644 index 0000000..90a8f8a --- /dev/null +++ b/.github/workflows/java-sdk-internal.yml @@ -0,0 +1,23 @@ +name: java-sdk-internal + +on: + push: + branches: [main, 'feat/**'] + paths-ignore: + - '**.md' #Do not need to run CI for markdown changes. + pull_request: + branches: [main, 'feat/**'] + paths-ignore: + - '**.md' + +jobs: + build-test-java-sdk-internal: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Shared CI Steps + uses: ./.github/actions/ci + with: + workspace_path: 'lib/shared/internal' + java_version: 8 diff --git a/.github/workflows/manual-publish-docs.yml b/.github/workflows/manual-publish-docs.yml index 837956d..4894a74 100644 --- a/.github/workflows/manual-publish-docs.yml +++ b/.github/workflows/manual-publish-docs.yml @@ -8,6 +8,7 @@ on: options: - lib/java-server-sdk-otel - lib/shared/common + - lib/shared/internal dry_run: description: 'Is this a dry run. If so no docs will be published.' type: boolean diff --git a/.github/workflows/manual-publish.yml b/.github/workflows/manual-publish.yml index 9330982..f0fb671 100644 --- a/.github/workflows/manual-publish.yml +++ b/.github/workflows/manual-publish.yml @@ -9,6 +9,7 @@ on: options: - lib/java-server-sdk-otel - lib/shared/common + - lib/shared/internal prerelease: description: 'Is this a prerelease.' 
type: boolean diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml index 86ccdff..419467f 100644 --- a/.github/workflows/release-please.yml +++ b/.github/workflows/release-please.yml @@ -52,6 +52,39 @@ jobs: aws_role: ${{ vars.AWS_ROLE_ARN }} token: ${{ secrets.GITHUB_TOKEN }} + release-sdk-internal: + runs-on: ubuntu-latest + needs: release-please + permissions: + id-token: write + contents: write + pull-requests: write + if: ${{ needs.release-please.outputs.package-sdk-internal-released == 'true'}} + steps: + - uses: actions/checkout@v4 + + - uses: launchdarkly/gh-actions/actions/release-secrets@release-secrets-v1.1.0 + name: Get secrets + with: + aws_assume_role: ${{ vars.AWS_ROLE_ARN }} + ssm_parameter_pairs: '/production/common/releasing/sonatype/username = SONATYPE_USER_NAME, + /production/common/releasing/sonatype/password = SONATYPE_PASSWORD' + s3_path_pairs: 'launchdarkly-releaser/java/code-signing-keyring.gpg = code-signing-keyring.gpg' + + - uses: ./.github/actions/full-release + with: + workspace_path: lib/shared/internal + dry_run: true #TODO make this false after testing + prerelease: true #TODO make this false after testing + code_signing_keyring: 'code-signing-keyring.gpg' + signing_key_id: ${{ env.SIGNING_KEY_ID }} + signing_key_passphrase: ${{ env.SIGNING_KEY_PASSPHRASE }} + sonatype_username: ${{ env.SONATYPE_USER_NAME }} + sonatype_password: ${{ env.SONATYPE_PASSWORD }} + aws_role: ${{ vars.AWS_ROLE_ARN }} + token: ${{ secrets.GITHUB_TOKEN }} + + release-sdk-common: runs-on: ubuntu-latest needs: release-please diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 1022305..f93410f 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,4 +1,5 @@ { "lib/java-server-sdk-otel": "0.1.0", - "lib/shared/common": "2.1.1" + "lib/shared/common": "2.1.1", + "lib/shared/internal": "1.3.0" } diff --git a/README.md b/README.md index 1e7f587..0ca087c 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,10 @@ This includes shared libraries, used by SDKs and other tools, as well as SDKs. 
## Packages -| Shared Packages | API Docs | maven | issues | tests | -| ---------------------------------------------------------------- |--------------------------------------------------------------------| ---------------------------------------------------------- | ------------------------------------- | ------------------------------------------------------------- | -| [@launchdarkly/java-sdk-common](lib/shared/common/README.md) | [![Documentation][sdk-common-docs-badge]][sdk-common-docs-link] | [![maven][sdk-common-maven-badge]][sdk-common-maven-link] | [Issues][sdk-common-issues] | [![Actions Status][sdk-common-ci-badge]][sdk-common-ci-link] | +| Shared Packages | API Docs | maven | issues | tests | +| ------------------------------------------------------------------ |---------------------------------------------------------------------| ------------------------------------------------------------- | ------------------------------------- | ----------------------------------------------------------------- | +| [@launchdarkly/java-sdk-internal](lib/shared/internal/README.md) | [![Documentation][sdk-internal-docs-badge]][sdk-internal-docs-link] | [![maven][sdk-internal-maven-badge]][sdk-internal-maven-link] | [Issues][sdk-internal-issues] | [![Actions Status][sdk-internal-ci-badge]][sdk-internal-ci-link] | +| [@launchdarkly/java-sdk-common](lib/shared/common/README.md) | [![Documentation][sdk-common-docs-badge]][sdk-common-docs-link] | [![maven][sdk-common-maven-badge]][sdk-common-maven-link] | [Issues][sdk-common-issues] | [![Actions Status][sdk-common-ci-badge]][sdk-common-ci-link] | | Telemetry Packages | API Docs | maven | issues | tests | | ---------------------------------------------------------------------------- |--------------------------------------------------------------| ---------------------------------------------------------- | ------------------------------------- | ------------------------------------------------------------- | @@ -57,6 +58,15 @@ We encourage pull requests and other contributions from the community. 
Check out [server-otel-docs-badge]: https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8 [server-otel-docs-link]: https://launchdarkly.github.io/java-core/lib/java-server-sdk-otel/ +[//]: # 'java-sdk-internal' +[sdk-internal-issues]: https://github.com/launchdarkly/java-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+java-sdk-internal%22+ +[sdk-internal-maven-badge]: https://img.shields.io/maven-central/v/com.launchdarkly/launchdarkly-java-sdk-internal +[sdk-internal-maven-link]: https://central.sonatype.com/artifact/com.launchdarkly/launchdarkly-java-sdk-internal +[sdk-internal-ci-badge]: https://github.com/launchdarkly/java-core/actions/workflows/java-sdk-internal.yml/badge.svg +[sdk-internal-ci-link]: https://github.com/launchdarkly/java-core/actions/workflows/java-sdk-internal.yml +[sdk-internal-docs-badge]: https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8 +[sdk-internal-docs-link]: https://launchdarkly.github.io/java-core/lib/shared/internal/ + [//]: # 'java-sdk-common' [sdk-common-issues]: https://github.com/launchdarkly/java-core/issues?q=is%3Aissue+is%3Aopen+label%3A%22package%3A+java-sdk-common%22+ [sdk-common-maven-badge]: https://img.shields.io/maven-central/v/com.launchdarkly/launchdarkly-java-sdk-common diff --git a/lib/shared/internal/.gitignore b/lib/shared/internal/.gitignore new file mode 100644 index 0000000..d8bea40 --- /dev/null +++ b/lib/shared/internal/.gitignore @@ -0,0 +1,22 @@ +# Eclipse project files +.classpath +.project +.settings + +# Intellij project files +*.iml +*.ipr +*.iws +.idea/ + +#Gradle +.gradletasknamecache +.gradle/ +build/ +bin/ +out/ +classes/ + +# Test code that gets temporarily copied by our Android CI build +src/androidTest/java/com/launchdarkly/sdk/internal/**/*.java +!src/androidTest/java/com/launchdarkly/sdk/internal/BaseTest.java diff --git a/lib/shared/internal/CHANGELOG.md b/lib/shared/internal/CHANGELOG.md new file mode 100644 index 0000000..2463259 --- /dev/null +++ b/lib/shared/internal/CHANGELOG.md @@ -0,0 +1,29 @@ +# Change log + +All notable changes to the project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org). + +## [1.3.0] - 2024-03-13 +### Changed: +- Redact anonymous attributes within feature events +- Always inline contexts for feature events + +## [1.2.1] - 2023-11-14 +### Fixed: +- Fixes NPE when interacting with Context created by use of `copyFrom`. (Thanks, [ +pedroafonsodias](https://github.com/launchdarkly/java-sdk-common/pull/15)) + +## [1.2.0] - 2023-10-11 +### Added: +- Added support for the migration operation event. +- Added support for event sampling for feature events and migration operation events. + +## [1.1.1] - 2023-06-27 +### Changed: +- Bumping Guava version to incorporate CVE fixes. + +## [1.1.0] - 2023-03-21 +### Added: +- Additional query param related functionality to HttpHelpers + +## [1.0.0] - 2022-12-05 +Initial release of this project, for use in the upcoming 6.0.0 release of the LaunchDarkly Java SDK and 4.0.0 release of the LaunchDarkly Android SDK. 
diff --git a/lib/shared/internal/CODEOWNERS b/lib/shared/internal/CODEOWNERS new file mode 100644 index 0000000..f541913 --- /dev/null +++ b/lib/shared/internal/CODEOWNERS @@ -0,0 +1,2 @@ +# Repository Maintainers +* @launchdarkly/team-sdk-java diff --git a/lib/shared/internal/CONTRIBUTING.md b/lib/shared/internal/CONTRIBUTING.md new file mode 100644 index 0000000..6b5f35b --- /dev/null +++ b/lib/shared/internal/CONTRIBUTING.md @@ -0,0 +1,78 @@ +# Contributing to the LaunchDarkly SDK Java Internal Common Code + +LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this project. + +## Submitting bug reports and feature requests + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/java-sdk-internal/issues) in the GitHub repository. Bug reports and feature requests specific to this project should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. + +## Submitting pull requests + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +## Access modifiers + +The types in this library are meant to be consumed by our SDK code, and never seen by application developers. However, for any type that will be referenced directly from SDK code (as opposed to an implementation detail that is only referenced from within the `java-sdk-internal` code), the access modifier must be `public`. These types cannot be package-private, because we need to be able to access them from SDK code in multiple packages (e.g. `com.launchdarkly.sdk.server` versus `com.launchdarkly.sdk.android`). + +That means it is technically possible for application code to see these types; the compiler will not stop a developer from referencing them. However: + +1. We are explicitly declaring all APIs in this library to be unsupported for customer use, so any such use is at the developer's own risk. +2. Generated Javadoc documentation for the SDKs will not show these types, since they are in a dependency of the SDK rather than in the main SDK jar (and, in the case of the server-side Java SDK, these classes are obfuscated via shading). + +## Versioning + +The semantic versioning of this package refers to how the package is used from the point of view of internal SDK code. This is intentionally decoupled from the versioning of the SDKs themselves. + +If a feature is added for the SDKs to use, such as a new helper class or a new overload of an existing method, then a minor version increment is appropriate. That does _not_ mean that the SDKs themselves would have a minor version increment, unless they are exposing some new functionality for application code to use. + +If a change is made that is not backward-compatible, so SDK code will need to be modified to be able to use the new release, then a major version increment is appropriate. Again, that does _not_ mean that the SDKs themselves would have a major version increment, unless they have a breaking change from the point of view of application code. 
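To make the access-modifier guidance in CONTRIBUTING.md concrete, here is a minimal sketch of the pattern it describes. The class and method are hypothetical (nothing named `RetryHelpers` exists in the package); the point is that the type is declared `public` so SDK code in other packages can call it, while its Javadoc marks it as off-limits for applications.

```java
package com.launchdarkly.sdk.internal;

/**
 * Hypothetical shared helper. It must be public because SDK code in
 * com.launchdarkly.sdk.server and com.launchdarkly.sdk.android needs to call it,
 * but it is not a supported API and applications should not reference it.
 */
public final class RetryHelpers {
  private RetryHelpers() {} // static helpers only; no instances

  /**
   * Computes an exponential backoff delay, capped at a maximum.
   *
   * @param attempt zero-based retry attempt number
   * @param baseMillis base delay in milliseconds
   * @param maxMillis maximum delay in milliseconds
   * @return the delay in milliseconds to use for this attempt
   */
  public static long backoffMillis(int attempt, long baseMillis, long maxMillis) {
    int shift = Math.min(attempt, 20);  // bound the exponent
    long delay = baseMillis << shift;   // baseMillis * 2^attempt
    return Math.min(delay, maxMillis);  // never exceed the cap
  }
}
```

Package-private visibility would not work here because the consuming SDKs live in different packages; the unsupported-API caveat (plus shading in the server-side Java SDK) is what keeps such types out of the supported surface.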
+ +## Build instructions + +### Prerequisites + +The project builds with [Gradle](https://gradle.org/) and should be built against Java 8. + +### Building + +To build the project without running any tests: +``` +./gradlew jar +``` + +To clean your working directory between builds, run: +``` +./gradlew clean +``` + +If you wish to use the generated artifact from another Maven/Gradle project such as [java-server-sdk](https://github.com/launchdarkly/java-server-sdk), you will likely want to publish it to your local Maven repository so that the other project can access it. +``` +./gradlew publishToMavenLocal +``` + +### Testing + +To build the project and run all unit tests: +``` +./gradlew test +``` + +## Note on Java version and Android support + +This project can be used both in server-side Java and in Android. Its minimum Java version is 8, but not all Java 8 APIs and syntax are supported in Android. The CI jobs for this project include an Android job that runs all of the unit tests in Android, to verify that no unsupported APIs are being used. + +## Code coverage + +It is important to keep unit test coverage as close to 100% as possible in this project, since the SDK projects will not exercise every `java-sdk-internal` method in their own unit tests. + +You can view the latest code coverage report in the CI build artifacts, or run it locally with `./gradlew jacocoTestReport` and view the output under `./build/reports/jacoco/test`. + +Sometimes a gap in coverage is unavoidable, usually because the compiler requires us to provide a code path for some condition that in practice can't happen and can't be tested, or because of a known issue with the code coverage tool. Please handle all such cases as follows: + +* Mark the code with an explanatory comment beginning with "COVERAGE:". +* Run the code coverage task with `./gradlew jacocoTestCoverageVerification`. It should fail and indicate how many lines of missed coverage exist in the method you modified. +* Add an item to the `methodsWithMissedLineCount` map in `buildSrc/src/main/kotlin/TestCoverageOverrides.kt` that specifies that number of missed lines for that method signature. + +## Note on dependencies + +Because this project can be used in Android, it's important to avoid heavyweight runtime dependencies. For instance, as convenient as Guava can be, we should not use Guava at all (except possibly in _test_ code) because it is a large library, and also because if the application does use Guava, we don't want to have to worry about conflicting with whatever version they're using. diff --git a/lib/shared/internal/LICENSE b/lib/shared/internal/LICENSE new file mode 100644 index 0000000..d238a2b --- /dev/null +++ b/lib/shared/internal/LICENSE @@ -0,0 +1,13 @@ +Copyright 2022 Catamorphic, Co. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
\ No newline at end of file diff --git a/lib/shared/internal/README.md b/lib/shared/internal/README.md new file mode 100644 index 0000000..18d6561 --- /dev/null +++ b/lib/shared/internal/README.md @@ -0,0 +1,28 @@ +# LaunchDarkly SDK Java Internal Common Code + +[![Circle CI](https://circleci.com/gh/launchdarkly/java-sdk-internal.svg?style=shield)](https://circleci.com/gh/launchdarkly/java-sdk-internal) +[![Javadocs](http://javadoc.io/badge/com.launchdarkly/launchdarkly-java-sdk-internal.svg)](http://javadoc.io/doc/com.launchdarkly/launchdarkly-java-sdk-internal) + +This project contains Java classes and interfaces that are shared between the LaunchDarkly Java and Android SDKs. These are internal implementation details that are not part of the supported SDK APIs and should not be used by application code; no types in this package should be exposed directly in the public APIs. Code that is specific to one or the other SDK is in [java-server-sdk](https://github.com/launchdarkly/java-server-sdk) or [android-client-sdk](https://github.com/launchdarkly/android-client-sdk), and public APIs that are common to both are in [java-sdk-common](https://github.com/launchdarkly/java-sdk-common). + +## Supported Java versions + +This version of the library works with Java 8 and above, or Android. + +## Contributing + +See [Contributing](./CONTRIBUTING.md). + +## About LaunchDarkly + +* LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: + * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. + * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). + * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. + * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. 
+* Explore LaunchDarkly + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates diff --git a/lib/shared/internal/SECURITY.md b/lib/shared/internal/SECURITY.md new file mode 100644 index 0000000..10f1d1a --- /dev/null +++ b/lib/shared/internal/SECURITY.md @@ -0,0 +1,5 @@ +# Reporting and Fixing Security Issues + +Please report all security issues to the LaunchDarkly security team by submitting a bug bounty report to our [HackerOne program](https://hackerone.com/launchdarkly?type=team). LaunchDarkly will triage and address all valid security issues following the response targets defined in our program policy. Valid security issues may be eligible for a bounty. + +Please do not open issues or pull requests for security issues. This makes the problem immediately visible to everyone, including potentially malicious actors. diff --git a/lib/shared/internal/build-android.gradle b/lib/shared/internal/build-android.gradle new file mode 100644 index 0000000..6c81614 --- /dev/null +++ b/lib/shared/internal/build-android.gradle @@ -0,0 +1,79 @@ +apply plugin: "com.android.library" // for some reason Gradle complains if this is in the plugins section +//apply plugin: "com.getkeepsafe.dexcount" + +buildscript { + repositories { + mavenCentral() + mavenLocal() + google() + } + dependencies { + classpath 'com.android.tools.build:gradle:4.2.0' + } +} +// This Gradle script is used only when we are running tests in an Android environment to verify +// that the project is Android-compatible. We do not publish an Android build - that is done in +// the android-client-sdk project. + +repositories { + mavenLocal() + // Before LaunchDarkly release artifacts get synced to Maven Central they are here along with snapshots: + maven { url "https://oss.sonatype.org/content/groups/public/" } + mavenCentral() + google() +} + +allprojects { + group = ProjectValues.groupId + version = "${version}" // from gradle.properties + archivesBaseName = ProjectValues.artifactId +} + +android { + compileSdkVersion(30) + buildToolsVersion "30.0.3" + + defaultConfig { + minSdkVersion(21) + targetSdkVersion(30) + + testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" + + consumerProguardFiles 'consumer-proguard-rules.pro' + + // The following argument makes the Android Test Orchestrator run its + // "pm clear" command after each test invocation. This command ensures + // that the app's state is completely cleared between tests. 
+ testInstrumentationRunnerArguments clearPackageData: 'true' + } + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + testOptions { + execution = "ANDROIDX_TEST_ORCHESTRATOR" + } + + packagingOptions { + exclude 'META-INF/**' + } + + useLibrary("android.test.runner") + useLibrary("android.test.base") + useLibrary("android.test.mock") +} + +dependencies { // see Dependencies.kt in buildSrc + Libs.implementation.each { implementation(it) } + Libs.javaTestImplementation.forEach { androidTestImplementation(it) } + Libs.androidTestImplementation.each { androidTestImplementation(it) } + + androidTestImplementation("com.launchdarkly:test-helpers:${Versions.testHelpers}") { + exclude group: "org.eclipse.jetty" // we don't use the HTTP helpers and they don't work in Android + exclude group: "com.squareup.okhttp3" // also unused, causes dex limit to be exceeded + } + + androidTestUtil("androidx.test:orchestrator:1.4.1") +} diff --git a/lib/shared/internal/build.gradle.kts b/lib/shared/internal/build.gradle.kts new file mode 100644 index 0000000..61f515e --- /dev/null +++ b/lib/shared/internal/build.gradle.kts @@ -0,0 +1,98 @@ +import java.time.Duration + +// These values come from gradle.properties +val ossrhUsername: String by project +val ossrhPassword: String by project + +buildscript { + repositories { + mavenCentral() + mavenLocal() + } +} + +plugins { // see Dependencies.kt in buildSrc + Libs.javaBuiltInGradlePlugins.forEach { id(it) } + Libs.javaExtGradlePlugins.forEach { (n, v) -> id(n) version v } +} + +repositories { + mavenLocal() + // Before LaunchDarkly release artifacts get synced to Maven Central they are here along with snapshots: + maven { url = uri("https://oss.sonatype.org/content/groups/public/") } + mavenCentral() +} + +configurations.all { + // check for updates every build for dependencies with: 'changing: true' + resolutionStrategy.cacheChangingModulesFor(0, "seconds") +} + +base { + group = ProjectValues.groupId + archivesBaseName = ProjectValues.artifactId + version = version +} + +java { + withJavadocJar() + withSourcesJar() + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 +} + +dependencies { // see Dependencies.kt in buildSrc + Libs.implementation.forEach { api(it)} + Libs.javaTestImplementation.forEach { testImplementation(it) } + + testImplementation("com.launchdarkly:test-helpers:${Versions.testHelpers}") + // see build-android.gradle about the reason for special-casing this +} + +checkstyle { + toolVersion = "9.3" + configFile = file("${project.rootDir}/checkstyle.xml") +} + +helpers.Javadoc.configureTask(tasks.javadoc, null) // see Javadoc.kt in buildSrc + +helpers.Test.configureTask(tasks.compileTestJava, tasks.test, null) // see Test.kt in buildSrc + +helpers.Jacoco.configureTasks( // see Jacoco.kt in buildSrc + tasks.jacocoTestReport, + tasks.jacocoTestCoverageVerification +) + +helpers.Idea.configure(idea) + +publishing { + publications { + create("mavenJava") { + from(components["java"]) + + helpers.Pom.standardPom(pom) // see Pom.kt in buildSrc + } + } + repositories { + mavenLocal() + } +} + +nexusStaging { + packageGroup = ProjectValues.groupId + numberOfRetries = 40 // we've seen extremely long delays in closing repositories +} + +nexusPublishing { + clientTimeout.set(Duration.ofMinutes(2)) // we've seen extremely long delays in creating repositories + repositories { + sonatype { + username.set(ossrhUsername) + password.set(ossrhPassword) + } 
+ } +} + +signing { + sign(publishing.publications["mavenJava"]) +} diff --git a/lib/shared/internal/buildSrc/build.gradle.kts b/lib/shared/internal/buildSrc/build.gradle.kts new file mode 100644 index 0000000..8fe36d0 --- /dev/null +++ b/lib/shared/internal/buildSrc/build.gradle.kts @@ -0,0 +1,20 @@ + +// This build script controls the building of the shared Gradle code in +// buildSrc. Putting code under buildSrc allows us to break it up for better +// clarity, leaving a much simpler build script at the top level of the repo. + +// For the java-sdk-common project, this also allows us to share some values +// between build.gradle.kts and build-android.gradle in a clearer way than +// the old method of including a shared build script. + +// Things that are specific to this project, like dependencies, are in +// buildSrc/src/main/kotlin. Reusable helper code that isn't specific to this +// project is in buildSrc/src/main/kotlin/helpers. + +plugins { + `kotlin-dsl` +} + +repositories { + gradlePluginPortal() +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/Dependencies.kt b/lib/shared/internal/buildSrc/src/main/kotlin/Dependencies.kt new file mode 100644 index 0000000..2b71ae6 --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/Dependencies.kt @@ -0,0 +1,58 @@ + +// Centralize dependencies here instead of writing them out in the top-level +// build script(s). + +object Versions { + const val gson = "2.8.9" + const val launchdarklyJavaSdkCommon = "2.1.1" + const val launchdarklyLogging = "1.1.1" + const val okhttp = "4.9.1" + const val testHelpers = "1.2.0" +} + +object PluginVersions { + const val nexusPublish = "0.3.0" + const val nexusStaging = "0.30.0" +} + +object Libs { + val implementation = listOf( + // We would put anything here that we want to go into the Gradle "implementation" + // configuration, if and only if we want those things to show up in pom.xml. + "com.google.code.gson:gson:${Versions.gson}", + "com.launchdarkly:launchdarkly-java-sdk-common:${Versions.launchdarklyJavaSdkCommon}", + "com.launchdarkly:launchdarkly-logging:${Versions.launchdarklyLogging}", + "com.squareup.okhttp3:okhttp:${Versions.okhttp}" + ) + + val javaTestImplementation = listOf( + "junit:junit:4.12", + "org.hamcrest:hamcrest-library:1.3", + "com.google.guava:guava:32.0.1-jre" + + // "com.launchdarkly:test-helpers:${Versions.testHelpers}" + // test-helpers is special-cased in build.gradle.kts and build-android.gradle + ) + + val androidTestImplementation = javaTestImplementation + listOf( + "androidx.test:core:1.4.0", + "androidx.test:runner:1.4.0", + "androidx.test:rules:1.4.0", + "androidx.test.ext:junit:1.1.3" + ) + + val javaBuiltInGradlePlugins = listOf( + "java", + "java-library", + "checkstyle", + "signing", + "maven-publish", + "idea", + "jacoco" + ) + + val javaExtGradlePlugins = mapOf( + "de.marcphilipp.nexus-publish" to PluginVersions.nexusPublish, + "io.codearte.nexus-staging" to PluginVersions.nexusStaging + ) +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/ProjectValues.kt b/lib/shared/internal/buildSrc/src/main/kotlin/ProjectValues.kt new file mode 100644 index 0000000..74f518b --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/ProjectValues.kt @@ -0,0 +1,15 @@ + +// This file defines basic properties of the project that are used in the +// build script and the helper code. 
+ +object ProjectValues { + const val groupId = "com.launchdarkly" + const val artifactId = "launchdarkly-java-sdk-internal" + const val description = "LaunchDarkly SDK Java Shared Implementation" + const val githubRepo = "launchdarkly/java-sdk-internal" + + const val sdkBasePackage = "com.launchdarkly.sdk.internal" + + const val pomDeveloperName = "LaunchDarkly SDK Team" + const val pomDeveloperEmail = "sdks@launchdarkly.com" +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/TestCoverageOverrides.kt b/lib/shared/internal/buildSrc/src/main/kotlin/TestCoverageOverrides.kt new file mode 100644 index 0000000..b72c873 --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/TestCoverageOverrides.kt @@ -0,0 +1,24 @@ + +// See notes in CONTRIBUTING.md on code coverage. Unfortunately we can't configure +// line-by-line code coverage overrides within the source code itself, because Jacoco +// operates on bytecode. + +// These values are used by helpers/Jacoco.kt. + +object TestCoverageOverrides { + val prefixForAllMethodSignatures = ProjectValues.sdkBasePackage + "." + + // Each entry in methodsWithMissedLineCount is an override to tell the Jacoco plugin + // that we're aware of a gap in our test coverage and are OK with it. In each entry, + // the key is the method signature and the value is the number of lines that we + // expect Jacoco to report as missed. + val methodsWithMissedLineCount = mapOf( + "Placeholder.Placeholder()" to 1 + ).mapKeys { prefixForAllMethodSignatures + it.key } + + // Each entry in methodsToSkip is an override to tell the Jacoco plugin to ignore + // code coverage in the method with the specified signature. + val methodsToSkip = listOf( + "" + ).map { prefixForAllMethodSignatures + it } +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Idea.kt b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Idea.kt new file mode 100644 index 0000000..c2f85d5 --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Idea.kt @@ -0,0 +1,16 @@ +package helpers + +import org.gradle.api.tasks.TaskProvider +import org.gradle.plugins.ide.idea.model.IdeaModel + +// Idea.configure provides reusable configuration logic for the Idea +// behavior we normally use. + +object Idea { + fun configure(ideaModel: IdeaModel) { + ideaModel.module { + isDownloadJavadoc = true + isDownloadSources = true + } + } +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Jacoco.kt b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Jacoco.kt new file mode 100644 index 0000000..a824418 --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Jacoco.kt @@ -0,0 +1,52 @@ +package helpers + +import org.gradle.api.tasks.TaskProvider +import org.gradle.testing.jacoco.tasks.JacocoReport +import org.gradle.testing.jacoco.tasks.JacocoCoverageVerification + +// Jacoco.configureTasks provides reusable configuration logic for using the Jacoco +// test coverage plugin in a Java project. See also: TestCoverageOverrides.kt + +object Jacoco { + fun configureTasks(reportTask: TaskProvider, + verificationTask: TaskProvider) { + reportTask.configure { + reports { + xml.required.set(true) + csv.required.set(true) + html.required.set(true) + } + } + + verificationTask.configure { + // See notes in CONTRIBUTING.md on code coverage. Unfortunately we can't configure line-by-line code + // coverage overrides within the source code itself, because Jacoco operates on bytecode. 
+ violationRules { + TestCoverageOverrides.methodsWithMissedLineCount.forEach { signature, maxMissedLines -> + rule { + element = "METHOD" + includes = listOf(signature) + limit { + counter = "LINE" + value = "MISSEDCOUNT" + maximum = maxMissedLines.toBigDecimal() + } + } + } + + // General rule that we should expect 100% test coverage; exclude any methods that + // have overrides in TestCoverageOverrides. + rule { + element = "METHOD" + limit { + counter = "LINE" + value = "MISSEDCOUNT" + maximum = 0.toBigDecimal() + } + excludes = TestCoverageOverrides.methodsWithMissedLineCount.map { it.key } + + TestCoverageOverrides.methodsToSkip + } + } + } + } +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Javadoc.kt b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Javadoc.kt new file mode 100644 index 0000000..2549ebc --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Javadoc.kt @@ -0,0 +1,21 @@ +package helpers + +import org.gradle.api.artifacts.Configuration +import org.gradle.api.tasks.TaskProvider +import org.gradle.api.tasks.javadoc.Javadoc +import org.gradle.external.javadoc.CoreJavadocOptions + +object Javadoc { + fun configureTask(javadocTask: TaskProvider, classpathConfig: Configuration?) { + javadocTask.configure { + // Force the Javadoc build to fail if there are any Javadoc warnings. See: https://discuss.gradle.org/t/javadoc-fail-on-warning/18141/3 + // See JDK-8200363 (https://bugs.openjdk.java.net/browse/JDK-8200363) + // for information about the -Xwerror option. + (options as CoreJavadocOptions).addStringOption("Xwerror") + + if (classpathConfig != null) { + classpath += classpathConfig + } + } + } +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Pom.kt b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Pom.kt new file mode 100644 index 0000000..ac9906e --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Pom.kt @@ -0,0 +1,31 @@ +package helpers + +import org.gradle.api.publish.maven.MavenPom + +// Pom.standardPom provides reusable logic for setting the pom.xml properties +// of LaunchDarkly packages. It gets its values from ProjectValues.kt. 
+ +object Pom { + fun standardPom(pom: MavenPom) { + pom.name.set(ProjectValues.artifactId) + pom.description.set(ProjectValues.description) + pom.url.set("https://github.com/${ProjectValues.githubRepo}") + pom.licenses { + license { + url.set("http://www.apache.org/licenses/LICENSE-2.0.txt") + name.set("The Apache License, Version 2.0") + } + } + pom.developers { + developer { + name.set(ProjectValues.pomDeveloperName) + email.set(ProjectValues.pomDeveloperEmail) + } + } + pom.scm { + connection.set("scm:git:git://github.com/${ProjectValues.githubRepo}.git") + developerConnection.set("scm:git:ssh:git@github.com:${ProjectValues.githubRepo}.git") + url.set("https://github.com/${ProjectValues.githubRepo}") + } + } +} diff --git a/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Test.kt b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Test.kt new file mode 100644 index 0000000..c213db5 --- /dev/null +++ b/lib/shared/internal/buildSrc/src/main/kotlin/helpers/Test.kt @@ -0,0 +1,34 @@ +package helpers + +import org.gradle.api.artifacts.Configuration +import org.gradle.api.tasks.TaskProvider +import org.gradle.api.tasks.compile.JavaCompile +import org.gradle.api.tasks.testing.Test +import org.gradle.api.tasks.testing.logging.TestExceptionFormat + +// Test.configureTask provides reusable configuration logic for the Java test +// behavior we normally use. + +object Test { + fun configureTask(compileTestTask: TaskProvider, testTask: TaskProvider, + classpathConfig: Configuration?) { + + compileTestTask.configure { + if (classpathConfig != null) { + classpath += classpathConfig + } + } + + testTask.configure { + testLogging { + events("passed", "skipped", "failed", "standardOut", "standardError") + showStandardStreams = true + exceptionFormat = TestExceptionFormat.FULL + } + + if (classpathConfig != null) { + classpath += classpathConfig + } + } + } +} diff --git a/lib/shared/internal/checkstyle.xml b/lib/shared/internal/checkstyle.xml new file mode 100644 index 0000000..7800907 --- /dev/null +++ b/lib/shared/internal/checkstyle.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/lib/shared/internal/gradle.properties b/lib/shared/internal/gradle.properties new file mode 100644 index 0000000..3fd260b --- /dev/null +++ b/lib/shared/internal/gradle.properties @@ -0,0 +1,8 @@ +version=1.3.0 +# The following empty ossrh properties are used by LaunchDarkly's internal integration testing framework +# and should not be needed for typical development purposes (including by third-party developers). +ossrhUsername= +ossrhPassword= + +# Used only in Android CI test build: +android.useAndroidX=true diff --git a/lib/shared/internal/gradle.properties.example b/lib/shared/internal/gradle.properties.example new file mode 100644 index 0000000..058697d --- /dev/null +++ b/lib/shared/internal/gradle.properties.example @@ -0,0 +1,8 @@ +# To release a version of this SDK, copy this file to ~/.gradle/gradle.properties and fill in the values. 
+githubUser = YOUR_GITHUB_USERNAME +githubPassword = YOUR_GITHUB_PASSWORD +signing.keyId = 5669D902 +signing.password = SIGNING_PASSWORD +signing.secretKeyRingFile = SECRET_RING_FILE +ossrhUsername = launchdarkly +ossrhPassword = OSSHR_PASSWORD diff --git a/lib/shared/internal/gradle/wrapper/gradle-wrapper.jar b/lib/shared/internal/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..7454180 Binary files /dev/null and b/lib/shared/internal/gradle/wrapper/gradle-wrapper.jar differ diff --git a/lib/shared/internal/gradle/wrapper/gradle-wrapper.properties b/lib/shared/internal/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..070cb70 --- /dev/null +++ b/lib/shared/internal/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/lib/shared/internal/gradlew b/lib/shared/internal/gradlew new file mode 100755 index 0000000..1b6c787 --- /dev/null +++ b/lib/shared/internal/gradlew @@ -0,0 +1,234 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. 
+# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +APP_NAME="Gradle" +APP_BASE_NAME=${0##*/} + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/lib/shared/internal/gradlew.bat b/lib/shared/internal/gradlew.bat new file mode 100644 index 0000000..ac1b06f --- /dev/null +++ b/lib/shared/internal/gradlew.bat @@ -0,0 +1,89 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/lib/shared/internal/settings.gradle.kts b/lib/shared/internal/settings.gradle.kts new file mode 100644 index 0000000..58750cc --- /dev/null +++ b/lib/shared/internal/settings.gradle.kts @@ -0,0 +1 @@ +rootProject.name = "launchdarkly-java-sdk-internal" diff --git a/lib/shared/internal/src/androidTest/AndroidManifest.xml b/lib/shared/internal/src/androidTest/AndroidManifest.xml new file mode 100644 index 0000000..6a528ee --- /dev/null +++ b/lib/shared/internal/src/androidTest/AndroidManifest.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/lib/shared/internal/src/androidTest/java/com/launchdarkly/sdk/internal/BaseTest.java b/lib/shared/internal/src/androidTest/java/com/launchdarkly/sdk/internal/BaseTest.java new file mode 100644 index 0000000..af7768b --- /dev/null +++ b/lib/shared/internal/src/androidTest/java/com/launchdarkly/sdk/internal/BaseTest.java @@ -0,0 +1,12 @@ +package com.launchdarkly.sdk.internal; + +import androidx.test.ext.junit.runners.AndroidJUnit4; +import org.junit.runner.RunWith; + +/** + * When running our unit tests in Android, we substitute this version of BaseTest which provides + * the correct test runner. 
+ */ +@RunWith(AndroidJUnit4.class) +public abstract class BaseTest extends BaseInternalTest { +} diff --git a/lib/shared/internal/src/main/AndroidManifest.xml b/lib/shared/internal/src/main/AndroidManifest.xml new file mode 100644 index 0000000..03982d1 --- /dev/null +++ b/lib/shared/internal/src/main/AndroidManifest.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/GsonHelpers.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/GsonHelpers.java new file mode 100644 index 0000000..b39928b --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/GsonHelpers.java @@ -0,0 +1,18 @@ +package com.launchdarkly.sdk.internal; + +import com.google.gson.Gson; + +/** + * General-purpose Gson helpers. + */ +public abstract class GsonHelpers { + private static final Gson GSON_INSTANCE = new Gson(); + + /** + * A singleton instance of Gson with the default configuration. + * @return a Gson instance + */ + public static Gson gsonInstance() { + return GSON_INSTANCE; + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessor.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessor.java new file mode 100644 index 0000000..1ad7a02 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessor.java @@ -0,0 +1,801 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.gson.Gson; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.internal.events.EventSummarizer.EventSummary; + +import java.io.BufferedWriter; +import java.io.ByteArrayOutputStream; +import java.io.Closeable; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.Semaphore; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +/** + * The internal component that processes and delivers analytics events. + *

+ * This component is not visible to application code; the SDKs may choose to expose an + * interface for customizing event behavior, but if so, their default implementations of + * the interface will delegate to this component rather than this component implementing + * the interface itself. This allows us to make changes as needed to the internal interface + * and event parameters without disrupting application code, and also to provide internal + * features that may not be relevant to some SDKs. + * + * The current implementation is really three components. DefaultEventProcessor is a simple + * facade that accepts event parameters (from SDK activity that might be happening on many + * threads) and pushes the events onto a queue. The queue is consumed by a single-threaded + * task run by EventDispatcher, which performs any necessary processing such as + * incrementing summary counters. When events are ready to deliver, it uses an + * implementation of EventSender (normally DefaultEventSender) to deliver the JSON data. + */ +public final class DefaultEventProcessor implements Closeable, EventProcessor { + private static final int INITIAL_OUTPUT_BUFFER_SIZE = 2000; + + private static final Gson gson = new Gson(); + + private final EventsConfiguration eventsConfig; + private final BlockingQueue inbox; + private final ScheduledExecutorService scheduler; + private final AtomicBoolean offline; + private final AtomicBoolean inBackground; + private final AtomicBoolean diagnosticInitSent = new AtomicBoolean(false); + private final AtomicBoolean closed = new AtomicBoolean(false); + private final Object stateLock = new Object(); + private ScheduledFuture eventFlushTask; + private ScheduledFuture contextKeysFlushTask; + private ScheduledFuture periodicDiagnosticEventTask; + private volatile boolean inputCapacityExceeded = false; + private final LDLogger logger; + + /** + * Creates an instance. + * + * @param eventsConfig the events configuration + * @param sharedExecutor used for scheduling repeating tasks + * @param threadPriority worker thread priority + * @param logger the logger + */ + public DefaultEventProcessor( + EventsConfiguration eventsConfig, + ScheduledExecutorService sharedExecutor, + int threadPriority, + LDLogger logger + ) { + this.eventsConfig = eventsConfig; + inbox = new ArrayBlockingQueue<>(eventsConfig.capacity); + + scheduler = sharedExecutor; + this.logger = logger; + + inBackground = new AtomicBoolean(eventsConfig.initiallyInBackground); + offline = new AtomicBoolean(eventsConfig.initiallyOffline); + + new EventDispatcher( + eventsConfig, + sharedExecutor, + threadPriority, + inbox, + inBackground, + offline, + closed, + logger + ); + // we don't need to save a reference to this - we communicate with it entirely through the inbox queue. + + // Decide whether to start scheduled tasks that depend on the background/offline state. + updateScheduledTasks(eventsConfig.initiallyInBackground, eventsConfig.initiallyOffline); + + // The context keys flush task should always be scheduled, if a contextDeduplicator exists. 
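+    // When that task fires, it posts a FLUSH_USERS message; the dispatcher handles it by calling
+    // contextDeduplicator.flush(), clearing its record of recently seen context keys so that index
+    // events for a given context can be generated again after the interval elapses.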
+ if (eventsConfig.contextDeduplicator != null && eventsConfig.contextDeduplicator.getFlushInterval() != null) { + contextKeysFlushTask = enableOrDisableTask(true, null, + eventsConfig.contextDeduplicator.getFlushInterval().longValue(), MessageType.FLUSH_USERS); + } + } + + @Override + public void sendEvent(Event e) { + if (!closed.get()) { + postMessageAsync(MessageType.EVENT, e); + } + } + + @Override + public void flushAsync() { + if (!closed.get()) { + postMessageAsync(MessageType.FLUSH, null); + } + } + + @Override + public void flushBlocking() { + if (!closed.get()) { + postMessageAndWait(MessageType.FLUSH, null); + } + } + + @Override + public void setInBackground(boolean inBackground) { + synchronized (stateLock) { + if (this.inBackground.getAndSet(inBackground) == inBackground) { + // value was unchanged - nothing to do + return; + } + updateScheduledTasks(inBackground, offline.get()); + } + } + + @Override + public void setOffline(boolean offline) { + synchronized (stateLock) { + if (this.offline.getAndSet(offline) == offline) { + // value was unchanged - nothing to do + return; + } + updateScheduledTasks(inBackground.get(), offline); + } + } + + public void close() throws IOException { + if (closed.compareAndSet(false, true)) { + synchronized (stateLock) { + eventFlushTask = enableOrDisableTask(false, eventFlushTask, 0, null); + contextKeysFlushTask = enableOrDisableTask(false, contextKeysFlushTask, 0, null); + periodicDiagnosticEventTask = enableOrDisableTask(false, periodicDiagnosticEventTask, 0, null); + } + postMessageAsync(MessageType.FLUSH, null); + postMessageAndWait(MessageType.SHUTDOWN, null); + } + } + + void updateScheduledTasks(boolean inBackground, boolean offline) { + // The event flush task should be scheduled unless we're offline. + eventFlushTask = enableOrDisableTask( + !offline, + eventFlushTask, + eventsConfig.flushIntervalMillis, + MessageType.FLUSH + ); + + // The periodic diagnostic event task should be scheduled unless we're offline or in the background + // or there is no diagnostic store. 
+ periodicDiagnosticEventTask = enableOrDisableTask( + !offline && !inBackground && eventsConfig.diagnosticStore != null, + periodicDiagnosticEventTask, + eventsConfig.diagnosticRecordingIntervalMillis, + MessageType.DIAGNOSTIC_STATS + ); + + if (!inBackground && !offline && !diagnosticInitSent.get() && eventsConfig.diagnosticStore != null) { + // Trigger a diagnostic init event if we never had the chance to send one before + postMessageAsync(MessageType.DIAGNOSTIC_INIT, null); + } + } + + ScheduledFuture enableOrDisableTask( + boolean shouldEnable, + ScheduledFuture currentTask, + long intervalMillis, + MessageType messageType + ) { + if (shouldEnable) { + if (currentTask != null) { + return currentTask; + } + ScheduledFuture task = this.scheduler.scheduleAtFixedRate( + postMessageRunnable(messageType, null), + intervalMillis, intervalMillis, TimeUnit.MILLISECONDS); + return task; + } else { + if (currentTask != null) { + currentTask.cancel(false); + } + return null; + } + } + + void waitUntilInactive() throws IOException { // visible for testing + postMessageAndWait(MessageType.SYNC, null); + } + + void postDiagnostic() { // visible for testing + postMessageAsync(MessageType.DIAGNOSTIC_STATS, null); + } + + private void postMessageAsync(MessageType type, Event event) { + postToChannel(new EventProcessorMessage(type, event, false)); + } + + private void postMessageAndWait(MessageType type, Event event) { + EventProcessorMessage message = new EventProcessorMessage(type, event, true); + if (postToChannel(message)) { + // COVERAGE: There is no way to reliably cause this to fail in tests + message.waitForCompletion(); + } + } + + private Runnable postMessageRunnable(final MessageType messageType, final Event event) { + return new Runnable() { + public void run() { + postMessageAsync(messageType, event); + } + }; + } + + private boolean postToChannel(EventProcessorMessage message) { + if (inbox.offer(message)) { + return true; + } + // If the inbox is full, it means the EventDispatcher thread is seriously backed up with not-yet-processed + // events. This is unlikely, but if it happens, it means the application is probably doing a ton of flag + // evaluations across many threads-- so if we wait for a space in the inbox, we risk a very serious slowdown + // of the app. To avoid that, we'll just drop the event. The log warning about this will only be shown once. + boolean alreadyLogged = inputCapacityExceeded; // possible race between this and the next line, but it's of no real consequence - we'd just get an extra log line + inputCapacityExceeded = true; + // COVERAGE: There is no way to reliably cause this condition in tests + if (!alreadyLogged) { + logger.warn("Events are being produced faster than they can be processed; some events will be dropped"); + } + return false; + } + + private static enum MessageType { + EVENT, + FLUSH, + FLUSH_USERS, + DIAGNOSTIC_INIT, + DIAGNOSTIC_STATS, + SYNC, + SHUTDOWN + } + + private static final class EventProcessorMessage { + private final MessageType type; + private final Event event; + private final Semaphore reply; + + private EventProcessorMessage(MessageType type, Event event, boolean sync) { + this.type = type; + this.event = event; + reply = sync ? 
new Semaphore(0) : null; + } + + void completed() { + if (reply != null) { + reply.release(); + } + } + + void waitForCompletion() { + if (reply == null) { // COVERAGE: there is no way to make this happen from test code + return; + } + while (true) { + try { + reply.acquire(); + return; + } + catch (InterruptedException ex) { // COVERAGE: there is no way to make this happen from test code. + } + } + } + +// intentionally commented out so this doesn't affect coverage reports when we're not debugging +// @Override +// public String toString() { // for debugging only +// return ((event == null) ? type.toString() : (type + ": " + event.getClass().getSimpleName())) + +// (reply == null ? "" : " (sync)"); +// } + } + + /** + * Takes messages from the input queue, updating the event buffer and summary counters + * on its own thread. + */ + static final class EventDispatcher { + private static final int MESSAGE_BATCH_SIZE = 50; + + final EventsConfiguration eventsConfig; // visible for testing + private final BlockingQueue inbox; + private final AtomicBoolean inBackground; + private final AtomicBoolean offline; + private final AtomicBoolean closed; + private final List flushWorkers; + private final AtomicInteger busyFlushWorkersCount; + private final AtomicLong lastKnownPastTime = new AtomicLong(0); + private final AtomicBoolean disabled = new AtomicBoolean(false); + private final AtomicBoolean didSendInitEvent = new AtomicBoolean(false); + final DiagnosticStore diagnosticStore; // visible for testing + private final EventContextDeduplicator contextDeduplicator; + private final ExecutorService sharedExecutor; + private final LDLogger logger; + + private long deduplicatedUsers = 0; + + private EventDispatcher( + EventsConfiguration eventsConfig, + ExecutorService sharedExecutor, + int threadPriority, + BlockingQueue inbox, + AtomicBoolean inBackground, + AtomicBoolean offline, + AtomicBoolean closed, + LDLogger logger + ) { + this.eventsConfig = eventsConfig; + this.inbox = inbox; + this.inBackground = inBackground; + this.offline = offline; + this.closed = closed; + this.sharedExecutor = sharedExecutor; + this.diagnosticStore = eventsConfig.diagnosticStore; + this.busyFlushWorkersCount = new AtomicInteger(0); + this.logger = logger; + + ThreadFactory threadFactory = new ThreadFactory() { + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(r); + t.setDaemon(true);; + t.setName(String.format("LaunchDarkly-event-delivery-%d", t.getId())); + t.setPriority(threadPriority); + return t; + } + }; + + // This queue only holds one element; it represents a flush task that has not yet been + // picked up by any worker, so if we try to push another one and are refused, it means + // all the workers are busy. 
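+      // If the offer is refused, triggerFlush restores the summary state and keeps the buffered
+      // events in the outbox so that they can be retried on a later flush.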
+ final BlockingQueue payloadQueue = new ArrayBlockingQueue<>(1); + + final EventBuffer outbox = new EventBuffer(eventsConfig.capacity, logger); + this.contextDeduplicator = eventsConfig.contextDeduplicator; + + Thread mainThread = threadFactory.newThread(new Thread() { + public void run() { + runMainLoop(inbox, outbox, payloadQueue); + } + }); + mainThread.setDaemon(true); + + mainThread.setUncaughtExceptionHandler(this::onUncaughtException); + + mainThread.start(); + + flushWorkers = new ArrayList<>(); + EventResponseListener listener = this::handleResponse; + for (int i = 0; i < eventsConfig.eventSendingThreadPoolSize; i++) { + SendEventsTask task = new SendEventsTask( + eventsConfig, + listener, + payloadQueue, + busyFlushWorkersCount, + threadFactory, + logger + ); + flushWorkers.add(task); + } + } + + private void onUncaughtException(Thread thread, Throwable e) { + // The thread's main loop catches all exceptions, so we'll only get here if an Error was thrown. + // In that case, the application is probably already in a bad state, but we can try to degrade + // relatively gracefully by performing an orderly shutdown of the event processor, so the + // application won't end up blocking on a queue that's no longer being consumed. + // COVERAGE: there is no way to make this happen from test code. + + logger.error("Event processor thread was terminated by an unrecoverable error. No more analytics events will be sent. {} {}", + LogValues.exceptionSummary(e), LogValues.exceptionTrace(e)); + // Note that this is a rare case where we always log the exception stacktrace, instead of only + // logging it at debug level. That's because an exception of this kind should never happen and, + // if it happens, may be difficult to debug. + + // Flip the switch to prevent DefaultEventProcessor from putting any more messages on the queue + closed.set(true); + // Now discard everything that was on the queue, but also make sure no one was blocking on a message + List messages = new ArrayList(); + inbox.drainTo(messages); + for (EventProcessorMessage m: messages) { + m.completed(); + } + } + + /** + * This task drains the input queue as quickly as possible. Everything here is done on a single + * thread so we don't have to synchronize on our internal structures; when it's time to flush, + * triggerFlush will hand the events off to another task. 
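+     * Messages are consumed in batches of up to MESSAGE_BATCH_SIZE: one blocking take() followed
+     * by a non-blocking drainTo(), which keeps queue overhead low when events arrive rapidly.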
+ */ + private void runMainLoop( + BlockingQueue inbox, + EventBuffer outbox, + BlockingQueue payloadQueue + ) { + List batch = new ArrayList(MESSAGE_BATCH_SIZE); + while (true) { + try { + batch.clear(); + batch.add(inbox.take()); // take() blocks until a message is available + inbox.drainTo(batch, MESSAGE_BATCH_SIZE - 1); // this nonblocking call allows us to pick up more messages if available + for (EventProcessorMessage message: batch) { + switch (message.type) { // COVERAGE: adding a default branch does not prevent coverage warnings here due to compiler issues + case EVENT: + processEvent(message.event, outbox); + break; + case FLUSH: + if (!offline.get()) { + triggerFlush(outbox, payloadQueue); + } + break; + case FLUSH_USERS: + if (contextDeduplicator != null) { + contextDeduplicator.flush(); + } + break; + case DIAGNOSTIC_INIT: + if (!offline.get() && !inBackground.get() && !didSendInitEvent.get()) { + sharedExecutor.submit(createSendDiagnosticTask(diagnosticStore.getInitEvent())); + } + break; + case DIAGNOSTIC_STATS: + if (!offline.get() && !inBackground.get()) { + sendAndResetDiagnostics(outbox); + } + break; + case SYNC: // this is used only by unit tests + waitUntilAllFlushWorkersInactive(); + break; + case SHUTDOWN: + doShutdown(); + message.completed(); + return; // deliberately exit the thread loop + } + message.completed(); + } + } catch (InterruptedException e) { + } catch (Exception e) { // COVERAGE: there is no way to cause this condition in tests + logger.error("Unexpected error in event processor: {}", e.toString()); + logger.debug(e.toString(), e); + } + } + } + + private void sendAndResetDiagnostics(EventBuffer outbox) { + if (disabled.get()) { + return; + } + long droppedEvents = outbox.getAndClearDroppedCount(); + // We pass droppedEvents and deduplicatedUsers as parameters here because they are updated frequently in the main loop so we want to avoid synchronization on them. + DiagnosticEvent diagnosticEvent = diagnosticStore.createEventAndReset(droppedEvents, deduplicatedUsers); + deduplicatedUsers = 0; + sharedExecutor.submit(createSendDiagnosticTask(diagnosticEvent)); + } + + private void doShutdown() { + waitUntilAllFlushWorkersInactive(); + disabled.set(true); // In case there are any more messages, we want to ignore them + for (SendEventsTask task: flushWorkers) { + task.stop(); + } + try { + eventsConfig.eventSender.close(); + } catch (IOException e) { + logger.error("Unexpected error when closing event sender: {}", LogValues.exceptionSummary(e)); + logger.debug(LogValues.exceptionTrace(e)); + } + } + + private void waitUntilAllFlushWorkersInactive() { + while (true) { + try { + synchronized(busyFlushWorkersCount) { + if (busyFlushWorkersCount.get() == 0) { + return; + } else { + busyFlushWorkersCount.wait(); + } + } + } catch (InterruptedException e) {} // COVERAGE: there is no way to cause this condition in tests + } + } + + private void processEvent(Event e, EventBuffer outbox) { + if (disabled.get()) { + return; + } + + // For migration events we process them and exit early. They cannot generate additional event types or be + // summarized. + if(e instanceof Event.MigrationOp) { + Event.MigrationOp me = (Event.MigrationOp)e; + if (Sampler.shouldSample(me.getSamplingRatio())) { + outbox.add(e); + } + return; + } + + LDContext context = e.getContext(); + if (context == null) { + return; // LDClient should never give us an event with no context + } + + // Decide whether to add the event to the payload. 
Feature events may be added twice, once for + // the event (if tracked) and once for debugging. + boolean addIndexEvent = false, + addFullEvent = false; + Event debugEvent = null; + + if (e instanceof Event.FeatureRequest) { + Event.FeatureRequest fe = (Event.FeatureRequest)e; + if(!fe.isExcludeFromSummaries()) { + outbox.addToSummary(fe); + } + addFullEvent = fe.isTrackEvents(); + if (shouldDebugEvent(fe)) { + debugEvent = fe.toDebugEvent(); + } + } else { + addFullEvent = true; + } + + // For each context we haven't seen before, we add an index event - unless this is already + // an identify event for that context. + if (context != null && context.getFullyQualifiedKey() != null) { + if (e instanceof Event.FeatureRequest || e instanceof Event.Custom) { + if (contextDeduplicator != null) { + // Add to the set of contexts we've noticed + addIndexEvent = contextDeduplicator.processContext(context); + if (!addIndexEvent) { + deduplicatedUsers++; + } + } + } else if (e instanceof Event.Identify) { + if (contextDeduplicator != null) { + contextDeduplicator.processContext(context); // just mark that we've seen it + } + } + } + + if (addIndexEvent) { + Event.Index ie = new Event.Index(e.getCreationDate(), e.getContext()); + outbox.add(ie); + } + if (addFullEvent && Sampler.shouldSample(e.getSamplingRatio())) { + outbox.add(e); + } + if (debugEvent != null && Sampler.shouldSample(e.getSamplingRatio())) { + outbox.add(debugEvent); + } + } + + private boolean shouldDebugEvent(Event.FeatureRequest fe) { + Long maybeDate = fe.getDebugEventsUntilDate(); + if (maybeDate == null) { + return false; + } + long debugEventsUntilDate = maybeDate.longValue(); + if (debugEventsUntilDate > 0) { + // The "last known past time" comes from the last HTTP response we got from the server. + // In case the client's time is set wrong, at least we know that any expiration date + // earlier than that point is definitely in the past. If there's any discrepancy, we + // want to err on the side of cutting off event debugging sooner. + long lastPast = lastKnownPastTime.get(); + if (debugEventsUntilDate > lastPast && + debugEventsUntilDate > System.currentTimeMillis()) { + return true; + } + } + return false; + } + + private void triggerFlush(EventBuffer outbox, BlockingQueue payloadQueue) { + if (disabled.get() || outbox.isEmpty()) { + return; + } + FlushPayload payload = outbox.getPayload(); + if (diagnosticStore != null) { + int eventCount = payload.events.length + (payload.summary.isEmpty() ? 
0 : 1); + diagnosticStore.recordEventsInBatch(eventCount); + } + busyFlushWorkersCount.incrementAndGet(); + if (payloadQueue.offer(payload)) { + // These events now belong to the next available flush worker, so drop them from our state + outbox.clear(); + } else { + logger.debug("Skipped flushing because all workers are busy"); + // All the workers are busy so we can't flush now; keep the events in our state + outbox.summarizer.restoreTo(payload.summary); + synchronized(busyFlushWorkersCount) { + busyFlushWorkersCount.decrementAndGet(); + busyFlushWorkersCount.notify(); + } + } + } + + private void handleResponse(EventSender.Result result) { + if (result.getTimeFromServer() != null) { + lastKnownPastTime.set(result.getTimeFromServer().getTime()); + } + if (result.isMustShutDown()) { + disabled.set(true); + } + } + + private Runnable createSendDiagnosticTask(final DiagnosticEvent diagnosticEvent) { + return new Runnable() { + @Override + public void run() { + try { + ByteArrayOutputStream buffer = new ByteArrayOutputStream(INITIAL_OUTPUT_BUFFER_SIZE); + Writer writer = new BufferedWriter(new OutputStreamWriter(buffer, Charset.forName("UTF-8")), INITIAL_OUTPUT_BUFFER_SIZE); + gson.toJson(diagnosticEvent.value, writer); + writer.flush(); + EventSender.Result result = eventsConfig.eventSender.sendDiagnosticEvent( + buffer.toByteArray(), eventsConfig.eventsUri); + handleResponse(result); + if (diagnosticEvent.initEvent) { + didSendInitEvent.set(true); + } + } catch (Exception e) { + logger.error("Unexpected error in event processor: {}", e.toString()); + logger.debug(e.toString(), e); + } + } + }; + } + } + + private static final class EventBuffer { + final List events = new ArrayList<>(); + final EventSummarizer summarizer = new EventSummarizer(); + private final int capacity; + private final LDLogger logger; + private boolean capacityExceeded = false; + private long droppedEventCount = 0; + + EventBuffer(int capacity, LDLogger logger) { + this.capacity = capacity; + this.logger = logger; + } + + void add(Event e) { + if (events.size() >= capacity) { + if (!capacityExceeded) { // don't need AtomicBoolean, this is only checked on one thread + capacityExceeded = true; + logger.warn("Exceeded event queue capacity. 
Increase capacity to avoid dropping events."); + } + droppedEventCount++; + } else { + capacityExceeded = false; + events.add(e); + } + } + + void addToSummary(Event.FeatureRequest e) { + summarizer.summarizeEvent( + e.getCreationDate(), + e.getKey(), + e.getVersion(), + e.getVariation(), + e.getValue(), + e.getDefaultVal(), + e.getContext() + ); + } + + boolean isEmpty() { + return events.isEmpty() && summarizer.isEmpty(); + } + + long getAndClearDroppedCount() { + long res = droppedEventCount; + droppedEventCount = 0; + return res; + } + + FlushPayload getPayload() { + Event[] eventsOut = events.toArray(new Event[events.size()]); + EventSummarizer.EventSummary summary = summarizer.getSummaryAndReset(); + return new FlushPayload(eventsOut, summary); + } + + void clear() { + events.clear(); + summarizer.clear(); + } + } + + private static final class FlushPayload { + final Event[] events; + final EventSummary summary; + + FlushPayload(Event[] events, EventSummary summary) { + this.events = events; + this.summary = summary; + } + } + + private static interface EventResponseListener { + void handleResponse(EventSender.Result result); + } + + private static final class SendEventsTask implements Runnable { + private final EventsConfiguration eventsConfig; + private final EventResponseListener responseListener; + private final BlockingQueue payloadQueue; + private final AtomicInteger activeFlushWorkersCount; + private final AtomicBoolean stopping; + private final EventOutputFormatter formatter; + private final Thread thread; + private final LDLogger logger; + + SendEventsTask( + EventsConfiguration eventsConfig, + EventResponseListener responseListener, + BlockingQueue payloadQueue, + AtomicInteger activeFlushWorkersCount, + ThreadFactory threadFactory, + LDLogger logger + ) { + this.eventsConfig = eventsConfig; + this.formatter = new EventOutputFormatter(eventsConfig); + this.responseListener = responseListener; + this.payloadQueue = payloadQueue; + this.activeFlushWorkersCount = activeFlushWorkersCount; + this.stopping = new AtomicBoolean(false); + this.logger = logger; + thread = threadFactory.newThread(this); + thread.setDaemon(true); + thread.start(); + } + + public void run() { + while (!stopping.get()) { + FlushPayload payload = null; + try { + payload = payloadQueue.take(); + } catch (InterruptedException e) { + continue; + } + try { + ByteArrayOutputStream buffer = new ByteArrayOutputStream(INITIAL_OUTPUT_BUFFER_SIZE); + Writer writer = new BufferedWriter(new OutputStreamWriter(buffer, Charset.forName("UTF-8")), INITIAL_OUTPUT_BUFFER_SIZE); + int outputEventCount = formatter.writeOutputEvents(payload.events, payload.summary, writer); + writer.flush(); + EventSender.Result result = eventsConfig.eventSender.sendAnalyticsEvents( + buffer.toByteArray(), + outputEventCount, + eventsConfig.eventsUri + ); + responseListener.handleResponse(result); + } catch (Exception e) { + logger.error("Unexpected error in event processor: {}", LogValues.exceptionSummary(e)); + logger.debug(LogValues.exceptionTrace(e)); + } + synchronized (activeFlushWorkersCount) { + activeFlushWorkersCount.decrementAndGet(); + activeFlushWorkersCount.notifyAll(); + } + } + } + + void stop() { + stopping.set(true); + thread.interrupt(); + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DefaultEventSender.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DefaultEventSender.java new file mode 100644 index 0000000..82ffbfb --- /dev/null +++ 
b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DefaultEventSender.java @@ -0,0 +1,221 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogValues; +import com.launchdarkly.sdk.internal.http.HttpHelpers; +import com.launchdarkly.sdk.internal.http.HttpProperties; + +import java.io.IOException; +import java.net.URI; +import java.nio.charset.Charset; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; +import java.util.UUID; + +import static com.launchdarkly.sdk.internal.http.HttpErrors.checkIfErrorIsRecoverableAndLog; +import static com.launchdarkly.sdk.internal.http.HttpErrors.httpErrorDescription; + +import okhttp3.Headers; +import okhttp3.MediaType; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; + +/** + * The default implementation of delivering JSON data to an LaunchDarkly event endpoint. + * This is the only implementation that is used by the SDKs. It is abstracted out with an + * interface for the sake of testability. + */ +public final class DefaultEventSender implements EventSender { + /** + * Default value for {@code retryDelayMillis} parameter. + */ + public static final long DEFAULT_RETRY_DELAY_MILLIS = 1000; + + /** + * Default value for {@code analyticsRequestPath} parameter, for the server-side SDK. + * The Android SDK should modify this value. + */ + public static final String DEFAULT_ANALYTICS_REQUEST_PATH = "/bulk"; + + /** + * Default value for {@code diagnosticRequestPath} parameter, for the server-side SDK. + * The Android SDK should modify this value. + */ + public static final String DEFAULT_DIAGNOSTIC_REQUEST_PATH = "/diagnostic"; + + private static final String EVENT_SCHEMA_HEADER = "X-LaunchDarkly-Event-Schema"; + private static final String EVENT_SCHEMA_VERSION = "4"; + private static final String EVENT_PAYLOAD_ID_HEADER = "X-LaunchDarkly-Payload-ID"; + private static final MediaType JSON_CONTENT_TYPE = MediaType.parse("application/json; charset=utf-8"); + private static final SimpleDateFormat HTTP_DATE_FORMAT = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", + Locale.US); // server dates as defined by RFC-822/RFC-1123 use English day/month names + private static final Object HTTP_DATE_FORMAT_LOCK = new Object(); // synchronize on this because DateFormat isn't thread-safe + + private final OkHttpClient httpClient; + private final boolean shouldCloseHttpClient; + private final Headers baseHeaders; + private final String analyticsRequestPath; + private final String diagnosticRequestPath; + final long retryDelayMillis; // visible for testing + private final LDLogger logger; + + /** + * Creates an instance. 
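+   * <p>
+   * A minimal construction sketch (hypothetical wiring; the SDKs normally create this internally):
+   * <pre><code>
+   *   EventSender sender = new DefaultEventSender(
+   *       httpProperties,   // an existing HttpProperties instance
+   *       null, null,       // null request paths select the documented server-side defaults
+   *       0,                // zero or a negative value selects DEFAULT_RETRY_DELAY_MILLIS
+   *       logger);
+   * </code></pre>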
+ * + * @param httpProperties the HTTP configuration + * @param analyticsRequestPath the request path for posting analytics events + * @param diagnosticRequestPath the request path for posting diagnostic events + * @param retryDelayMillis retry delay, or zero to use the default + * @param logger the logger + */ + public DefaultEventSender( + HttpProperties httpProperties, + String analyticsRequestPath, + String diagnosticRequestPath, + long retryDelayMillis, + LDLogger logger + ) { + if (httpProperties.getSharedHttpClient() == null) { + this.httpClient = httpProperties.toHttpClientBuilder().build(); + shouldCloseHttpClient = true; + } else { + this.httpClient = httpProperties.getSharedHttpClient(); + shouldCloseHttpClient = false; + } + this.logger = logger; + + this.baseHeaders = httpProperties.toHeadersBuilder() + .add("Content-Type", "application/json") + .build(); + + this.analyticsRequestPath = analyticsRequestPath == null ? DEFAULT_ANALYTICS_REQUEST_PATH : analyticsRequestPath; + this.diagnosticRequestPath = diagnosticRequestPath == null ? DEFAULT_DIAGNOSTIC_REQUEST_PATH : diagnosticRequestPath; + + this.retryDelayMillis = retryDelayMillis <= 0 ? DEFAULT_RETRY_DELAY_MILLIS : retryDelayMillis; + } + + @Override + public void close() throws IOException { + if (shouldCloseHttpClient) { + HttpProperties.shutdownHttpClient(httpClient); + } + } + + @Override + public Result sendAnalyticsEvents(byte[] data, int eventCount, URI eventsBaseUri) { + return sendEventData(false, data, eventCount, eventsBaseUri); + } + + @Override + public Result sendDiagnosticEvent(byte[] data, URI eventsBaseUri) { + return sendEventData(true, data, 1, eventsBaseUri); + } + + private Result sendEventData(boolean isDiagnostic, byte[] data, int eventCount, URI eventsBaseUri) { + if (data == null || data.length == 0) { + // DefaultEventProcessor won't normally pass us an empty payload, but if it does, don't bother sending + return new Result(true, false, null); + } + + Headers.Builder headersBuilder = baseHeaders.newBuilder(); + String path; + String description; + + if (isDiagnostic) { + path = diagnosticRequestPath; + description = "diagnostic event"; + } else { + path = analyticsRequestPath; + String eventPayloadId = UUID.randomUUID().toString(); + headersBuilder.add(EVENT_PAYLOAD_ID_HEADER, eventPayloadId); + headersBuilder.add(EVENT_SCHEMA_HEADER, EVENT_SCHEMA_VERSION); + description = String.format("%d event(s)", eventCount); + } + + URI uri = HttpHelpers.concatenateUriPath(eventsBaseUri, path); + Headers headers = headersBuilder.build(); + RequestBody body = RequestBody.create(data, JSON_CONTENT_TYPE); + boolean mustShutDown = false; + + logger.debug("Posting {} to {} with payload: {}", description, uri, + LogValues.defer(new LazilyPrintedUtf8Data(data))); + + for (int attempt = 0; attempt < 2; attempt++) { + if (attempt > 0) { + logger.warn("Will retry posting {} after {}ms", description, retryDelayMillis); + try { + Thread.sleep(retryDelayMillis); + } catch (InterruptedException e) { // COVERAGE: there's no way to cause this in tests + } + } + + Request request = new Request.Builder() + .url(uri.toASCIIString()) + .post(body) + .headers(headers) + .build(); + + long startTime = System.currentTimeMillis(); + String nextActionMessage = attempt == 0 ? 
"will retry" : "some events were dropped"; + String errorContext = "posting " + description; + + try (Response response = httpClient.newCall(request).execute()) { + long endTime = System.currentTimeMillis(); + logger.debug("{} delivery took {} ms, response status {}", description, endTime - startTime, response.code()); + + if (response.isSuccessful()) { + return new Result(true, false, parseResponseDate(response)); + } + + String errorDesc = httpErrorDescription(response.code()); + boolean recoverable = checkIfErrorIsRecoverableAndLog( + logger, + errorDesc, + errorContext, + response.code(), + nextActionMessage + ); + if (!recoverable) { + mustShutDown = true; + break; + } + } catch (IOException e) { + checkIfErrorIsRecoverableAndLog(logger, e.toString(), errorContext, 0, nextActionMessage); + } + } + + return new Result(false, mustShutDown, null); + } + + private final Date parseResponseDate(Response response) { + String dateStr = response.header("Date"); + if (dateStr != null) { + try { + // DateFormat is not thread-safe, so must synchronize + synchronized (HTTP_DATE_FORMAT_LOCK) { + return HTTP_DATE_FORMAT.parse(dateStr); + } + } catch (ParseException e) { + logger.warn("Received invalid Date header from events service"); + } + } + return null; + } + + private final class LazilyPrintedUtf8Data implements LogValues.StringProvider { + private final byte[] data; + + LazilyPrintedUtf8Data(byte[] data) { + this.data = data; + } + + @Override + public String get() { + return data == null ? "" : new String(data, Charset.forName("UTF-8")); + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticConfigProperty.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticConfigProperty.java new file mode 100644 index 0000000..e292690 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticConfigProperty.java @@ -0,0 +1,47 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.LDValueType; + +/** + * Defines the standard properties that are allowed in the configuration section of a diagnostic + * initialization event. 
+ */ +@SuppressWarnings("javadoc") +public enum DiagnosticConfigProperty { + ALL_ATTRIBUTES_PRIVATE("allAttributesPrivate", LDValueType.BOOLEAN), + CONNECT_TIMEOUT_MILLIS("connectTimeoutMillis", LDValueType.NUMBER), + CUSTOM_BASE_URI("customBaseURI", LDValueType.BOOLEAN), + CUSTOM_EVENTS_URI("customEventsURI", LDValueType.BOOLEAN), + CUSTOM_STREAM_URI("customStreamURI", LDValueType.BOOLEAN), + DATA_STORE_TYPE("dataStoreType", LDValueType.STRING), + DIAGNOSTIC_RECORDING_INTERVAL_MILLIS("diagnosticRecordingIntervalMillis", LDValueType.NUMBER), + EVENTS_CAPACITY("eventsCapacity", LDValueType.NUMBER), + EVENTS_FLUSH_INTERVAL_MILLIS("eventsFlushIntervalMillis", LDValueType.NUMBER), + POLLING_INTERVAL_MILLIS("pollingIntervalMillis", LDValueType.NUMBER), + RECONNECT_TIME_MILLIS("reconnectTimeMillis", LDValueType.NUMBER), + SAMPLING_INTERVAL("samplingInterval", LDValueType.NUMBER), + SOCKET_TIMEOUT_MILLIS("socketTimeoutMillis", LDValueType.NUMBER), + START_WAIT_MILLIS("startWaitMillis", LDValueType.NUMBER), + STREAMING_DISABLED("streamingDisabled", LDValueType.BOOLEAN), + USER_KEYS_CAPACITY("userKeysCapacity", LDValueType.NUMBER), + USER_KEYS_FLUSH_INTERVAL_MILLIS("userKeysFlushIntervalMillis", LDValueType.NUMBER), + USING_PROXY("usingProxy", LDValueType.BOOLEAN), + USING_PROXY_AUTHENTICATOR("usingProxyAuthenticator", LDValueType.BOOLEAN), + USING_RELAY_DAEMON("usingRelayDaemon", LDValueType.BOOLEAN), + + // the following properties are used only in Android + BACKGROUND_POLLING_INTERVAL_MILLIS("backgroundPollingIntervalMillis", LDValueType.NUMBER), + BACKGROUND_POLLING_DISABLED("backgroundPollingDisabled", LDValueType.BOOLEAN), + EVALUATION_REASONS_REQUESTED("evaluationReasonsRequested", LDValueType.BOOLEAN), + MAX_CACHED_USERS("maxCachedUsers", LDValueType.NUMBER), + MOBILE_KEY_COUNT("mobileKeyCount", LDValueType.NUMBER), + USE_REPORT("useReport", LDValueType.BOOLEAN); + + public final String name; + public final LDValueType type; + + private DiagnosticConfigProperty(String name, LDValueType type) { + this.name = name; + this.type = type; + } +} \ No newline at end of file diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticEvent.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticEvent.java new file mode 100644 index 0000000..351912e --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticEvent.java @@ -0,0 +1,96 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.ArrayBuilder; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; + +import java.util.List; + +/** + * Base class for diagnostic events. This class and its subclasses are used only for JSON serialization. + */ +public class DiagnosticEvent { + final boolean initEvent; + final LDValue value; + + private DiagnosticEvent(boolean initEvent, LDValue value) { + this.initEvent = initEvent; + this.value = value; + } + + /** + * Returns the JSON representation of the event. 
+ * @return the JSON representation as an {@link LDValue} + */ + public LDValue getJsonValue() { + return value; + } + + static DiagnosticEvent makeInit( + long creationDate, + DiagnosticId diagnosticId, + LDValue sdk, + LDValue configuration, + LDValue platform + ) { + return new DiagnosticEvent( + true, + baseBuilder("diagnostic-init", creationDate, diagnosticId) + .put("sdk", sdk) + .put("configuration", configuration) + .put("platform", platform) + .build() + ); + } + + static DiagnosticEvent makeStatistics( + long creationDate, + DiagnosticId diagnosticId, + long dataSinceDate, + long droppedEvents, + long deduplicatedUsers, + long eventsInLastBatch, + List streamInits + ) { + ObjectBuilder b = baseBuilder("diagnostic", creationDate, diagnosticId) + .put("dataSinceDate", dataSinceDate) + .put("droppedEvents", droppedEvents) + .put("deduplicatedUsers", deduplicatedUsers) + .put("eventsInLastBatch", eventsInLastBatch); + ArrayBuilder ab = LDValue.buildArray(); + if (streamInits != null) { + for (StreamInit si: streamInits) { + ab.add(LDValue.buildObject() + .put("timestamp", si.timestamp) + .put("durationMillis", si.durationMillis) + .put("failed", si.failed) + .build()); + } + } + b.put("streamInits", ab.build()); + return new DiagnosticEvent(false, b.build()); + } + + private static ObjectBuilder baseBuilder(String kind, long creationDate, DiagnosticId id) { + return LDValue.buildObject() + .put("kind", kind) + .put("creationDate", creationDate) + .put("id", LDValue.buildObject() + .put("diagnosticId", id.diagnosticId) + .put("sdkKeySuffix", id.sdkKeySuffix) + .build() + ); + } + + static class StreamInit { + final long timestamp; + final long durationMillis; + final boolean failed; + + StreamInit(long timestamp, long durationMillis, boolean failed) { + this.timestamp = timestamp; + this.durationMillis = durationMillis; + this.failed = failed; + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticId.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticId.java new file mode 100644 index 0000000..755656a --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticId.java @@ -0,0 +1,17 @@ +package com.launchdarkly.sdk.internal.events; + +import java.util.UUID; + +class DiagnosticId { + + final String diagnosticId = UUID.randomUUID().toString(); + final String sdkKeySuffix; + + DiagnosticId(String sdkKey) { + if (sdkKey == null) { + sdkKeySuffix = null; + } else { + this.sdkKeySuffix = sdkKey.substring(Math.max(0, sdkKey.length() - 6)); + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticStore.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticStore.java new file mode 100644 index 0000000..9f938d5 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/DiagnosticStore.java @@ -0,0 +1,206 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; +import com.launchdarkly.sdk.ObjectBuilder; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; + +/** + * Implementation of basic diagnostic event creation. Platform-specific details are provided in + * SdkDiagnosticParams. 
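+ * <p>
+ * A construction sketch (the key, names, and version below are illustrative, not required values):
+ * <pre><code>
+ *   DiagnosticStore store = new DiagnosticStore(new DiagnosticStore.SdkDiagnosticParams(
+ *       sdkKey, "java-server-sdk", "5.0.0", "Java",
+ *       null, null, null)); // no extra platform data, HTTP headers, or config properties
+ *   DiagnosticEvent initEvent = store.getInitEvent();
+ * </code></pre>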
+ */ +public final class DiagnosticStore { + private final DiagnosticId diagnosticId; + private final long creationDate; + private final SdkDiagnosticParams diagnosticParams; + + private volatile long dataSinceDate; + private final AtomicInteger eventsInLastBatch = new AtomicInteger(0); + private final Object streamInitsLock = new Object(); + private ArrayList streamInits = new ArrayList<>(); + + /** + * Parameters for creating a DiagnosticStore. + */ + public static class SdkDiagnosticParams { + final String sdkKeyOrMobileKey; + final String sdkName; + final String sdkVersion; + final String platformName; + final LDValue extraPlatformData; + final Map defaultHttpHeaders; + final List configProperties; + + /** + * Creates an instance. + * + * @param sdkKeyOrMobileKey the SDK key or mobile key + * @param sdkName the SDK name as represented in diagnostic events + * @param sdkVersion the version string + * @param platformName the platform name as represented in diagnostic events + * @param extraPlatformData optional JSON object for platform properties + * @param defaultHttpHeaders from the HTTP configuration (we get the wrapper name from this) + * @param configProperties optional JSON object for any additional config properties + */ + public SdkDiagnosticParams( + String sdkKeyOrMobileKey, + String sdkName, + String sdkVersion, + String platformName, + LDValue extraPlatformData, + Map defaultHttpHeaders, + List configProperties + ) { + this.sdkKeyOrMobileKey = sdkKeyOrMobileKey; + this.sdkName = sdkName; + this.sdkVersion = sdkVersion; + this.platformName = platformName; + this.extraPlatformData = extraPlatformData; + this.defaultHttpHeaders = defaultHttpHeaders == null ? emptyMap() : new HashMap<>(defaultHttpHeaders); + this.configProperties = configProperties == null ? emptyList() : new ArrayList(configProperties); + } + } + + /** + * Constructs an instance. + * + * @param params the diagnostic properties + */ + public DiagnosticStore(SdkDiagnosticParams params) { + this.creationDate = this.dataSinceDate = System.currentTimeMillis(); + this.diagnosticId = new DiagnosticId(params.sdkKeyOrMobileKey); + this.diagnosticParams = params; + } + + /** + * Returns the unique diagnostic identifier. + * + * @return the identifier + */ + public DiagnosticId getDiagnosticId() { + return diagnosticId; + } + + /** + * Returns the millisecond timestamp when the current diagnostic stats began. + * + * @return the timestamp + */ + public long getDataSinceDate() { + return dataSinceDate; + } + + /** + * Returns the initial diagnostic event as a JSON object. 
+ * + * @return the initial event + */ + public DiagnosticEvent getInitEvent() { + return DiagnosticEvent.makeInit(creationDate, diagnosticId, + makeInitEventSdkData(), makeInitEventConfigData(), makeInitEventPlatformData()); + } + + private LDValue makeInitEventSdkData() { + ObjectBuilder b = LDValue.buildObject() + .put("name", diagnosticParams.sdkName) + .put("version", diagnosticParams.sdkVersion); + for (Map.Entry kv: diagnosticParams.defaultHttpHeaders.entrySet()) { + if (kv.getKey().equalsIgnoreCase("x-launchdarkly-wrapper")) { + if (kv.getValue().contains("/")) { + b.put("wrapperName", kv.getValue().substring(0, kv.getValue().indexOf("/"))); + b.put("wrapperVersion", kv.getValue().substring(kv.getValue().indexOf("/") + 1)); + } else { + b.put("wrapperName", kv.getValue()); + } + } + } + return b.build(); + } + + private LDValue makeInitEventConfigData() { + ObjectBuilder b = LDValue.buildObject(); + for (LDValue configProps: diagnosticParams.configProperties) { + if (configProps == null || configProps.getType() != LDValueType.OBJECT) { + continue; + } + for (String prop: configProps.keys()) { + // filter this to make sure a badly-behaved custom component doesn't inject weird + // properties that will confuse the event recorder + for (DiagnosticConfigProperty p: DiagnosticConfigProperty.values()) { + if (p.name.equals(prop)) { + LDValue value = configProps.get(prop); + if (value.getType() == p.type) { + b.put(prop, value); + } + break; + } + } + } + } + return b.build(); + } + + private LDValue makeInitEventPlatformData() { + ObjectBuilder b = LDValue.buildObject() + .put("name", diagnosticParams.platformName) + .put("osArch", System.getProperty("os.arch")) + .put("osVersion", System.getProperty("os.version")); + if (diagnosticParams.extraPlatformData != null) { + for (String key: diagnosticParams.extraPlatformData.keys()) { + b.put(key, diagnosticParams.extraPlatformData.get(key)); + } + } + return b.build(); + } + + /** + * Records a successful or failed stream initialization. + * + * @param timestamp the millisecond timestamp + * @param durationMillis how long the initialization took + * @param failed true if failed + */ + public void recordStreamInit(long timestamp, long durationMillis, boolean failed) { + synchronized (streamInitsLock) { + streamInits.add(new DiagnosticEvent.StreamInit(timestamp, durationMillis, failed)); + } + } + + /** + * Records the number of events in the last flush payload. + * + * @param eventsInBatch the event count + */ + public void recordEventsInBatch(int eventsInBatch) { + eventsInLastBatch.set(eventsInBatch); + } + + /** + * Creates a statistics event and then resets the counters. 
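+   * <p>
+   * The dropped-event and deduplicated-context counts are supplied by the caller; the stream-init
+   * list, the last-batch event count, and the data-since timestamp held by this object are reset
+   * as a side effect.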
+ * + * @param droppedEvents number of dropped events + * @param deduplicatedContexts number of deduplicated contexts + * @return the event + */ + public DiagnosticEvent createEventAndReset(long droppedEvents, long deduplicatedContexts) { + long currentTime = System.currentTimeMillis(); + List eventInits; + synchronized (streamInitsLock) { + eventInits = streamInits; + streamInits = new ArrayList<>(); + } + long eventsInBatch = eventsInLastBatch.getAndSet(0); + DiagnosticEvent res = DiagnosticEvent.makeStatistics(currentTime, diagnosticId, dataSinceDate, droppedEvents, + deduplicatedContexts, eventsInBatch, eventInits); + dataSinceDate = currentTime; + return res; + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/Event.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/Event.java new file mode 100644 index 0000000..16c5b66 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/Event.java @@ -0,0 +1,688 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +/** + * Base class for all analytics events that are generated by the client. Also defines all of its own subclasses. + *

+ * These types are not visible to applications; they are an implementation detail of the default event + * processor. + */ +public class Event { + private final long creationDate; + private final LDContext context; + + /** + * Base event constructor. + * + * @param creationDate the timestamp in milliseconds + * @param context the context associated with the event + */ + public Event(long creationDate, LDContext context) { + this.creationDate = creationDate; + this.context = context; + } + + /** + * The event timestamp. + * + * @return the timestamp in milliseconds + */ + public long getCreationDate() { + return creationDate; + } + + /** + * The context associated with the event. + * + * @return the context object + */ + public LDContext getContext() { + return context; + } + + /** + * Ratio used for sampling the event. The default sampling ratio is 1. + *

+ * Currently, sampling applies to feature, debug, and migration events. + * + * @return the sampling ratio + */ + public long getSamplingRatio() { + return 1; + } + + /** + * A custom event created with one of the SDK's {@code track} methods. + */ + public static final class Custom extends Event { + private final String key; + private final LDValue data; + private final Double metricValue; + + /** + * Constructs a custom event. + * + * @param timestamp the timestamp in milliseconds + * @param key the event key + * @param context the context associated with the event + * @param data custom data if any (null is the same as {@link LDValue#ofNull()}) + * @param metricValue custom metric value if any + */ + public Custom(long timestamp, String key, LDContext context, LDValue data, Double metricValue) { + super(timestamp, context); + this.key = key; + this.data = LDValue.normalize(data); + this.metricValue = metricValue; + } + + /** + * The custom event key. + * + * @return the event key + */ + public String getKey() { + return key; + } + + /** + * The custom data associated with the event, if any. + * + * @return the event data (null is equivalent to {@link LDValue#ofNull()}) + */ + public LDValue getData() { + return data; + } + + /** + * The numeric metric value associated with the event, if any. + * + * @return the metric value or null + */ + public Double getMetricValue() { + return metricValue; + } + } + + /** + * An event created with the SDK's {@code identify} method (or generated automatically at startup + * if it is a client-side SDK). + */ + public static final class Identify extends Event { + /** + * Constructs an identify event. + * + * @param timestamp the timestamp in milliseconds + * @param context the context associated with the event + */ + public Identify(long timestamp, LDContext context) { + super(timestamp, context); + } + } + + /** + * An event created internally by the SDK to hold user data that may be referenced by multiple events. + */ + public static final class Index extends Event { + /** + * Constructs an index event. + * + * @param timestamp the timestamp in milliseconds + * @param context the context associated with the event + */ + public Index(long timestamp, LDContext context) { + super(timestamp, context); + } + } + + /** + * An event generated by a feature flag evaluation. + */ + public static final class FeatureRequest extends Event { + private final String key; + private final int variation; + private final LDValue value; + private final LDValue defaultVal; + private final int version; + private final String prereqOf; + private final boolean trackEvents; + private final Long debugEventsUntilDate; + private final EvaluationReason reason; + private final boolean debug; + private final long samplingRatio; + private final boolean excludeFromSummaries; + + /** + * Constructs a feature request event. 
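+     * <p>
+     * The overload below supplies default values of 1 for samplingRatio and false for
+     * excludeFromSummaries.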
+ * + * @param timestamp the timestamp in milliseconds + * @param key the flag key + * @param context the context associated with the event + * @param version the flag version, or -1 if the flag was not found + * @param variation the result variation, or -1 if there was an error + * @param value the result value + * @param defaultVal the default value passed by the application + * @param reason the evaluation reason, if it is to be included in the event + * @param prereqOf if this flag was evaluated as a prerequisite, this is the key of the flag that referenced it + * @param trackEvents true if full event tracking is turned on for this flag + * @param debugEventsUntilDate if non-null, the time until which event debugging should be enabled + * @param debug true if this is a debugging event + * @param excludeFromSummaries true to exclude the event from summaries + * @param samplingRatio the sampling ratio for the event + */ + public FeatureRequest(long timestamp, String key, LDContext context, int version, int variation, LDValue value, + LDValue defaultVal, EvaluationReason reason, String prereqOf, boolean trackEvents, + Long debugEventsUntilDate, boolean debug, long samplingRatio, boolean excludeFromSummaries) { + super(timestamp, context); + this.key = key; + this.version = version; + this.variation = variation; + this.value = value; + this.defaultVal = defaultVal; + this.prereqOf = prereqOf; + this.trackEvents = trackEvents; + this.debugEventsUntilDate = debugEventsUntilDate; + this.reason = reason; + this.debug = debug; + this.excludeFromSummaries = excludeFromSummaries; + this.samplingRatio = samplingRatio; + } + + /** + * Constructs a feature request event. + *

+ * This version of the constructor uses default values for the samplingRatio (1) and excludeFromSummaries (false). + * + * @param timestamp the timestamp in milliseconds + * @param key the flag key + * @param context the context associated with the event + * @param version the flag version, or -1 if the flag was not found + * @param variation the result variation, or -1 if there was an error + * @param value the result value + * @param defaultVal the default value passed by the application + * @param reason the evaluation reason, if it is to be included in the event + * @param prereqOf if this flag was evaluated as a prerequisite, this is the key of the flag that referenced it + * @param trackEvents true if full event tracking is turned on for this flag + * @param debugEventsUntilDate if non-null, the time until which event debugging should be enabled + * @param debug true if this is a debugging event + */ + public FeatureRequest(long timestamp, String key, LDContext context, int version, int variation, LDValue value, + LDValue defaultVal, EvaluationReason reason, String prereqOf, boolean trackEvents, + Long debugEventsUntilDate, boolean debug) { + this(timestamp, key, context, version, variation, value, defaultVal, reason, prereqOf, trackEvents, + debugEventsUntilDate, debug, 1, false); + } + + /** + * The key of the feature flag that was evaluated. + * + * @return the flag key + */ + public String getKey() { + return key; + } + + /** + * The index of the selected flag variation, or -1 if the application default value was used. + * + * @return zero-based index of the variation, or -1 + */ + public int getVariation() { + return variation; + } + + /** + * The value of the selected flag variation. + * + * @return the value + */ + public LDValue getValue() { + return value; + } + + /** + * The application default value used in the evaluation. + * + * @return the application default + */ + public LDValue getDefaultVal() { + return defaultVal; + } + + /** + * The version of the feature flag that was evaluated, or -1 if the flag was not found. + * + * @return the flag version or null + */ + public int getVersion() { + return version; + } + + /** + * If this flag was evaluated as a prerequisite for another flag, the key of the other flag. + * + * @return a flag key or null + */ + public String getPrereqOf() { + return prereqOf; + } + + /** + * True if full event tracking is enabled for this flag. + * + * @return true if full event tracking is on + */ + public boolean isTrackEvents() { + return trackEvents; + } + + /** + * If debugging is enabled for this flag, the Unix millisecond time at which to stop debugging. + * + * @return a timestamp or null + */ + public Long getDebugEventsUntilDate() { + return debugEventsUntilDate; + } + + /** + * The {@link EvaluationReason} for this evaluation, or null if the reason was not requested for this evaluation. + * + * @return a reason object or null + */ + public EvaluationReason getReason() { + return reason; + } + + /** + * True if this event was generated due to debugging being enabled. + * + * @return true if this is a debug event + */ + public boolean isDebug() { + return debug; + } + + public boolean isExcludeFromSummaries() { + return excludeFromSummaries; + } + + @Override + public long getSamplingRatio() { + return samplingRatio; + } + + /** + * Creates a debug event with the same properties as this event. 
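+     * <p>
+     * The copy has trackEvents set to false, no debugEventsUntilDate, and debug set to true, so it
+     * is delivered as a debug event rather than being counted again as a regular feature event.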
+ * + * @return a debug event + */ + public FeatureRequest toDebugEvent() { + return new FeatureRequest(getCreationDate(), getKey(), getContext(), getVersion(), + getVariation(), getValue(), getDefaultVal(), getReason(), getPrereqOf(), + false, null, true, samplingRatio, excludeFromSummaries); + } + } + + /** + * An event generated by a migration operation. + */ + public static final class MigrationOp extends Event { + private final String featureKey; + private final int variation; + private final LDValue value; + private final LDValue defaultVal; + private final EvaluationReason reason; + + private final long samplingRatio; + + private final String operation; + + private final int flagVersion; + + private final ConsistencyMeasurement consistencyMeasurement; + private final LatencyMeasurement latencyMeasurement; + private final ErrorMeasurement errorMeasurement; + private final InvokedMeasurement invokedMeasurement; + + /** + * Measurement used to indicate if the values in a read operation were consistent. + */ + public static final class ConsistencyMeasurement { + private final boolean consistent; + private final long samplingRatio; + + /** + * Construct a new consistency measurement. + * + * @param consistent true if the result was consistent + * @param samplingRatio the sampling ratio for the consistency check + */ + public ConsistencyMeasurement(boolean consistent, long samplingRatio) { + this.consistent = consistent; + this.samplingRatio = samplingRatio; + } + + /** + * Check if the operation was consistent. + * + * @return true if the operation was consistent + */ + public boolean isConsistent() { + return consistent; + } + + /** + * Get the sampling ratio for the consistency check. + * + * @return the sampling ratio + */ + public long getSamplingRatio() { + return samplingRatio; + } + } + + /** + * Latency measurement for a migration operation. + */ + public static final class LatencyMeasurement { + private final Long oldLatencyMs; + private final Long newLatencyMs; + + /** + * Construct a latency measurement. + * + * @param oldLatency the old method latency, in milliseconds, or null if the old method was not executed + * @param newLatency the new method latency, in milliseconds, or null if the new method was not executed + */ + public LatencyMeasurement(@Nullable Long oldLatency, @Nullable Long newLatency) { + this.oldLatencyMs = oldLatency; + this.newLatencyMs = newLatency; + } + + /** + * Get the old method execution latency in milliseconds. + * + * @return The old latency or null if the method was not invoked. + */ + public Long getOldLatencyMs() { + return oldLatencyMs; + } + + /** + * Get the new method execution latency in milliseconds. + * + * @return The new latency or null if the method was not invoked. + */ + public Long getNewLatencyMs() { + return newLatencyMs; + } + + /** + * Returns true if either of the durations are set. + * + * @return true if either of the durations are set + */ + public boolean hasMeasurement() { + return oldLatencyMs != null || newLatencyMs != null; + } + } + + /** + * Error measurement for a migration operation. + */ + public static final class ErrorMeasurement { + private final boolean oldError; + private final boolean newError; + + /** + * Construct an error measurement. 
+ * + * @param oldError true if there was an error executing the old method + * @param newError true if there was an error executing the new method + */ + public ErrorMeasurement(boolean oldError, boolean newError) { + this.oldError = oldError; + this.newError = newError; + } + + /** + * Check if there was an error executing the old method. + * + * @return true if there was an error executing the old method + */ + public boolean hasOldError() { + return oldError; + } + + /** + * Check if there was an error executing the new method. + * + * @return true if there was an error executing the new method + */ + public boolean hasNewError() { + return newError; + } + + /** + * Returns true if there are errors present for either of the origins. + * + * @return true if errors are present + */ + public boolean hasMeasurement() { + return oldError || newError; + } + } + + /** + * Invoked measurement for a migration op. + *
+ * Indicates which origins/sources were executed while doing a migration operation. + */ + public static final class InvokedMeasurement { + private final boolean oldInvoked; + private final boolean newInvoked; + + /** + * Construct a new invoked measurement. + * + * @param oldInvoked true if old was invoked + * @param newInvoked true if new was invoked + */ + public InvokedMeasurement(boolean oldInvoked, boolean newInvoked) { + this.oldInvoked = oldInvoked; + this.newInvoked = newInvoked; + } + + /** + * Check if the old method was invoked. + * + * @return true if the old method was invoked + */ + public boolean wasOldInvoked() { + return oldInvoked; + } + + /** + * Check if the new method was invoked. + * + * @return true if the new method was invoked + */ + public boolean wasNewInvoked() { + return newInvoked; + } + } + + /** + * Construct a new migration operation event. + * + * @param timestamp the timestamp in milliseconds + * @param context the context associated with the event + * @param featureKey the flag key + * @param variation the result variation, or -1 if there was an error + * @param flagVersion the flag version, or -1 if the flag was not found + * @param value the result value + * @param defaultVal the default value passed by the application + * @param reason the evaluation reason, if it is to be included in the event + * @param samplingRatio the sampling ratio for this event + * @param operation the operation for the event + * @param invokedMeasurement measurement containing which origins were invoked + * @param consistencyMeasurement measurement containing results of a consistency check, or null if no check was done + * @param latencyMeasurement measurement containing the execution latencies of invoked methods, or null if no check + * was done + * @param errorMeasurement measurement reporting any errors, or null if no errors were encountered + */ + public MigrationOp( + long timestamp, + @NotNull LDContext context, + @NotNull String featureKey, + int variation, + int flagVersion, + @NotNull LDValue value, + @NotNull LDValue defaultVal, + @Nullable EvaluationReason reason, // For a server SDK this will not be null, but if it is ever used client side + // then likely this would be null unless evaluation reasons were requested. + long samplingRatio, + @NotNull String operation, + @NotNull InvokedMeasurement invokedMeasurement, // An invoked measurement is required. + @Nullable ConsistencyMeasurement consistencyMeasurement, + @Nullable LatencyMeasurement latencyMeasurement, + @Nullable ErrorMeasurement errorMeasurement + ) { + super(timestamp, context); + this.featureKey = featureKey; + this.variation = variation; + this.flagVersion = flagVersion; + this.value = value; + this.defaultVal = defaultVal; + this.reason = reason; + this.samplingRatio = samplingRatio; + this.operation = operation; + this.consistencyMeasurement = consistencyMeasurement; + this.latencyMeasurement = latencyMeasurement; + this.errorMeasurement = errorMeasurement; + this.invokedMeasurement = invokedMeasurement; + } + + /** + * The key of the feature flag that was evaluated. + * + * @return the flag key + */ + @NotNull + public String getFeatureKey() { + return featureKey; + } + + /** + * The index of the selected flag variation, or -1 if the application default value was used. + * + * @return zero-based index of the variation, or -1 + */ + public int getVariation() { + return variation; + } + + /** + * The version of the feature flag that was evaluated, or -1 if the flag was not found. 
+ * + * @return the flag version or -1 + */ + public int getFlagVersion() { + return flagVersion; + } + + /** + * The value of the selected flag variation. + * + * @return the value + */ + @NotNull + public LDValue getValue() { + return value; + } + + /** + * The application default value used in the evaluation. + * + * @return the application default + */ + @NotNull + public LDValue getDefaultVal() { + return defaultVal; + } + + /** + * The {@link EvaluationReason} for this evaluation, or null if the reason was not requested for this evaluation. + * + * @return a reason object or null + */ + @Nullable + public EvaluationReason getReason() { + return reason; + } + + /** + * The {@link InvokedMeasurement} for this operation. + * + * @return the invoked measurement + */ + @NotNull + public InvokedMeasurement getInvokedMeasurement() { + return invokedMeasurement; + } + + /** + * The {@link LatencyMeasurement} for this operation. + * + * @return the latency measurement or null + */ + @Nullable + public LatencyMeasurement getLatencyMeasurement() { + return latencyMeasurement; + } + + /** + * The {@link ErrorMeasurement} for this operation. + * + * @return the error measurement or null + */ + @Nullable + public ErrorMeasurement getErrorMeasurement() { + return errorMeasurement; + } + + /** + * Get the {@link ConsistencyMeasurement} for this operation. + * + * @return the consistency measurement or null + */ + @Nullable + public ConsistencyMeasurement getConsistencyMeasurement() { + return consistencyMeasurement; + } + + /** + * Get the sampling ratio for this event. + * + * @return the sampling ratio + */ + @Override + public long getSamplingRatio() { + return samplingRatio; + } + + /** + * Get the migration operation for this event. + * + * @return the migration operation + */ + public String getOperation() { + return operation; + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventContextDeduplicator.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventContextDeduplicator.java new file mode 100644 index 0000000..ec8c846 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventContextDeduplicator.java @@ -0,0 +1,31 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.LDContext; + +/** + * Interface for a strategy for removing duplicate contexts from the event stream. This has + * been factored out of DefaultEventProcessor because the client-side and server-side SDKs + * behave differently (client-side does not send index events). + */ +public interface EventContextDeduplicator { + /** + * Returns the millisecond interval, if any, at which the event processor should call flush(). + * + * @return a number of milliseconds, or null if not applicable + */ + Long getFlushInterval(); + + /** + * Updates the internal state if necessary to reflect that we have seen the given context. + * Returns true if it is time to insert an index event for this context into the event output. + * + * @param context a context object + * @return true if an index event should be emitted + */ + boolean processContext(LDContext context); + + /** + * Forgets any cached context information, so all subsequent contexs will be treated as new. 
+ */ + void flush(); +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventContextFormatter.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventContextFormatter.java new file mode 100644 index 0000000..878ed79 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventContextFormatter.java @@ -0,0 +1,219 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.gson.stream.JsonWriter; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.LDValueType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static com.launchdarkly.sdk.internal.GsonHelpers.gsonInstance; + +/** + * Implements serialization of contexts within JSON event data. This uses a similar schema to the + * regular context JSON schema (i.e. what you get if you call JsonSerialization.serialize() on an + * LDContext), but not quite the same, because it transforms the context to redact any attributes + * (or subproperties of attributes that are objects) that were designated as private, accumulating + * a list of the names of these in _meta.redactedAttributes. + *
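+ * <p>
+ * For illustration (the attribute names and values here are hypothetical), a single-kind context
+ * whose "email" attribute is designated as private would be written roughly as
+ * <pre>{@code
+ * {"kind": "user", "key": "user-key", "name": "Sandy",
+ *  "_meta": {"redactedAttributes": ["email"]}}
+ * }</pre>
+ * with the email value omitted from the output.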
+ * This implementation is optimized to avoid unnecessary work in the typical use case where there + * aren't any private attributes. + */ +class EventContextFormatter { + private final boolean allAttributesPrivate; + private final AttributeRef[] globalPrivateAttributes; + + EventContextFormatter(boolean allAttributesPrivate, AttributeRef[] globalPrivateAttributes) { + this.allAttributesPrivate = allAttributesPrivate; + this.globalPrivateAttributes = globalPrivateAttributes == null ? new AttributeRef[0] : globalPrivateAttributes; + } + + public void write(LDContext c, JsonWriter w, boolean redactAnonymous) throws IOException { + if (c.isMultiple()) { + w.beginObject(); + w.name("kind").value("multi"); + for (int i = 0; i < c.getIndividualContextCount(); i++) { + LDContext c1 = c.getIndividualContext(i); + w.name(c1.getKind().toString()); + writeSingleKind(c1, w, false, redactAnonymous); + } + w.endObject(); + } else { + writeSingleKind(c, w, true, redactAnonymous); + } + } + + private void writeSingleKind(LDContext c, JsonWriter w, boolean includeKind, boolean redactAnonymous) throws IOException { + w.beginObject(); + + // kind, key, and anonymous are never redacted + if (includeKind) { + w.name("kind").value(c.getKind().toString()); + } + w.name("key").value(c.getKey()); + if (c.isAnonymous()) { + w.name("anonymous").value(true); + } + + List redacted = null; + if (c.getName() != null) { + if (isAttributeEntirelyPrivate(c, "name", redactAnonymous)) { + redacted = addOrCreate(redacted, "name"); + } else { + w.name("name").value(c.getName()); + } + } + + for (String attrName: c.getCustomAttributeNames()) { + redacted = writeOrRedactAttribute(w, c, attrName, c.getValue(attrName), redacted, redactAnonymous); + } + + boolean haveRedacted = redacted != null && !redacted.isEmpty(); + if (haveRedacted) { + w.name("_meta").beginObject(); + w.name("redactedAttributes").beginArray(); + for (String a: redacted) { + w.value(a); + } + w.endArray(); + w.endObject(); + } + + w.endObject(); + } + + private boolean isAttributeEntirelyPrivate(LDContext c, String attrName, boolean redactAnonymous) { + if (allAttributesPrivate) { + return true; + } else if (redactAnonymous && c.isAnonymous()) { + return true; + } + AttributeRef privateRef = findPrivateRef(c, 1, attrName, null); + return privateRef != null && privateRef.getDepth() == 1; + } + + private List writeOrRedactAttribute( + JsonWriter w, + LDContext c, + String attrName, + LDValue value, + List redacted, + boolean redactAnonymous + ) throws IOException { + if (allAttributesPrivate) { + return addOrCreate(redacted, attrName); + } else if (redactAnonymous && c.isAnonymous()) { + return addOrCreate(redacted, attrName); + } + return writeRedactedValue(w, c, 0, attrName, value, null, redacted); + } + + // This method implements the context-aware attribute redaction logic, in which an attribute + // can be 1. written as-is, 2. fully redacted, or 3. (for a JSON object) partially redacted. + // It returns the updated redacted attribute list. + private List writeRedactedValue( + JsonWriter w, + LDContext c, + int previousDepth, + String attrName, + LDValue value, + AttributeRef previousMatchRef, + List redacted + ) throws IOException { + // See findPrivateRef for the meaning of the previousMatchRef parameter. 
+ int depth = previousDepth + 1; + AttributeRef privateRef = findPrivateRef(c, depth, attrName, previousMatchRef); + + // If privateRef is non-null, then it is either an exact match for the property we're looking at, + // or it refers to a subproperty of it (for instance, if we are redacting property "b" within + // attribute "a", it could be /a/b [depth 2] or /a/b/c [depth 3]). If the depth shows that it's an + // exact match, this whole value is redacted and we don't bother recursing. + if (privateRef != null && privateRef.getDepth() == depth) { + return addOrCreate(redacted, privateRef.toString()); + } + + // If privateRef is null (there was no matching private attribute)-- or, if privateRef isn't null + // but it refers to a subproperty, and this value isn't an object so it has no properties-- then + // we just write the value unredacted. + if (privateRef == null || value.getType() != LDValueType.OBJECT) { + writeNameAndValue(w, attrName, value); + return redacted; + } + + // At this point we know it is an object and we are redacting subproperties. + w.name(attrName).beginObject(); + for (String name: value.keys()) { + redacted = writeRedactedValue(w, c, depth, name, value.get(name), privateRef, redacted); + } + w.endObject(); + return redacted; + } + + // Searches both the globally private attributes and the per-context private attributes to find a + // match for the attribute or subproperty we're looking at. + // + // If we find one that exactly matches the current path (that is, the depth is the same), we + // return that one, because that would tell us that the entire attribute/subproperty should be + // redacted. If we don't find that, but we do find at least one match for a subproperty of this + // path (that is, it has the current path as a prefix, but the depth is greater), then we return + // it, to tell us that we'll need to recurse to redact subproperties. + // + // The previousMatchRef parameter is how we to keep track of the previous path segments we have + // already matched when recursing. It starts out as null at the top level. Then, every time we + // recurse to redact subproperties of an object, we set previousMatchRef to *any* AttributeRef + // we've seen that has the current subpath as a prefix; such an AttributeRef is guaranteed to + // exist, because we wouldn't have bothered to recurse if we hadn't found one, and we will only + // be comparing components 0 through depth-1 of it (see matchPrivateRef). This shortcut allows + // us to avoid allocating a variable-length mutable data structure such as a stack. 
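+ // Worked example (attribute names here are purely illustrative): suppose the only private
+ // reference is /address/street. When writing the top-level attribute "address" (depth 1), that
+ // reference matches as a prefix but not exactly, so writeRedactedValue recurses into the object
+ // value. At depth 2 the subproperty "street" matches exactly and is added to the redacted list
+ // as "/address/street", while a sibling subproperty such as "city" is written unredacted.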
+ private AttributeRef findPrivateRef(LDContext c, int depth, String attrName, AttributeRef previousMatchRef) { + AttributeRef nonExactMatch = null; + if (globalPrivateAttributes.length != 0) { // minor optimization to avoid creating an iterator if it's empty + for (AttributeRef globalPrivate: globalPrivateAttributes) { + if (matchPrivateRef(globalPrivate, depth, attrName, previousMatchRef)) { + if (globalPrivate.getDepth() == depth) { + return globalPrivate; + } + nonExactMatch = globalPrivate; + } + } + } + for (int i = 0; i < c.getPrivateAttributeCount(); i++) { + AttributeRef contextPrivate = c.getPrivateAttribute(i); + if (matchPrivateRef(contextPrivate, depth, attrName, previousMatchRef)) { + if (contextPrivate.getDepth() == depth) { + return contextPrivate; + } + nonExactMatch = contextPrivate; + } + } + return nonExactMatch; + } + + private static boolean matchPrivateRef(AttributeRef ref, int depth, String attrName, AttributeRef previousMatchRef) { + if (ref.getDepth() < depth) { + return false; + } + for (int i = 0; i < (depth - 1); i++) { + if (!ref.getComponent(i).equals(previousMatchRef.getComponent(i))) { + return false; + } + } + return ref.getComponent(depth - 1).equals(attrName); + } + + private static void writeNameAndValue(JsonWriter w, String name, LDValue value) throws IOException { + w.name(name); + gsonInstance().toJson(value, LDValue.class, w); + } + + private static List addOrCreate(List list, T value) { + if (list == null) { + list = new ArrayList<>(); + } + list.add(value); + return list; + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventOutputFormatter.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventOutputFormatter.java new file mode 100644 index 0000000..3f0c25d --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventOutputFormatter.java @@ -0,0 +1,325 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.gson.stream.JsonWriter; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.events.EventSummarizer.CounterValue; +import com.launchdarkly.sdk.internal.events.EventSummarizer.FlagInfo; +import com.launchdarkly.sdk.internal.events.EventSummarizer.SimpleIntKeyedMap; + +import java.io.IOException; +import java.io.Writer; +import java.util.Map; + +import static com.launchdarkly.sdk.internal.GsonHelpers.gsonInstance; + +/** + * Transforms analytics events and summary data into the JSON format that we send to LaunchDarkly. + * Rather than creating intermediate objects to represent this schema, we use the Gson streaming + * output API to construct JSON directly. + *
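+ * <p>
+ * For illustration (all field values here are hypothetical), a payload containing one feature
+ * event plus the accumulated summary looks roughly like
+ * <pre>{@code
+ * [ {"kind": "feature", "creationDate": 100000, "key": "flag-key", "context": {...},
+ *    "version": 11, "variation": 1, "value": true, "default": false},
+ *   {"kind": "summary", "startDate": 100000, "endDate": 100000, "features": {...}} ]
+ * }</pre>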
+ * Test coverage for this logic is in EventOutputTest and DefaultEventProcessorOutputTest. The + * handling of context data and private attribute redaction is implemented in EventContextFormatter + * and tested in more detail in EventContextFormatterTest. + */ +final class EventOutputFormatter { + private final EventContextFormatter contextFormatter; + + EventOutputFormatter(EventsConfiguration config) { + this.contextFormatter = new EventContextFormatter( + config.allAttributesPrivate, + config.privateAttributes.toArray(new AttributeRef[config.privateAttributes.size()])); + } + + int writeOutputEvents(Event[] events, EventSummarizer.EventSummary summary, Writer writer) throws IOException { + int count = 0; + JsonWriter jsonWriter = new JsonWriter(writer); + jsonWriter.beginArray(); + for (Event event: events) { + if (writeOutputEvent(event, jsonWriter)) { + count++; + } + } + if (!summary.isEmpty()) { + writeSummaryEvent(summary, jsonWriter); + count++; + } + jsonWriter.endArray(); + jsonWriter.flush(); + return count; + } + + private boolean writeOutputEvent(Event event, JsonWriter jw) throws IOException { + if (event.getContext() == null || !event.getContext().isValid()) { + // The SDK should never send us an event without a valid context, but if we somehow get one, + // just skip the event since there's no way to serialize it. + return false; + } + if (event instanceof Event.FeatureRequest) { + Event.FeatureRequest fe = (Event.FeatureRequest)event; + jw.beginObject(); + writeKindAndCreationDate(jw, fe.isDebug() ? "debug" : "feature", event.getCreationDate()); + jw.name("key").value(fe.getKey()); + writeContext(fe.getContext(), jw, !fe.isDebug()); + if (fe.getVersion() >= 0) { + jw.name("version"); + jw.value(fe.getVersion()); + } + if (fe.getVariation() >= 0) { + jw.name("variation"); + jw.value(fe.getVariation()); + } + writeLDValue("value", fe.getValue(), jw); + writeLDValue("default", fe.getDefaultVal(), jw); + if (fe.getPrereqOf() != null) { + jw.name("prereqOf"); + jw.value(fe.getPrereqOf()); + } + writeEvaluationReason(fe.getReason(), jw); + jw.endObject(); + } else if (event instanceof Event.Identify) { + jw.beginObject(); + writeKindAndCreationDate(jw, "identify", event.getCreationDate()); + writeContext(event.getContext(), jw, false); + jw.endObject(); + } else if (event instanceof Event.Custom) { + Event.Custom ce = (Event.Custom)event; + jw.beginObject(); + writeKindAndCreationDate(jw, "custom", event.getCreationDate()); + jw.name("key").value(ce.getKey()); + writeContextKeys(ce.getContext(), jw); + writeLDValue("data", ce.getData(), jw); + if (ce.getMetricValue() != null) { + jw.name("metricValue"); + jw.value(ce.getMetricValue()); + } + jw.endObject(); + } else if (event instanceof Event.Index) { + jw.beginObject(); + writeKindAndCreationDate(jw, "index", event.getCreationDate()); + writeContext(event.getContext(), jw, false); + jw.endObject(); + } else if (event instanceof Event.MigrationOp) { + jw.beginObject(); + writeKindAndCreationDate(jw, "migration_op", event.getCreationDate()); + writeContextKeys(event.getContext(), jw); + + Event.MigrationOp me = (Event.MigrationOp)event; + jw.name("operation").value(me.getOperation()); + + long samplingRatio = me.getSamplingRatio(); + if(samplingRatio != 1) { + jw.name("samplingRatio").value(samplingRatio); + } + + writeMigrationEvaluation(jw, me); + writeMeasurements(jw, me); + + jw.endObject(); + } else { + return false; + } + return true; + } + + private static void writeMeasurements(JsonWriter jw, Event.MigrationOp me) 
throws IOException { + jw.name("measurements"); + jw.beginArray(); + + writeInvokedMeasurement(jw, me); + writeConsistencyMeasurement(jw, me); + writeLatencyMeasurement(jw, me); + writeErrorMeasurement(jw, me); + + jw.endArray(); // end measurements + } + + private static void writeErrorMeasurement(JsonWriter jw, Event.MigrationOp me) throws IOException { + Event.MigrationOp.ErrorMeasurement errorMeasurement = me.getErrorMeasurement(); + if(errorMeasurement != null && errorMeasurement.hasMeasurement()) { + jw.beginObject(); + jw.name("key").value("error"); + jw.name("values"); + jw.beginObject(); + if(errorMeasurement.hasOldError()) { + jw.name("old").value(errorMeasurement.hasOldError()); + } + if(errorMeasurement.hasNewError()) { + jw.name("new").value(errorMeasurement.hasNewError()); + } + jw.endObject(); // end of values + jw.endObject(); // end of measurement + } + } + + private static void writeLatencyMeasurement(JsonWriter jw, Event.MigrationOp me) throws IOException { + Event.MigrationOp.LatencyMeasurement latencyMeasurement = me.getLatencyMeasurement(); + if(latencyMeasurement != null && latencyMeasurement.hasMeasurement()) { + jw.beginObject(); + + jw.name("key").value("latency_ms"); + + jw.name("values"); + jw.beginObject(); + if(latencyMeasurement.getOldLatencyMs() != null) { + jw.name("old").value(latencyMeasurement.getOldLatencyMs()); + } + if(latencyMeasurement.getNewLatencyMs() != null) { + jw.name("new").value(latencyMeasurement.getNewLatencyMs()); + } + + jw.endObject(); // end of values + jw.endObject(); // end of measurement + } + } + + private static void writeConsistencyMeasurement(JsonWriter jw, Event.MigrationOp me) throws IOException { + Event.MigrationOp.ConsistencyMeasurement consistencyMeasurement = me.getConsistencyMeasurement(); + if(consistencyMeasurement != null) { + jw.beginObject(); + jw.name("key").value("consistent"); + jw.name("value").value(consistencyMeasurement.isConsistent()); + if(consistencyMeasurement.getSamplingRatio() != 1) { + jw.name("samplingRatio").value(consistencyMeasurement.getSamplingRatio()); + } + jw.endObject(); // end measurement + } + } + + private static void writeInvokedMeasurement(JsonWriter jw, Event.MigrationOp me) throws IOException { + jw.beginObject(); + jw.name("key").value("invoked"); + Event.MigrationOp.InvokedMeasurement invokedMeasurement = me.getInvokedMeasurement(); + + jw.name("values"); + jw.beginObject(); + if(invokedMeasurement.wasOldInvoked()) { + jw.name("old").value(invokedMeasurement.wasOldInvoked()); + } + if(invokedMeasurement.wasNewInvoked()) { + jw.name("new").value(invokedMeasurement.wasNewInvoked()); + } + jw.endObject(); // end values + jw.endObject(); // end measurement + } + + private void writeMigrationEvaluation(JsonWriter jw, Event.MigrationOp me) throws IOException { + jw.name("evaluation"); + jw.beginObject(); + jw.name("key").value(me.getFeatureKey()); + if (me.getVariation() >= 0) { + jw.name("variation"); + jw.value(me.getVariation()); + } + if (me.getFlagVersion() >= 0) { + jw.name("version"); + jw.value(me.getFlagVersion()); + } + writeLDValue("value", me.getValue(), jw); + writeLDValue("default", me.getDefaultVal(), jw); + writeEvaluationReason(me.getReason(), jw); + jw.endObject(); + } + + private void writeSummaryEvent(EventSummarizer.EventSummary summary, JsonWriter jw) throws IOException { + jw.beginObject(); + + jw.name("kind"); + jw.value("summary"); + + jw.name("startDate"); + jw.value(summary.startDate); + jw.name("endDate"); + jw.value(summary.endDate); + + jw.name("features"); + 
jw.beginObject(); + + for (Map.Entry flag: summary.counters.entrySet()) { + String flagKey = flag.getKey(); + FlagInfo flagInfo = flag.getValue(); + + jw.name(flagKey); + jw.beginObject(); + + writeLDValue("default", flagInfo.defaultVal, jw); + jw.name("contextKinds").beginArray(); + for (String kind: flagInfo.contextKinds) { + jw.value(kind); + } + jw.endArray(); + + jw.name("counters"); + jw.beginArray(); + + for (int i = 0; i < flagInfo.versionsAndVariations.size(); i++) { + int version = flagInfo.versionsAndVariations.keyAt(i); + SimpleIntKeyedMap variations = flagInfo.versionsAndVariations.valueAt(i); + for (int j = 0; j < variations.size(); j++) { + int variation = variations.keyAt(j); + CounterValue counter = variations.valueAt(j); + + jw.beginObject(); + + if (variation >= 0) { + jw.name("variation").value(variation); + } + if (version >= 0) { + jw.name("version").value(version); + } else { + jw.name("unknown").value(true); + } + writeLDValue("value", counter.flagValue, jw); + jw.name("count").value(counter.count); + + jw.endObject(); + } + } + + jw.endArray(); // end of "counters" array + jw.endObject(); // end of this flag + } + + jw.endObject(); // end of "features" + jw.endObject(); // end of summary event object + } + + private void writeKindAndCreationDate(JsonWriter jw, String kind, long creationDate) throws IOException { + jw.name("kind").value(kind); + jw.name("creationDate").value(creationDate); + } + + private void writeContext(LDContext context, JsonWriter jw, boolean redactAnonymous) throws IOException { + jw.name("context"); + contextFormatter.write(context, jw, redactAnonymous); + } + + private void writeContextKeys(LDContext context, JsonWriter jw) throws IOException { + jw.name("contextKeys").beginObject(); + for (int i = 0; i < context.getIndividualContextCount(); i++) { + LDContext c = context.getIndividualContext(i); + if (c != null) { + jw.name(c.getKind().toString()).value(c.getKey()); + } + } + jw.endObject(); + } + + private void writeLDValue(String key, LDValue value, JsonWriter jw) throws IOException { + if (value == null || value.isNull()) { + return; + } + jw.name(key); + gsonInstance().toJson(value, LDValue.class, jw); // LDValue defines its own custom serializer + } + + private void writeEvaluationReason(EvaluationReason er, JsonWriter jw) throws IOException { + if (er == null) { + return; + } + jw.name("reason"); + gsonInstance().toJson(er, EvaluationReason.class, jw); // EvaluationReason defines its own custom serializer + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventProcessor.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventProcessor.java new file mode 100644 index 0000000..2f26e2f --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventProcessor.java @@ -0,0 +1,42 @@ +package com.launchdarkly.sdk.internal.events; + +/** + * Interface describing the {@link DefaultEventProcessor} methods. There will normally only ever + * be one implementation of this, but having an interface allows for mocking in tests. + */ +public interface EventProcessor { + /** + * Enqueues an event. + * + * @param e the input data + */ + void sendEvent(Event e); + + /** + * Schedules an asynchronous flush. + */ + void flushAsync(); + + /** + * Flushes and blocks until the flush is done. + */ + void flushBlocking(); + + /** + * Tells the event processor whether we should be in background mode. This is only applicable in the client-side + * (Android) SDK. 
In background mode, events mostly work the same but we do not send any periodic diagnostic events. + * + * @param inBackground true if we should be in background mode + */ + void setInBackground(boolean inBackground); + + /** + * Tells the event processor whether we should be in background mode. This is only applicable in the client-side + * (Android) SDK; in the server-side Java SDK, offline mode does not change dynamically and so we don't even + * bother to create an event processor if we're offline. In offline mode, events are enqueued but never flushed, + * and diagnostic events are not sent. + * + * @param offline true if we should be in offline mode + */ + void setOffline(boolean offline); +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventSender.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventSender.java new file mode 100644 index 0000000..2f2a31e --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventSender.java @@ -0,0 +1,92 @@ +package com.launchdarkly.sdk.internal.events; + +import java.io.Closeable; +import java.net.URI; +import java.util.Date; + +/** + * Internal interface for a component that can deliver preformatted event data. + *
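+ * <p>
+ * A typical caller (a sketch only; the variable names are illustrative) looks like
+ * <pre>{@code
+ * EventSender.Result result = sender.sendAnalyticsEvents(utf8Json, eventCount, eventsBaseUri);
+ * if (result.isMustShutDown()) {
+ *   // stop trying to deliver events permanently, e.g. because the SDK key was rejected
+ * }
+ * }</pre>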
+ * This is separate from the public EventSender interface in the SDK that applications can use to + * provide a custom implementation. The latter is used as a wrapper for this one, so we do not have + * to expose any types from the internal events code. The public interface is simpler because it + * only needs to return success/failure/shutdown status; the use of the Date header is an + * implementation detail that is specific to the default HTTP implementation of event delivery. + */ +public interface EventSender extends Closeable { + /** + * Attempt to deliver an analytics event data payload. + *
+ * This method will be called synchronously from an event delivery worker thread. + * + * @param data the preformatted JSON data, in UTF-8 encoding + * @param eventCount the number of individual events in the data + * @param eventsBaseUri the configured events endpoint base URI + * @return a {@link Result} + */ + Result sendAnalyticsEvents(byte[] data, int eventCount, URI eventsBaseUri); + + /** + * Attempt to deliver a diagnostic event data payload. + *
+ * This method will be called synchronously from an event delivery worker thread. + * + * @param data the preformatted JSON data, as a string + * @param eventsBaseUri the configured events endpoint base URI + * @return a {@link Result} + */ + Result sendDiagnosticEvent(byte[] data, URI eventsBaseUri); + + /** + * Encapsulates the results of a call to an EventSender method. + */ + public static final class Result { + private boolean success; + private boolean mustShutDown; + private Date timeFromServer; + + /** + * Constructs an instance. + * + * @param success true if the events were delivered + * @param mustShutDown true if an unrecoverable error (such as an HTTP 401 error, implying that the + * SDK key is invalid) means the SDK should permanently stop trying to send events + * @param timeFromServer the parsed value of an HTTP Date header received from the remote server, + * if any; this is used to compensate for differences between the application's time and server time + */ + public Result(boolean success, boolean mustShutDown, Date timeFromServer) { + this.success = success; + this.mustShutDown = mustShutDown; + this.timeFromServer = timeFromServer; + } + + /** + * Returns true if the events were delivered. + * + * @return true if the events were delivered + */ + public boolean isSuccess() { + return success; + } + + /** + * Returns true if an unrecoverable error (such as an HTTP 401 error, implying that the + * SDK key is invalid) means the SDK should permanently stop trying to send events + * + * @return true if event delivery should shut down + */ + public boolean isMustShutDown() { + return mustShutDown; + } + + /** + * Returns the parsed value of an HTTP Date header received from the remote server, if any. This + * is used to compensate for differences between the application's time and server time. + * + * @return a date value or null + */ + public Date getTimeFromServer() { + return timeFromServer; + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventSummarizer.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventSummarizer.java new file mode 100644 index 0000000..9eaef83 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventSummarizer.java @@ -0,0 +1,320 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * Manages the state of summarizable information for the EventProcessor. Note that the + * methods of this class are deliberately not thread-safe, because they should always + * be called from EventProcessor's single message-processing thread. + */ +final class EventSummarizer { + private EventSummary eventsState; + + EventSummarizer() { + this.eventsState = new EventSummary(); + } + + /** + * Adds information about an evaluation to our counters. 
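+ * <p>
+ * For example (hypothetical values), ten evaluations of the same flag key at version 5 and
+ * variation 0 accumulate into a single counter with a count of 10, rather than producing ten
+ * separate events.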
+ * + * @param timestamp the millisecond timestamp + * @param flagKey the flag key + * @param flagVersion the flag version, or -1 if the flag is unknown + * @param variation the result variation, or -1 if none + * @param value the result value + * @param defaultValue the application default value + * @param context the evaluation context + */ + void summarizeEvent( + long timestamp, + String flagKey, + int flagVersion, + int variation, + LDValue value, + LDValue defaultValue, + LDContext context + ) { + eventsState.incrementCounter(flagKey, flagVersion, variation, value, defaultValue, context); + eventsState.noteTimestamp(timestamp); + } + + /** + * Gets the current summarized event data, and resets the EventSummarizer's state to contain + * a new empty EventSummary. + * + * @return the summary state + */ + EventSummary getSummaryAndReset() { + EventSummary ret = eventsState; + clear(); + return ret; + } + + /** + * Indicates that we decided not to send the summary values returned by {@link #getSummaryAndReset()}, + * and instead we should return to using the previous state object and keep accumulating data + * in it. + */ + void restoreTo(EventSummary previousState) { + eventsState = previousState; + } + + /** + * Returns true if there is no summary data in the current state. + * + * @return true if the state is empty + */ + boolean isEmpty() { + return eventsState.isEmpty(); + } + + void clear() { + eventsState = new EventSummary(); + } + + static final class EventSummary { + final Map counters; + long startDate; + long endDate; + + EventSummary() { + counters = new HashMap<>(); + } + + EventSummary(EventSummary from) { + counters = new HashMap<>(from.counters); + startDate = from.startDate; + endDate = from.endDate; + } + + boolean isEmpty() { + return counters.isEmpty(); + } + + void incrementCounter( + String flagKey, + int flagVersion, + int variation, + LDValue flagValue, + LDValue defaultVal, + LDContext context + ) { + FlagInfo flagInfo = counters.get(flagKey); + if (flagInfo == null) { + flagInfo = new FlagInfo(defaultVal, new SimpleIntKeyedMap<>(), new HashSet<>()); + counters.put(flagKey, flagInfo); + } + for (int i = 0; i < context.getIndividualContextCount(); i++) { + flagInfo.contextKinds.add(context.getIndividualContext(i).getKind().toString()); + } + + SimpleIntKeyedMap variations = flagInfo.versionsAndVariations.get(flagVersion); + if (variations == null) { + variations = new SimpleIntKeyedMap<>(); + flagInfo.versionsAndVariations.put(flagVersion, variations); + } + + CounterValue value = variations.get(variation); + if (value == null) { + variations.put(variation, new CounterValue(1, flagValue)); + } else { + value.increment(); + } + } + + void noteTimestamp(long time) { + if (startDate == 0 || time < startDate) { + startDate = time; + } + if (time > endDate) { + endDate = time; + } + } + + @Override + public boolean equals(Object other) { + if (other instanceof EventSummary) { + EventSummary o = (EventSummary)other; + return o.counters.equals(counters) && startDate == o.startDate && endDate == o.endDate; + } + return false; + } + + @Override + public int hashCode() { + // We can't make meaningful hash codes for EventSummary, because the same counters could be + // represented differently in our Map. It doesn't matter because there's no reason to use an + // EventSummary instance as a hash key. 
+ return 0; + } + } + + static final class FlagInfo { + final LDValue defaultVal; + final SimpleIntKeyedMap> versionsAndVariations; + final Set contextKinds; + + FlagInfo(LDValue defaultVal, SimpleIntKeyedMap> versionsAndVariations, + Set contextKinds) { + this.defaultVal = defaultVal; + this.versionsAndVariations = versionsAndVariations; + this.contextKinds = contextKinds; + } + + @Override + public boolean equals(Object other) { // used only in tests + if (other instanceof FlagInfo) { + FlagInfo o = (FlagInfo)other; + return o.defaultVal.equals(this.defaultVal) && o.versionsAndVariations.equals(this.versionsAndVariations) && + o.contextKinds.equals(this.contextKinds); + } + return false; + } + + @Override + public int hashCode() { // used only in tests + return this.defaultVal.hashCode() + 31 * versionsAndVariations.hashCode(); + } + + @Override + public String toString() { // used only in tests + return "(default=" + defaultVal + ", counters=" + versionsAndVariations + ", contextKinds=" + + String.join(",", contextKinds) + ")"; + } + } + + static final class CounterValue { + long count; + final LDValue flagValue; + + CounterValue(long count, LDValue flagValue) { + this.count = count; + this.flagValue = flagValue; + } + + void increment() { + count = count + 1; + } + + @Override + public boolean equals(Object other) { // used only in tests + if (other instanceof CounterValue) { + CounterValue o = (CounterValue)other; + return count == o.count && Objects.equals(flagValue, o.flagValue); + } + return false; + } + + @Override + public String toString() { // used only in tests + return "(" + count + "," + flagValue + ")"; + } + } + + // A very simple array-backed structure with map-like semantics for primitive int keys. This + // is highly specialized for the EventSummarizer use case (which is why it is an inner class + // of EventSummarizer, to emphasize that it should not be used elsewhere). It makes the + // following assumptions: + // - The number of keys will almost always be small: most flags have only a few variations, + // and most flags will have only one version or a few versions during the lifetime of an + // event payload. Therefore, we use simple iteration and int comparisons for the keys; the + // overhead of this is likely less than the overhead of maintaining a hashtable and creating + // objects for its keys and iterators. + // - Data will never be deleted from the map after being added (the summarizer simply makes + // a new map when it's time to start over). 
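+ // Illustrative use, mirroring EventSummary.incrementCounter above: versionsAndVariations.get(7)
+ // does a linear scan over at most a handful of int keys; on a miss the caller follows up with
+ // put(7, new SimpleIntKeyedMap<>()), and the backing arrays only grow (doubling from 4 to 8)
+ // once a fifth distinct key is added.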
+ static final class SimpleIntKeyedMap { + private static final int INITIAL_CAPACITY = 4; + + private int[] keys; + private Object[] values; + private int n; + + SimpleIntKeyedMap() { + keys = new int[INITIAL_CAPACITY]; + values = new Object[INITIAL_CAPACITY]; + } + + int size() { + return n; + } + + int capacity() { + return keys.length; + } + + int keyAt(int index) { + return keys[index]; + } + + @SuppressWarnings("unchecked") + T valueAt(int index) { + return (T)values[index]; + } + + @SuppressWarnings("unchecked") + T get(int key) { + for (int i = 0; i < n; i++) { + if (keys[i] == key) { + return (T)values[i]; + } + } + return null; + } + + SimpleIntKeyedMap put(int key, T value) { + for (int i = 0; i < n; i++) { + if (keys[i] == key) { + values[i] = value; + return this; + } + } + if (n == keys.length) { + int[] newKeys = new int[keys.length * 2]; + System.arraycopy(keys, 0, newKeys, 0, n); + Object[] newValues = new Object[keys.length * 2]; + System.arraycopy(values, 0, newValues, 0, n); + keys = newKeys; + values = newValues; + } + keys[n] = key; + values[n] = value; + n++; + return this; + } + + @SuppressWarnings("unchecked") + @Override + public boolean equals(Object o) { // used only in tests + if (o instanceof SimpleIntKeyedMap) { + SimpleIntKeyedMap other = (SimpleIntKeyedMap)o; + if (this.n == other.n) { + for (int i = 0; i < n; i++) { + T value1 = (T)values[i], value2 = other.get(keys[i]); + if (!Objects.equals(value1, value2)) { + return false; + } + } + return true; + } + } + return false; + } + + @Override + public String toString() { // used only in tests + StringBuilder s = new StringBuilder("{"); + for (int i = 0; i < n; i++) { + s.append(keys[i]).append("=").append(values[i] == null ? "null" : values[i].toString()); + } + s.append("}"); + return s.toString(); + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventsConfiguration.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventsConfiguration.java new file mode 100644 index 0000000..c0c2088 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/EventsConfiguration.java @@ -0,0 +1,81 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.AttributeRef; + +import java.net.URI; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +/** + * Internal representation of the configuration properties for {@link DefaultEventProcessor}. + * This class is not exposed in the public SDK API. + */ +public final class EventsConfiguration { + /** + * Default number of event-sending worker threads. + */ + public static final int DEFAULT_EVENT_SENDING_THREAD_POOL_SIZE = 5; + + final boolean allAttributesPrivate; + final int capacity; + final EventContextDeduplicator contextDeduplicator; + final long diagnosticRecordingIntervalMillis; + final DiagnosticStore diagnosticStore; + final EventSender eventSender; + final int eventSendingThreadPoolSize; + final URI eventsUri; + final long flushIntervalMillis; + final boolean initiallyInBackground; + final boolean initiallyOffline; + final List privateAttributes; + + /** + * Creates an instance. 
+ * + * @param allAttributesPrivate true if all attributes are private + * @param capacity event buffer capacity (if zero or negative, a value of 1 is used to prevent errors) + * @param contextDeduplicator optional EventContextDeduplicator; null for client-side SDK + * @param diagnosticRecordingIntervalMillis diagnostic recording interval + * @param diagnosticStore optional DiagnosticStore; null if diagnostics are disabled + * @param eventSender event delivery component; must not be null + * @param eventSendingThreadPoolSize number of worker threads for event delivery; zero to use the default + * @param eventsUri events base URI + * @param flushIntervalMillis event flush interval + * @param initiallyInBackground true if we should start out in background mode (see + * {@link DefaultEventProcessor#setInBackground(boolean)}) + * @param initiallyOffline true if we should start out in offline mode (see + * {@link DefaultEventProcessor#setOffline(boolean)}) + * @param privateAttributes list of private attribute references; may be null + */ + public EventsConfiguration( + boolean allAttributesPrivate, + int capacity, + EventContextDeduplicator contextDeduplicator, + long diagnosticRecordingIntervalMillis, + DiagnosticStore diagnosticStore, + EventSender eventSender, + int eventSendingThreadPoolSize, + URI eventsUri, + long flushIntervalMillis, + boolean initiallyInBackground, + boolean initiallyOffline, + Collection privateAttributes + ) { + super(); + this.allAttributesPrivate = allAttributesPrivate; + this.capacity = capacity >= 0 ? capacity : 1; + this.contextDeduplicator = contextDeduplicator; + this.diagnosticRecordingIntervalMillis = diagnosticRecordingIntervalMillis; + this.diagnosticStore = diagnosticStore; + this.eventSender = eventSender; + this.eventSendingThreadPoolSize = eventSendingThreadPoolSize >= 0 ? eventSendingThreadPoolSize : + DEFAULT_EVENT_SENDING_THREAD_POOL_SIZE; + this.eventsUri = eventsUri; + this.flushIntervalMillis = flushIntervalMillis; + this.initiallyInBackground = initiallyInBackground; + this.initiallyOffline = initiallyOffline; + this.privateAttributes = privateAttributes == null ? Collections.emptyList() : new ArrayList<>(privateAttributes); + } +} \ No newline at end of file diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/Sampler.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/Sampler.java new file mode 100644 index 0000000..f86e02b --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/Sampler.java @@ -0,0 +1,28 @@ +package com.launchdarkly.sdk.internal.events; + +import java.util.concurrent.ThreadLocalRandom; + +/** + * Class used for event sampling. + */ +public final class Sampler { + /** + * Given a ratio determine if an event should be sampled. + * + * @param ratio the sampling ratio + * @return true if it should be sampled + */ + public static boolean shouldSample(long ratio) { + if(ratio == 1) { + return true; + } + if(ratio == 0) { + return false; + } + + // Checking for any number in the range will have approximately a 1 in X + // chance. So we check for 0 as it is part of any range. + // This random number is not used for cryptographic purposes. 
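+ // For example (illustrative ratio): with ratio == 20, nextLong(20) returns a value in the
+ // range [0, 20), so the comparison below is true roughly once in every 20 calls, and a caller
+ // keeps roughly 1 in 20 events.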
+ return ThreadLocalRandom.current().nextLong(ratio) == 0; + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/package-info.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/package-info.java new file mode 100644 index 0000000..4bff3aa --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/events/package-info.java @@ -0,0 +1,10 @@ +/** + * This package contains the core implementation of analytics events and diagnostic events + * used by the Java SDK and Android SDK. + *
+ * All types in this package are for internal LaunchDarkly use only, and are subject to change. + * They are not part of the public supported API of the SDKs, and they should not be referenced + * by application code. They have public scope only because they need to be available to + * LaunchDarkly SDK code in other packages. + */ +package com.launchdarkly.sdk.internal.events; diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HeadersTransformer.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HeadersTransformer.java new file mode 100644 index 0000000..944a2c6 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HeadersTransformer.java @@ -0,0 +1,15 @@ +package com.launchdarkly.sdk.internal.http; + +import java.util.Map; + +/** + * Callback interface for dynamically configuring HTTP headers on a per-request basis. + */ +public interface HeadersTransformer { + /** + * Transforms the headers that will be added to a request. + * + * @param headers The unmodified headers the SDK prepared for the request + */ + void updateHeaders(Map headers); +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpConsts.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpConsts.java new file mode 100644 index 0000000..6595216 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpConsts.java @@ -0,0 +1,5 @@ +package com.launchdarkly.sdk.internal.http; + +public final class HttpConsts { + public static final String QUERY_PARAM_FILTER = "filter"; +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpErrors.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpErrors.java new file mode 100644 index 0000000..e99126e --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpErrors.java @@ -0,0 +1,95 @@ +package com.launchdarkly.sdk.internal.http; + +import com.launchdarkly.logging.LDLogger; + +/** + * Contains shared helpers related to HTTP response validation. + *
+ * This class is for internal use only and should not be documented in the SDK API. It is not + * supported for any use outside of the LaunchDarkly SDKs, and is subject to change without notice. + */ +public abstract class HttpErrors { + private HttpErrors() {} + + /** + * Represents an HTTP response error as an exception. + */ + @SuppressWarnings("serial") + public static final class HttpErrorException extends Exception { + private final int status; + + /** + * Constructs an instance. + * @param status the status code + */ + public HttpErrorException(int status) { + super("HTTP error " + status); + this.status = status; + } + + /** + * Returns the status code. + * @return the status code + */ + public int getStatus() { + return status; + } + } + + /** + * Tests whether an HTTP error status represents a condition that might resolve on its own if we retry. + * @param statusCode the HTTP status + * @return true if retrying makes sense; false if it should be considered a permanent failure + */ + public static boolean isHttpErrorRecoverable(int statusCode) { + if (statusCode >= 400 && statusCode < 500) { + switch (statusCode) { + case 400: // bad request + case 408: // request timeout + case 429: // too many requests + return true; + default: + return false; // all other 4xx errors are unrecoverable + } + } + return true; + } + + /** + * Logs an HTTP error or network error at the appropriate level and determines whether it is recoverable + * (as defined by {@link #isHttpErrorRecoverable(int)}). + * + * @param logger the logger to log to + * @param errorDesc description of the error + * @param errorContext a phrase like "when doing such-and-such" + * @param statusCode HTTP status code, or 0 for a network error + * @param recoverableMessage a phrase like "will retry" to use if the error is recoverable + * @return true if the error is recoverable + */ + public static boolean checkIfErrorIsRecoverableAndLog( + LDLogger logger, + String errorDesc, + String errorContext, + int statusCode, + String recoverableMessage + ) { + if (statusCode > 0 && !isHttpErrorRecoverable(statusCode)) { + logger.error("Error {} (giving up permanently): {}", errorContext, errorDesc); + return false; + } else { + logger.warn("Error {} ({}): {}", errorContext, recoverableMessage, errorDesc); + return true; + } + } + + /** + * Returns a text description of an HTTP error. + * + * @param statusCode the status code + * @return the error description + */ + public static String httpErrorDescription(int statusCode) { + return "HTTP error " + statusCode + + (statusCode == 401 || statusCode == 403 ? " (invalid SDK key)" : ""); + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpHelpers.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpHelpers.java new file mode 100644 index 0000000..8efaf42 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpHelpers.java @@ -0,0 +1,79 @@ +package com.launchdarkly.sdk.internal.http; + +import java.net.URI; +import java.util.List; + +import okhttp3.HttpUrl; + +/** + * Helper methods related to HTTP. + *
+ * This class is for internal use only and should not be documented in the SDK API. It is not + * supported for any use outside of the LaunchDarkly SDKs, and is subject to change without notice. + */ +public abstract class HttpHelpers { + private HttpHelpers() {} + + /** + * Safely concatenates a path, ensuring that there is exactly one slash between segments. + * + * @param uri the URI + * @param path the path to add + * @return a new URI + */ + public static URI concatenateUriPath(URI uri, String path) { + HttpUrl.Builder concatBuilder = HttpUrl.get(uri).newBuilder(); + HttpUrl concatted = concatBuilder.addPathSegments(path).build(); + List segments = concatted.pathSegments(); + + // now remove empty segments. go in reverse to preserve indexes during modification + HttpUrl.Builder sanitizedBuilder = concatted.newBuilder(); + for (int i = segments.size() - 1; i >= 0; i--) { + if (segments.get(i).isEmpty()) { + sanitizedBuilder.removePathSegment(i); + } + } + return sanitizedBuilder.build().uri(); + } + + /** + * Adds the query param to the URI. + * + * @param uri the URI + * @param name the name of the parameter + * @param value the value of the parameter + * @return the modified URI + */ + public static URI addQueryParam(URI uri, String name, String value) { + // it is important to use get(String) instead of get(URI) because get(String) will throw an exception + // that includes useful information for the user to diagnose their URI. + return HttpUrl.get(uri.toString()).newBuilder().addQueryParameter(name, value).build().uri(); +} + + /** + * Tests whether a string contains only characters that are safe to use in an HTTP header value. + *
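+ * <p>
+ * For example, a value such as {@code "sdk-key-123abc"} is accepted, while a value containing a
+ * newline or a non-ASCII character such as {@code "café"} is rejected.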
+ * This is specifically testing whether the string would be considered a valid HTTP header value + * by the OkHttp client. The actual HTTP spec does not prohibit characters 127 and higher; OkHttp's + * check is overly strict, as was pointed out in https://github.com/square/okhttp/issues/2016. + * But all OkHttp 3.x and 4.x versions so far have continued to enforce that check. Control + * characters other than a tab are always illegal. + * + * The value we're mainly concerned with is the SDK key (Authorization header). If an SDK key + * accidentally has (for instance) a newline added to it, we don't want to end up having OkHttp + * throw an exception mentioning the value, which might get logged (https://github.com/square/okhttp/issues/6738). + * + * @param value a string + * @return true if valid + */ + public static boolean isAsciiHeaderValue(String value) { + for (int i = 0; i < value.length(); i++) { + char ch = value.charAt(i); + if ((ch < 0x20 || ch > 0x7e) && ch != '\t') { + return false; + } + } + return true; + } + +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpProperties.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpProperties.java new file mode 100644 index 0000000..3a7c1e4 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/HttpProperties.java @@ -0,0 +1,249 @@ +package com.launchdarkly.sdk.internal.http; + +import java.net.Proxy; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import javax.net.SocketFactory; +import javax.net.ssl.SSLSocketFactory; +import javax.net.ssl.X509TrustManager; + +import okhttp3.Authenticator; +import okhttp3.ConnectionPool; +import okhttp3.Headers; +import okhttp3.OkHttpClient; + +/** + * Internal container for HTTP parameters used by SDK components. Includes logic for creating an + * OkHttp client. + *
+ * This is separate from any public HTTP configuration/builder classes that are part of the SDK API. + * Those are transformed into this when the SDK is constructing components. The public API does not + * reference any OkHttp classes, but this internal class does. + */ +public final class HttpProperties { + private static final int DEFAULT_TIMEOUT = 10000; // not used by the SDKs, just prevents invalid test conditions + + private final long connectTimeoutMillis; + private final Map defaultHeaders; + private final HeadersTransformer headersTransformer; + private final Proxy proxy; + private final Authenticator proxyAuth; + private final OkHttpClient sharedHttpClient; + private final SocketFactory socketFactory; + private final long socketTimeoutMillis; + private final SSLSocketFactory sslSocketFactory; + private final X509TrustManager trustManager; + + /** + * Constructs an instance. + * + * @param connectTimeoutMillis connection timeout milliseconds + * @param defaultHeaders headers to add to all requests + * @param headersTransformer optional callback to modify headers + * @param proxy optional proxy + * @param proxyAuth optional proxy authenticator + * @param socketFactory optional socket factory + * @param socketTimeoutMillis socket timeout milliseconds + * @param sslSocketFactory optional SSL socket factory + * @param trustManager optional SSL trust manager + */ + public HttpProperties( + long connectTimeoutMillis, + Map defaultHeaders, + HeadersTransformer headersTransformer, + Proxy proxy, + Authenticator proxyAuth, + SocketFactory socketFactory, + long socketTimeoutMillis, + SSLSocketFactory sslSocketFactory, + X509TrustManager trustManager + ) { + super(); + this.connectTimeoutMillis = connectTimeoutMillis <= 0 ? DEFAULT_TIMEOUT : connectTimeoutMillis; + this.defaultHeaders = defaultHeaders == null ? Collections.emptyMap() : new HashMap<>(defaultHeaders); + this.headersTransformer = headersTransformer; + this.proxy = proxy; + this.proxyAuth = proxyAuth; + this.sharedHttpClient = null; + this.socketFactory = socketFactory; + this.socketTimeoutMillis = socketTimeoutMillis <= 0 ? DEFAULT_TIMEOUT : socketTimeoutMillis; + this.sslSocketFactory = sslSocketFactory; + this.trustManager = trustManager; + } + + /** + * Constructs an instance with a preconfigured shared HTTP client. + * + * @param sharedHttpClient an existing HTTP client instance + * @param defaultHeaders headers to add to all requests + * @param headersTransformer optional callback to modify headers + */ + public HttpProperties( + OkHttpClient sharedHttpClient, + Map defaultHeaders, + HeadersTransformer headersTransformer + ) { + super(); + this.defaultHeaders = defaultHeaders == null ? Collections.emptyMap() : new HashMap<>(defaultHeaders); + this.headersTransformer = headersTransformer; + this.sharedHttpClient = sharedHttpClient; + this.connectTimeoutMillis = DEFAULT_TIMEOUT; + this.proxy = null; + this.proxyAuth = null; + this.socketFactory = null; + this.socketTimeoutMillis = DEFAULT_TIMEOUT; + this.sslSocketFactory = null; + this.trustManager = null; + } + + /** + * Returns a minimal set of properties. + * + * @return a default instance + */ + public static HttpProperties defaults() { + return new HttpProperties(0, null, null, null, null, null, 0, null, null); + } + + /** + * Returns an immutable view of the default headers. This does not include applying + * the configured {@link HeadersTransformer}, if any. 
+ * + * @return the default headers + * @see #toHeadersBuilder() + */ + public Iterable> getDefaultHeaders() { + return defaultHeaders.entrySet(); + } + + /** + * Returns an immutable view of the headers to add to a request. This includes applying + * the configured {@link HeadersTransformer}, if any. + * + * @return the default headers + * @see #toHeadersBuilder() + */ + public Iterable> getTransformedDefaultHeaders() { + if (headersTransformer == null) { + return defaultHeaders.entrySet(); + } + Map ret = new HashMap<>(defaultHeaders); + headersTransformer.updateHeaders(ret); + return ret.entrySet(); + } + + /** + * Returns the callback for transforming headers, if any. + * + * @return a {@link HeadersTransformer} or null + * @see #toHeadersBuilder() + */ + public HeadersTransformer getHeadersTransformer() { + return headersTransformer; + } + + /** + * Returns a preconfigured shared HTTP client, if one was defined. + *
<p>
+ * SDK components that use {@link HttpProperties} should check this method first before + * attempting to build their own client. If it returns a non-null value, they should use + * that client; in that case, no other properties except the default headers are relevant, + * and they should not take ownership of the client (that is, do not close the client when + * the component is closed). + * + * @return an HTTP client or null + */ + public OkHttpClient getSharedHttpClient() { + return sharedHttpClient; + } + + /** + * Applies the configured properties to an OkHttp client builder. + *
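Taken together with applyToHttpClientBuilder and toHttpClientBuilder below, the ownership contract just described (prefer a shared client when one is supplied and never close it; otherwise build, and later shut down, your own) might be used roughly as in the following sketch. This is illustrative only, not code from this changeset, though it uses only methods defined on HttpProperties.

// Hypothetical consuming component, shown only to illustrate the ownership rule.
final class HypotheticalComponent implements java.io.Closeable {
  private final OkHttpClient httpClient;
  private final boolean ownsHttpClient;

  HypotheticalComponent(HttpProperties props) {
    if (props.getSharedHttpClient() != null) {
      httpClient = props.getSharedHttpClient(); // reuse the shared client; never close it here
      ownsHttpClient = false;
    } else {
      httpClient = props.toHttpClientBuilder().build(); // this component owns the client it built
      ownsHttpClient = true;
    }
  }

  @Override
  public void close() {
    if (ownsHttpClient) {
      HttpProperties.shutdownHttpClient(httpClient); // only shut down a client we created ourselves
    }
  }
}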
<p>
+ * SDK components that use {@link HttpProperties} should check {@link #getSharedHttpClient()} + * first before attempting to build their own client. The {@link #applyToHttpClientBuilder(okhttp3.OkHttpClient.Builder)} + * method will not provide a correct configuration if a shared client was specified. + * + * @param builder the client builder + */ + public void applyToHttpClientBuilder(OkHttpClient.Builder builder) { + builder.connectionPool(new ConnectionPool(5, 5, TimeUnit.SECONDS)); + if (connectTimeoutMillis > 0) { + builder.connectTimeout(connectTimeoutMillis, TimeUnit.MILLISECONDS); + } + if (socketTimeoutMillis > 0) { + builder.readTimeout(socketTimeoutMillis, TimeUnit.MILLISECONDS) + .writeTimeout(socketTimeoutMillis, TimeUnit.MILLISECONDS); + } + builder.retryOnConnectionFailure(false); // we will implement our own retry logic + + if (socketFactory != null) { + builder.socketFactory(socketFactory); + } + + if (sslSocketFactory != null) { + builder.sslSocketFactory(sslSocketFactory, trustManager); + } + + if (proxy != null) { + builder.proxy(proxy); + if (proxyAuth != null) { + builder.proxyAuthenticator(proxyAuth); + } + } + } + + /** + * Returns an OkHttp client builder initialized with the configured properties. + *
<p>
+ * SDK components that use {@link HttpProperties} should check {@link #getSharedHttpClient()} + * first before attempting to build their own client. The {@link #toHttpClientBuilder()} method + * will not provide a correct configuration if a shared client was specified. + * + * @return a client builder + */ + public OkHttpClient.Builder toHttpClientBuilder() { + OkHttpClient.Builder builder = new OkHttpClient.Builder(); + applyToHttpClientBuilder(builder); + return builder; + } + + /** + * Returns an OkHttp Headers builder initialized with the default headers. This includes + * calling the configured {@link HeadersTransformer}, if any. + * + * @return a Headers builder + */ + public Headers.Builder toHeadersBuilder() { + Headers.Builder builder = new Headers.Builder(); + for (Map.Entry kv: getTransformedDefaultHeaders()) { + builder.add(kv.getKey(), kv.getValue()); + } + return builder; + } + + /** + * Attempts to completely shut down an OkHttp client. + * + * @param client the client to stop + */ + public static void shutdownHttpClient(OkHttpClient client) { + if (client.dispatcher() != null) { + client.dispatcher().cancelAll(); + if (client.dispatcher().executorService() != null) { + client.dispatcher().executorService().shutdown(); + } + } + if (client.connectionPool() != null) { + client.connectionPool().evictAll(); + } + if (client.cache() != null) { + try { + client.cache().close(); + } catch (Exception e) {} + } + } +} diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/package-info.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/package-info.java new file mode 100644 index 0000000..a1003a8 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/http/package-info.java @@ -0,0 +1,10 @@ +/** + * This package contains HTTP-related helpers that can be used by both the Java SDK and the + * Android SDK, based on v4.x of the OkHttp client. + *
<p>
+ * All types in this package are for internal LaunchDarkly use only, and are subject to change. + * They are not part of the public supported API of the SDKs, and they should not be referenced + * by application code. They have public scope only because they need to be available to + * LaunchDarkly SDK code in other packages. + */ +package com.launchdarkly.sdk.internal.http; diff --git a/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/package-info.java b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/package-info.java new file mode 100644 index 0000000..e926d82 --- /dev/null +++ b/lib/shared/internal/src/main/java/com/launchdarkly/sdk/internal/package-info.java @@ -0,0 +1,10 @@ +/** + * This package contains general-purpose helpers that can be used by both the Java SDK and the + * Android SDK. + *
<p>
+ * All types in this package are for internal LaunchDarkly use only, and are subject to change. + * They are not part of the public supported API of the SDKs, and they should not be referenced + * by application code. They have public scope only because they need to be available to + * LaunchDarkly SDK code in other packages. + */ +package com.launchdarkly.sdk.internal; diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/BaseInternalTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/BaseInternalTest.java new file mode 100644 index 0000000..5dec117 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/BaseInternalTest.java @@ -0,0 +1,50 @@ +package com.launchdarkly.sdk.internal; + +import com.launchdarkly.logging.LDLogAdapter; +import com.launchdarkly.logging.LDLogger; +import com.launchdarkly.logging.LogCapture; +import com.launchdarkly.logging.Logs; + +import org.junit.Rule; +import org.junit.rules.TestWatcher; +import org.junit.runner.Description; + +import static org.junit.Assume.assumeFalse; + +@SuppressWarnings("javadoc") +public class BaseInternalTest { + @Rule public DumpLogIfTestFails dumpLogIfTestFails; + + protected final LDLogAdapter testLogging; + protected final LDLogger testLogger; + protected final LogCapture logCapture; + + protected BaseInternalTest() { + if (!enableTestInAndroid()) { + assumeFalse("skipping test that isn't compatible with Android", isInAndroid()); + } + logCapture = Logs.capture(); + testLogging = logCapture; + testLogger = LDLogger.withAdapter(testLogging, ""); + dumpLogIfTestFails = new DumpLogIfTestFails(); + } + + protected boolean enableTestInAndroid() { + // Override this for tests that currently cannot run in our Android CI test job. + return true; + } + + protected boolean isInAndroid() { + String javaVendor = System.getProperty("java.vendor"); + return javaVendor != null && javaVendor.contains("Android"); + } + + class DumpLogIfTestFails extends TestWatcher { + @Override + protected void failed(Throwable e, Description description) { + for (LogCapture.Message message: logCapture.getMessages()) { + System.out.println("LOG {" + description.getDisplayName() + "} >>> " + message.toStringWithTimestamp()); + } + } + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/BaseTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/BaseTest.java new file mode 100644 index 0000000..50ad404 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/BaseTest.java @@ -0,0 +1,11 @@ +package com.launchdarkly.sdk.internal; + +/** + * The only purpose of this class is to support the somewhat roundabout mechanism we use in CI to run + * all of our unit tests in an Android environment too. All unit tests in this project should be either + * directly or indirectly descended from this class. Then, when we run the Android tests, we replace + * this class with another version (from src/androidTest/java) that has the necessary Android test + * runner annotation on it. 
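The Android-side replacement class is not part of this diff. As a purely illustrative sketch, the src/androidTest/java variant would presumably look something like the following; the AndroidX JUnit4 runner annotation is an assumption about the CI setup, not code taken from this changeset.

// Hypothetical src/androidTest/java counterpart; not part of this changeset.
@org.junit.runner.RunWith(androidx.test.ext.junit.runners.AndroidJUnit4.class)
public abstract class BaseTest extends BaseInternalTest {
}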
+ */ +public abstract class BaseTest extends BaseInternalTest { +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/BaseEventTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/BaseEventTest.java new file mode 100644 index 0000000..3ef138e --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/BaseEventTest.java @@ -0,0 +1,565 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import com.google.gson.Gson; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.EvaluationDetail; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.internal.BaseTest; +import com.launchdarkly.testhelpers.JsonAssertions; +import com.launchdarkly.testhelpers.JsonTestValue; + +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.net.URI; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import static com.launchdarkly.testhelpers.ConcurrentHelpers.assertNoMoreValues; +import static com.launchdarkly.testhelpers.ConcurrentHelpers.awaitValue; +import static com.launchdarkly.testhelpers.JsonAssertions.isJsonArray; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonEqualsValue; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonProperty; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonUndefined; +import static com.launchdarkly.testhelpers.JsonTestValue.jsonFromValue; +import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; +import static org.hamcrest.Matchers.allOf; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public abstract class BaseEventTest extends BaseTest { + public static final String SDK_KEY = "SDK_KEY"; + public static final long FAKE_TIME = 100000; + public static final String FLAG_KEY = "flagkey"; + public static final int FLAG_VERSION = 11; + public static final URI FAKE_URI = URI.create("http://fake"); + public static final LDContext user = LDContext.builder("userkey").name("Red").build(); + public static final Gson gson = new Gson(); + public static final LDValue userJson = LDValue.buildObject().put("kind", "user") + .put("key", "userkey").put("name", "Red").build(); + public static final LDValue filteredUserJson = LDValue.buildObject().put("kind", "user") + .put("key", "userkey").put("_meta", LDValue.parse("{\"redactedAttributes\":[\"name\"]}")).build(); + + public static ScheduledExecutorService sharedExecutor = newSingleThreadScheduledExecutor( + new ThreadFactoryBuilder().setNameFormat("tests-sharedExecutor-%d").build()); + + public static void assertJsonEquals(LDValue expected, LDValue actual) { + JsonAssertions.assertJsonEquals(expected.toJsonString(), actual.toJsonString()); + } + + public static EventsConfigurationBuilder baseConfig(EventSender es) { + return new 
EventsConfigurationBuilder().eventSender(es); + } + + public DefaultEventProcessor makeEventProcessor(EventsConfigurationBuilder ec) { + return makeEventProcessor(ec, null); + } + + public DefaultEventProcessor makeEventProcessor( + EventsConfigurationBuilder ec, + DiagnosticStore diagnosticStore + ) { + return new DefaultEventProcessor( + ec.build(), + sharedExecutor, + Thread.MAX_PRIORITY, + testLogger + ); + } + + public static EventsConfiguration defaultEventsConfig() { + return makeEventsConfig(false, null); + } + + public static EventsConfiguration makeEventsConfig(boolean allAttributesPrivate, + Collection privateAttributes) { + return new EventsConfigurationBuilder() + .allAttributesPrivate(allAttributesPrivate) + .privateAttributes(privateAttributes == null ? null : new HashSet<>(privateAttributes)) + .build(); + } + + public static EvaluationDetail simpleEvaluation(int variation, LDValue value) { + return EvaluationDetail.fromValue(value, variation, EvaluationReason.off()); + } + + static final class CapturedPayload { + final boolean diagnostic; + final String data; + final int eventCount; + final URI eventsBaseUri; + + CapturedPayload(boolean diagnostic, String data, int eventCount, URI eventsBaseUri) { + this.diagnostic = diagnostic; + this.data = data; + this.eventCount = eventCount; + assertNotNull(eventsBaseUri); + this.eventsBaseUri = eventsBaseUri; + } + } + + public final class MockEventSender implements EventSender { + volatile boolean closed; + volatile Result result = new Result(true, false, null); + volatile RuntimeException fakeError = null; + volatile IOException fakeErrorOnClose = null; + volatile CountDownLatch receivedCounter = null; + volatile Object waitSignal = null; + + final BlockingQueue receivedParams = new LinkedBlockingQueue<>(); + + @Override + public Result sendAnalyticsEvents(byte[] data, int eventCount, URI eventsBaseUri) { + testLogger.debug("[MockEventSender] received {} events: {}", eventCount, new String(data)); + return receive(false, data, eventCount, eventsBaseUri); + } + + @Override + public Result sendDiagnosticEvent(byte[] data, URI eventsBaseUri) { + testLogger.debug("[MockEventSender] received diagnostic event: {}", new String(data)); + return receive(true, data, 1, eventsBaseUri); + } + + @Override + public void close() throws IOException { + closed = true; + if (fakeErrorOnClose != null) { + throw fakeErrorOnClose; + } + } + + private Result receive(boolean diagnostic, byte[] data, int eventCount, URI eventsBaseUri) { + receivedParams.add(new CapturedPayload(diagnostic, new String(data, Charset.forName("UTF-8")), eventCount, eventsBaseUri)); + if (waitSignal != null) { + // this is used in DefaultEventProcessorTest.eventsAreKeptInBufferIfAllFlushWorkersAreBusy + synchronized (waitSignal) { + if (receivedCounter != null) { + receivedCounter.countDown(); + } + try { + waitSignal.wait(); + } catch (InterruptedException e) {} + } + } + if (fakeError != null) { + throw fakeError; + } + return result; + } + + CapturedPayload awaitRequest() { + return awaitValue(receivedParams, 5, TimeUnit.SECONDS); + } + + CapturedPayload awaitAnalytics() { + CapturedPayload p = awaitValue(receivedParams, 5, TimeUnit.SECONDS); + assertFalse("expected analytics event but got diagnostic event instead", p.diagnostic); + return p; + } + + CapturedPayload awaitDiagnostic() { + CapturedPayload p = awaitValue(receivedParams, 5, TimeUnit.SECONDS); + assertTrue("expected a diagnostic event but got analytics events instead", p.diagnostic); + return p; + } + + void 
expectNoRequests(long timeoutMillis) { + assertNoMoreValues(receivedParams, timeoutMillis, TimeUnit.MILLISECONDS); + } + + List getEventsFromLastRequest() { + CapturedPayload p = awaitRequest(); + LDValue a = LDValue.parse(p.data); + assertEquals(p.eventCount, a.size()); + List ret = new ArrayList<>(); + for (LDValue v: a.values()) { + ret.add(jsonFromValue(v)); + } + return ret; + } + } + + public static Matcher isIdentifyEvent(Event sourceEvent, LDValue context) { + return allOf( + jsonProperty("kind", "identify"), + jsonProperty("creationDate", (double)sourceEvent.getCreationDate()), + jsonProperty("context", jsonFromValue(context)) + ); + } + + public static Matcher isMigrationEvent(Event sourceEvent, LDValue context) { + // Doesn't fully test an event, but makes sure it is a specific event. + return allOf( + jsonProperty("kind", "migration_op"), + jsonProperty("creationDate", (double)sourceEvent.getCreationDate()), + hasContextKeys(sourceEvent) + ); + } + + public static Matcher isIndexEvent() { + return jsonProperty("kind", "index"); + } + + public static Matcher isIndexEvent(Event sourceEvent, LDValue context) { + return allOf( + jsonProperty("kind", "index"), + jsonProperty("creationDate", (double)sourceEvent.getCreationDate()), + jsonProperty("context", jsonFromValue(context)) + ); + } + + public static Matcher isFeatureEvent(Event.FeatureRequest sourceEvent, LDValue inlineContext) { + return isFeatureOrDebugEvent(sourceEvent, inlineContext, false); + } + + public static Matcher isDebugEvent(Event.FeatureRequest sourceEvent, LDValue inlineContext) { + return isFeatureOrDebugEvent(sourceEvent, inlineContext, true); + } + + @SuppressWarnings("unchecked") + private static Matcher isFeatureOrDebugEvent(Event.FeatureRequest sourceEvent, + LDValue inlineContext, boolean debug) { + return allOf( + jsonProperty("kind", debug ? "debug" : "feature"), + jsonProperty("creationDate", (double)sourceEvent.getCreationDate()), + jsonProperty("key", sourceEvent.getKey()), + jsonProperty("version", sourceEvent.getVersion()), + jsonProperty("variation", sourceEvent.getVariation()), + jsonProperty("value", jsonFromValue(sourceEvent.getValue())), + hasInlineContext(inlineContext), + jsonProperty("reason", sourceEvent.getReason() == null ? jsonUndefined() : jsonEqualsValue(sourceEvent.getReason())), + jsonProperty("prereqOf", sourceEvent.getPrereqOf() == null ? jsonUndefined() : jsonEqualsValue(sourceEvent.getPrereqOf())) + ); + } + + public static Matcher isCustomEvent(Event.Custom sourceEvent) { + boolean hasData = sourceEvent.getData() != null && !sourceEvent.getData().isNull(); + return allOf( + jsonProperty("kind", "custom"), + jsonProperty("creationDate", (double)sourceEvent.getCreationDate()), + jsonProperty("key", sourceEvent.getKey()), + hasContextKeys(sourceEvent), + jsonProperty("data", hasData ? jsonEqualsValue(sourceEvent.getData()) : jsonUndefined()), + jsonProperty("metricValue", sourceEvent.getMetricValue() == null ? 
jsonUndefined() : jsonEqualsValue(sourceEvent.getMetricValue())) + ); + } + + public static Matcher hasContextKeys(Event sourceEvent) { + ObjectBuilder b = LDValue.buildObject(); + LDContext c = sourceEvent.getContext(); + for (int i = 0; i < c.getIndividualContextCount(); i++) { + LDContext c1 = c.getIndividualContext(i); + b.put(c1.getKind().toString(), c1.getKey()); + } + return jsonProperty("contextKeys", jsonEqualsValue(b.build())); + } + + public static Matcher hasInlineContext(LDValue inlineContext) { + return allOf( + jsonProperty("context", jsonEqualsValue(inlineContext)), + jsonProperty("contextKeys", jsonUndefined()) + ); + } + + public static Matcher isSummaryEvent() { + return jsonProperty("kind", "summary"); + } + + public static Matcher isSummaryEvent(long startDate, long endDate) { + return allOf( + jsonProperty("kind", "summary"), + jsonProperty("startDate", (double)startDate), + jsonProperty("endDate", (double)endDate) + ); + } + + public static Matcher hasSummaryFlag(String key, LDValue defaultVal, Matcher> counters) { + return jsonProperty("features", + jsonProperty(key, allOf( + jsonProperty("default", jsonFromValue(defaultVal)), + jsonProperty("counters", isJsonArray(counters)) + ))); + } + + public static Matcher isSummaryEventCounter(int flagVersion, Integer variation, LDValue value, int count) { + return allOf( + jsonProperty("variation", variation), + jsonProperty("version", (double)flagVersion), + jsonProperty("value", jsonFromValue(value)), + jsonProperty("count", (double)count) + ); + } + + public static FeatureRequestEventBuilder featureEvent(LDContext context, String flagKey) { + return new FeatureRequestEventBuilder(context, flagKey); + } + + public static CustomEventBuilder customEvent(LDContext context, String flagKey) { + return new CustomEventBuilder(context, flagKey); + } + + public static Event.Identify identifyEvent(LDContext context) { + return new Event.Identify(FAKE_TIME, context); + } + + /** + * This builder is similar to the public SDK configuration builder for events, except it is building + * the internal config object for the lower-level event processing code. This allows us to test that + * code independently of the rest of the SDK. Note that the default values here are deliberately not + * the same as the defaults in the SDK; they are chosen to make it unlikely for tests to be affected + * by any behavior we're not specifically trying to test-- for instance, a long flush interval means + * that flushes normally won't happen, and any test where we want flushes to happen will not rely on + * the defaults. + *
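For instance (mirroring a pattern that appears in the tests later in this diff), a test that does want automatic flushing overrides the interval explicitly rather than relying on these defaults:

// e.g. inside a test body, where es is a MockEventSender:
try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).flushIntervalMillis(50))) {
  // events sent here will be flushed automatically after roughly 50 ms
}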
<p>
+ * This is defined only in test code, instead of as an inner class of EventsConfiguration, because + * in non-test code there's only one place where we ever construct EventsConfiguration. + */ + public static class EventsConfigurationBuilder { + private boolean allAttributesPrivate = false; + private int capacity = 1000; + private EventContextDeduplicator contextDeduplicator = null; + private long diagnosticRecordingIntervalMillis = 1000000; + private DiagnosticStore diagnosticStore = null; + private EventSender eventSender = null; + private int eventSendingThreadPoolSize = EventsConfiguration.DEFAULT_EVENT_SENDING_THREAD_POOL_SIZE; + private URI eventsUri = URI.create("not-valid"); + private long flushIntervalMillis = 1000000; + private boolean initiallyInBackground = false; + private boolean initiallyOffline = false; + private Set privateAttributes = new HashSet<>(); + + public EventsConfiguration build() { + return new EventsConfiguration( + allAttributesPrivate, + capacity, + contextDeduplicator, + diagnosticRecordingIntervalMillis, + diagnosticStore, + eventSender, + eventSendingThreadPoolSize, + eventsUri, + flushIntervalMillis, + initiallyInBackground, + initiallyOffline, + privateAttributes + ); + } + + public EventsConfigurationBuilder allAttributesPrivate(boolean allAttributesPrivate) { + this.allAttributesPrivate = allAttributesPrivate; + return this; + } + + public EventsConfigurationBuilder capacity(int capacity) { + this.capacity = capacity; + return this; + } + + public EventsConfigurationBuilder contextDeduplicator(EventContextDeduplicator contextDeduplicator) { + this.contextDeduplicator = contextDeduplicator; + return this; + } + + public EventsConfigurationBuilder diagnosticRecordingIntervalMillis(long diagnosticRecordingIntervalMillis) { + this.diagnosticRecordingIntervalMillis = diagnosticRecordingIntervalMillis; + return this; + } + + public EventsConfigurationBuilder diagnosticStore(DiagnosticStore diagnosticStore) { + this.diagnosticStore = diagnosticStore; + return this; + } + + public EventsConfigurationBuilder eventSender(EventSender eventSender) { + this.eventSender = eventSender; + return this; + } + + public EventsConfigurationBuilder eventSendingThreadPoolSize(int eventSendingThreadPoolSize) { + this.eventSendingThreadPoolSize = eventSendingThreadPoolSize; + return this; + } + + public EventsConfigurationBuilder eventsUri(URI eventsUri) { + this.eventsUri = eventsUri; + return this; + } + + public EventsConfigurationBuilder flushIntervalMillis(long flushIntervalMillis) { + this.flushIntervalMillis = flushIntervalMillis; + return this; + } + + public EventsConfigurationBuilder initiallyInBackground(boolean initiallyInBackground) { + this.initiallyInBackground = initiallyInBackground; + return this; + } + + public EventsConfigurationBuilder initiallyOffline(boolean initiallyOffline) { + this.initiallyOffline = initiallyOffline; + return this; + } + + public EventsConfigurationBuilder privateAttributes(Set privateAttributes) { + this.privateAttributes = privateAttributes; + return this; + } + } + + public static EventContextDeduplicator contextDeduplicatorThatAlwaysSaysKeysAreNew() { + return new EventContextDeduplicator() { + @Override + public Long getFlushInterval() { + return null; + } + + @Override + public boolean processContext(LDContext context) { + return true; + } + + @Override + public void flush() {} + }; + } + + public static EventContextDeduplicator contextDeduplicatorThatSaysKeyIsNewOnFirstCallOnly() { + return new EventContextDeduplicator() { + 
private int calls = 0; + + @Override + public Long getFlushInterval() { + return null; + } + + @Override + public boolean processContext(LDContext context) { + ++calls; + return calls == 1; + } + + @Override + public void flush() {} + }; + } + + public static final class FeatureRequestEventBuilder { + private long timestamp = FAKE_TIME; + private LDContext context; + private String flagKey; + private int flagVersion = 100; + private int variation = 1; + private LDValue value = LDValue.of("value"); + private EvaluationReason reason = null; + private LDValue defaultValue = LDValue.of("default"); + private String prereqOf = null; + private boolean trackEvents = false; + private Long debugEventsUntilDate = null; + private long samplingRatio = 1; + private boolean excludeFromSummaries = false; + + public FeatureRequestEventBuilder(LDContext context, String flagKey) { + this.context = context; + this.flagKey = flagKey; + } + + public Event.FeatureRequest build() { + return new Event.FeatureRequest(timestamp, flagKey, context, flagVersion, variation, value, + defaultValue, reason, prereqOf, trackEvents, debugEventsUntilDate, false, samplingRatio, + excludeFromSummaries); + } + + public FeatureRequestEventBuilder flagVersion(int flagVersion) { + this.flagVersion = flagVersion; + return this; + } + + public FeatureRequestEventBuilder variation(int variation) { + this.variation = variation; + return this; + } + + public FeatureRequestEventBuilder value(LDValue value) { + this.value = value; + return this; + } + + public FeatureRequestEventBuilder defaultValue(LDValue defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public FeatureRequestEventBuilder reason(EvaluationReason reason) { + this.reason = reason; + return this; + } + + public FeatureRequestEventBuilder prereqOf(String prereqOf) { + this.prereqOf = prereqOf; + return this; + } + + public FeatureRequestEventBuilder trackEvents(boolean trackEvents) { + this.trackEvents = trackEvents; + return this; + } + + public FeatureRequestEventBuilder debugEventsUntilDate(Long debugEventsUntilDate) { + this.debugEventsUntilDate = debugEventsUntilDate; + return this; + } + + public FeatureRequestEventBuilder excludeFromSummaries(boolean excludeFromSummaries) { + this.excludeFromSummaries = excludeFromSummaries; + return this; + } + + public FeatureRequestEventBuilder samplingRatio(long samplingRatio) { + this.samplingRatio = samplingRatio; + return this; + } + } + + public static final class CustomEventBuilder { + private long timestamp = FAKE_TIME; + private LDContext context; + private String eventKey; + private LDValue data = LDValue.ofNull(); + private Double metricValue = null; + + public CustomEventBuilder(LDContext context, String eventKey) { + this.context = context; + this.eventKey = eventKey; + } + + public Event.Custom build() { + return new Event.Custom(timestamp, eventKey, context, data, metricValue); + } + + public CustomEventBuilder data(LDValue data) { + this.data = data; + return this; + } + + public CustomEventBuilder metricValue(Double metricValue) { + this.metricValue = metricValue; + return this; + } + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorDiagnosticsTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorDiagnosticsTest.java new file mode 100644 index 0000000..3e95ac6 --- /dev/null +++ 
b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorDiagnosticsTest.java @@ -0,0 +1,214 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.LDValue; + +import org.junit.Test; + +import java.net.URI; + +import static com.launchdarkly.testhelpers.JsonAssertions.jsonNull; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonProperty; +import static com.launchdarkly.testhelpers.JsonTestValue.jsonOf; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertNotNull; + +/** + * These DefaultEventProcessor tests cover diagnostic event behavior. + */ +@SuppressWarnings("javadoc") +public class DefaultEventProcessorDiagnosticsTest extends BaseEventTest { + private static LDValue fakePlatformData = LDValue.buildObject().put("cats", 2).build(); + + private DiagnosticId diagnosticId; + private DiagnosticStore diagnosticStore; + + public DefaultEventProcessorDiagnosticsTest() { + diagnosticStore = new DiagnosticStore( + new DiagnosticStore.SdkDiagnosticParams( + SDK_KEY, + "fake-sdk", + "1.2.3", + "fake-platform", + fakePlatformData, + null, + null + )); + diagnosticId = diagnosticStore.getDiagnosticId(); + } + + @Test + public void diagnosticEventsSentToDiagnosticEndpoint() throws Exception { + MockEventSender es = new MockEventSender(); + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).diagnosticStore(diagnosticStore))) { + CapturedPayload initReq = es.awaitDiagnostic(); + ep.postDiagnostic(); + CapturedPayload periodicReq = es.awaitDiagnostic(); + + assertThat(initReq.diagnostic, is(true)); + assertThat(periodicReq.diagnostic, is(true)); + } + } + + @Test + public void initialDiagnosticEventHasInitBody() throws Exception { + MockEventSender es = new MockEventSender(); + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).diagnosticStore(diagnosticStore))) { + CapturedPayload req = es.awaitDiagnostic(); + + assertThat(jsonOf(req.data), allOf( + jsonProperty("kind", "diagnostic-init"), + jsonProperty("id", jsonProperty("diagnosticId", diagnosticId.diagnosticId)), + jsonProperty("id", jsonProperty("sdkKeySuffix", diagnosticId.sdkKeySuffix)), + jsonProperty("configuration", not(jsonNull())), + jsonProperty("sdk", not(jsonNull())), + jsonProperty("platform", not(jsonNull())) + )); + } + } + + @SuppressWarnings("unchecked") + @Test + public void periodicDiagnosticEventHasStatisticsBody() throws Exception { + MockEventSender es = new MockEventSender(); + long dataSinceDate = diagnosticStore.getDataSinceDate(); + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).diagnosticStore(diagnosticStore))) { + // Ignore the initial diagnostic event + es.awaitDiagnostic(); + ep.postDiagnostic(); + CapturedPayload periodicReq = es.awaitDiagnostic(); + + assertThat(jsonOf(periodicReq.data), allOf( + jsonProperty("kind", "diagnostic"), + jsonProperty("id", jsonProperty("diagnosticId", diagnosticId.diagnosticId)), + jsonProperty("id", jsonProperty("sdkKeySuffix", diagnosticId.sdkKeySuffix)), + jsonProperty("dataSinceDate", dataSinceDate), + jsonProperty("creationDate", diagnosticStore.getDataSinceDate()), + jsonProperty("deduplicatedUsers", 0), + jsonProperty("eventsInLastBatch", 0), + jsonProperty("droppedEvents", 0) + )); + } + } + + @Test + public void 
periodicDiagnosticEventGetsEventsInLastBatchAndDeduplicatedUsers() throws Exception { + MockEventSender es = new MockEventSender(); + Event.FeatureRequest fe1 = featureEvent(user, "flagkey1").build(); + Event.FeatureRequest fe2 = featureEvent(user, "flagkey2").build(); + + // Create a fake deduplicator that just says "not seen" for the first call and "seen" thereafter + EventContextDeduplicator contextDeduplicator = contextDeduplicatorThatSaysKeyIsNewOnFirstCallOnly(); + + try (DefaultEventProcessor ep = makeEventProcessor( + baseConfig(es).contextDeduplicator(contextDeduplicator).diagnosticStore(diagnosticStore))) { + // Ignore the initial diagnostic event + es.awaitDiagnostic(); + + ep.sendEvent(fe1); + ep.sendEvent(fe2); + ep.flushAsync(); + // Ignore normal events + es.awaitAnalytics(); + + ep.postDiagnostic(); + CapturedPayload periodicReq = es.awaitRequest(); + + assertNotNull(periodicReq); + + assertThat(jsonOf(periodicReq.data), allOf( + jsonProperty("deduplicatedUsers", 1), + jsonProperty("eventsInLastBatch", 2), // 1 index event + 1 summary event + jsonProperty("droppedEvents", 0) + )); + } + } + + @Test + public void periodicDiagnosticEventsAreSentAutomatically() throws Exception { + MockEventSender es = new MockEventSender(); + + EventsConfigurationBuilder eventsConfig = makeEventsConfigurationWithBriefDiagnosticInterval(es); + + try (DefaultEventProcessor ep = makeEventProcessor(eventsConfig.diagnosticStore(diagnosticStore))) { + // Ignore the initial diagnostic event + es.awaitDiagnostic(); + + CapturedPayload periodicReq = es.awaitRequest(); + + assertNotNull(periodicReq); + + assertThat(jsonOf(periodicReq.data), jsonProperty("kind", "diagnostic")); + } + } + + @Test + public void periodicDiagnosticEventsAreNotSentWhenInBackground() throws Exception { + MockEventSender es = new MockEventSender(); + + EventsConfigurationBuilder eventsConfig = makeEventsConfigurationWithBriefDiagnosticInterval(es); + + try (DefaultEventProcessor ep = makeEventProcessor(eventsConfig.diagnosticStore(diagnosticStore))) { + // Ignore the initial diagnostic event + es.awaitDiagnostic(); + + // Expect a periodic diagnostic event + es.awaitDiagnostic(); + + // Now turn on background mode, which should make periodic events stop. + ep.setInBackground(true); + + try { + es.expectNoRequests(200); + } catch (AssertionError e) { + // Might have been a race condition where an event got scheduled before the background mode change; + // if so, there should be a gap with no events after that, so try the assertion again. + es.expectNoRequests(200); + } + + // Turn off background mode; periodic events should resume + ep.setInBackground(false); + + es.awaitDiagnostic(); + } + } + + private EventsConfigurationBuilder makeEventsConfigurationWithBriefDiagnosticInterval(EventSender es) { + return baseConfig(es).diagnosticRecordingIntervalMillis(50); + } + + @Test + public void diagnosticEventsStopAfter401Error() throws Exception { + // This is easier to test with a mock component than it would be in LDClientEndToEndTest, because + // we don't have to worry about the latency of a real HTTP request which could allow the periodic + // task to fire again before we received a response. In real life, that wouldn't matter because + // the minimum diagnostic interval is so long, but in a test we need to be able to use a short + // interval. 
+ MockEventSender es = new MockEventSender(); + es.result = new EventSender.Result(false, true, null); // mustShutdown=true; this is what would be returned for a 401 error + + EventsConfigurationBuilder eventsConfig = makeEventsConfigurationWithBriefDiagnosticInterval(es); + + try (DefaultEventProcessor ep = makeEventProcessor(eventsConfig.diagnosticStore(diagnosticStore))) { + // Ignore the initial diagnostic event + es.awaitDiagnostic(); + + es.expectNoRequests(100); + } + } + + @Test + public void customBaseUriIsPassedToEventSenderForDiagnosticEvents() throws Exception { + MockEventSender es = new MockEventSender(); + URI uri = URI.create("fake-uri"); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).eventsUri(uri).diagnosticStore(diagnosticStore))) { + } + + CapturedPayload p = es.awaitRequest(); + assertThat(p.eventsBaseUri, equalTo(uri)); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorOutputTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorOutputTest.java new file mode 100644 index 0000000..684004a --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorOutputTest.java @@ -0,0 +1,458 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; + +import com.launchdarkly.testhelpers.JsonTestValue; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Date; +import java.util.List; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; + +/** + * These DefaultEventProcessor tests cover the specific content that should appear in event payloads. + */ +@SuppressWarnings("javadoc") +public class DefaultEventProcessorOutputTest extends BaseEventTest { + private static final LDContext invalidContext = LDContext.create(null); + + // Note: context deduplication behavior has been abstracted out of DefaultEventProcessor, so that + // by default it does not generate any index events. Test cases in this file that are not + // specifically related to index events use this default behavior, and do not expect to see any. + // When we are specifically testing this behavior, we substitute a mock EventContextDeduplicator + // so we can verify how its outputs affect DefaultEventProcessor. + + @Test + public void identifyEventIsQueued() throws Exception { + MockEventSender es = new MockEventSender(); + Event e = identifyEvent(user); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(e); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isIdentifyEvent(e, userJson) + )); + } + + @Test + public void userIsFilteredInIdentifyEvent() throws Exception { + MockEventSender es = new MockEventSender(); + Event e = identifyEvent(user); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).allAttributesPrivate(true))) { + ep.sendEvent(e); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isIdentifyEvent(e, filteredUserJson) + )); + } + + @Test + public void identifyEventWithNullContextOrInvalidContextDoesNotCauseError() throws Exception { + // This should never happen because LDClient.identify() rejects such a user, but just in case, + // we want to make sure it doesn't blow up the event processor. 
+ MockEventSender es = new MockEventSender(); + Event event1 = identifyEvent(invalidContext); + Event event2 = identifyEvent(null); + Event event3 = identifyEvent(user); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(event1); + ep.sendEvent(event2); + ep.sendEvent(event3); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isIdentifyEvent(event3, userJson) + )); + } + + @SuppressWarnings("unchecked") + @Test + public void individualFeatureEventIsQueuedWithIndexEvent() throws Exception { + MockEventSender es = new MockEventSender(); + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).trackEvents(true).build(); + + EventContextDeduplicator contextDeduplicator = contextDeduplicatorThatAlwaysSaysKeysAreNew(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).contextDeduplicator(contextDeduplicator))) { + ep.sendEvent(fe); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isIndexEvent(fe, userJson), + isFeatureEvent(fe, userJson), + isSummaryEvent() + )); + } + + @Test + public void featureEventWith0SamplingRatioIsNotSampled() throws Exception { + MockEventSender es = new MockEventSender(); + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).trackEvents(true).samplingRatio(0).build(); + + EventContextDeduplicator contextDeduplicator = contextDeduplicatorThatAlwaysSaysKeysAreNew(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).contextDeduplicator(contextDeduplicator))) { + ep.sendEvent(fe); + } + + List events = es.getEventsFromLastRequest(); + assertThat(events, contains( + isIndexEvent(fe, userJson), + isSummaryEvent() + )); + // No feature event. + Assert.assertEquals(2, events.size()); + } + + @Test + public void featureEventCanBeExcludedFromSummaries() throws Exception { + MockEventSender es = new MockEventSender(); + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).trackEvents(true).excludeFromSummaries(true).build(); + + EventContextDeduplicator contextDeduplicator = contextDeduplicatorThatAlwaysSaysKeysAreNew(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).contextDeduplicator(contextDeduplicator))) { + ep.sendEvent(fe); + } + + List events = es.getEventsFromLastRequest(); + assertThat(events, contains( + isIndexEvent(fe, userJson), + isFeatureEvent(fe, userJson) + )); + // No feature event. 
+ Assert.assertEquals(2, events.size()); + } + + @SuppressWarnings("unchecked") + @Test + public void userIsFilteredInIndexEvent() throws Exception { + MockEventSender es = new MockEventSender(); + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).build(); + + EventContextDeduplicator contextDeduplicator = contextDeduplicatorThatAlwaysSaysKeysAreNew(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).allAttributesPrivate(true).contextDeduplicator(contextDeduplicator))) { + ep.sendEvent(fe); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isIndexEvent(fe, filteredUserJson), + isSummaryEvent() + )); + } + + @SuppressWarnings("unchecked") + @Test + public void featureEventCanBeForPrerequisite() throws Exception { + MockEventSender es = new MockEventSender(); + String prereqKey = "prereqkey"; + Event.FeatureRequest fe = featureEvent(user, prereqKey).prereqOf(FLAG_KEY).trackEvents(true).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(fe); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isFeatureEvent(fe, userJson), + isSummaryEvent() + )); + } + + @Test + public void featureEventWithNullContextOrInvalidContextIsIgnored() throws Exception { + // This should never happen because LDClient rejects such a user, but just in case, + // we want to make sure it doesn't blow up the event processor. + MockEventSender es = new MockEventSender(); + Event.FeatureRequest event1 = featureEvent(invalidContext, FLAG_KEY).build(); + Event.FeatureRequest event2 = featureEvent(null, FLAG_KEY).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es) + .allAttributesPrivate(true))) { + ep.sendEvent(event1); + ep.sendEvent(event2); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isSummaryEvent() + )); + } + + @SuppressWarnings("unchecked") + @Test + public void featureEventCanContainReason() throws Exception { + MockEventSender es = new MockEventSender(); + EvaluationReason reason = EvaluationReason.ruleMatch(1, null); + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).reason(reason).trackEvents(true).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(fe); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isFeatureEvent(fe, userJson), + isSummaryEvent() + )); + } + + @SuppressWarnings("unchecked") + @Test + public void eventKindIsDebugIfFlagIsTemporarilyInDebugMode() throws Exception { + MockEventSender es = new MockEventSender(); + long futureTime = System.currentTimeMillis() + 1000000; + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).debugEventsUntilDate(futureTime).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(fe); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isDebugEvent(fe, userJson), + isSummaryEvent() + )); + } + + @SuppressWarnings("unchecked") + @Test + public void eventCanBeBothTrackedAndDebugged() throws Exception { + MockEventSender es = new MockEventSender(); + long futureTime = System.currentTimeMillis() + 1000000; + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).trackEvents(true).debugEventsUntilDate(futureTime).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(fe); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isFeatureEvent(fe, userJson), + isDebugEvent(fe, userJson), + isSummaryEvent() + )); + } + + @Test + public void 
debugModeExpiresBasedOnClientTimeIfClientTimeIsLaterThanServerTime() throws Exception { + MockEventSender es = new MockEventSender(); + + // Pick a server time that is somewhat behind the client time + long serverTime = System.currentTimeMillis() - 20000; + es.result = new EventSender.Result(true, false, new Date(serverTime)); + + long debugUntil = serverTime + 1000; + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).debugEventsUntilDate(debugUntil).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + // Send and flush an event we don't care about, just so we'll receive "resp1" which sets the last server time + ep.sendEvent(identifyEvent(LDContext.create("otherUser"))); + ep.flushBlocking(); // wait till flush is done so we know we received the first response, with the date + es.awaitRequest(); + + es.receivedParams.clear(); + es.result = new EventSender.Result(true, false, null); + + // Now send an event with debug mode on, with a "debug until" time that is further in + // the future than the server time, but in the past compared to the client. + ep.sendEvent(fe); + } + + // Should get a summary event only, not a full feature event + assertThat(es.getEventsFromLastRequest(), contains( + isSummaryEvent(fe.getCreationDate(), fe.getCreationDate()) + )); + } + + @Test + public void debugModeExpiresBasedOnServerTimeIfServerTimeIsLaterThanClientTime() throws Exception { + MockEventSender es = new MockEventSender(); + + // Pick a server time that is somewhat ahead of the client time + long serverTime = System.currentTimeMillis() + 20000; + es.result = new EventSender.Result(true, false, new Date(serverTime)); + + long debugUntil = serverTime - 1000; + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).debugEventsUntilDate(debugUntil).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + // Send and flush an event we don't care about, just to set the last server time + ep.sendEvent(identifyEvent(LDContext.create("otherUser"))); + ep.flushBlocking(); // wait till flush is done so we know we received the first response, with the date + es.awaitRequest(); + + es.receivedParams.clear(); + es.result = new EventSender.Result(true, false, null); + + // Now send an event with debug mode on, with a "debug until" time that is further in + // the future than the client time, but in the past compared to the server. + ep.sendEvent(fe); + } + + // Should get a summary event only, not a full feature event + assertThat(es.getEventsFromLastRequest(), contains( + isSummaryEvent(fe.getCreationDate(), fe.getCreationDate()) + )); + } + + @SuppressWarnings("unchecked") + @Test + public void twoFeatureEventsForSameContextGenerateOnlyOneIndexEvent() throws Exception { + // More accurately, this is testing that DefaultEventProcessor respects whatever the + // EventContextDeduplicator says about whether a context key is new or not. We will set up + // an EventContextDeduplicator that reports "new" on the first call and "not new" on the 2nd. 
+ EventContextDeduplicator contextDeduplicator = contextDeduplicatorThatSaysKeyIsNewOnFirstCallOnly(); + + MockEventSender es = new MockEventSender(); + Event.FeatureRequest fe1 = featureEvent(user, "flagkey1").trackEvents(true).build(); + Event.FeatureRequest fe2 = featureEvent(user, "flagkey2").trackEvents(true).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).contextDeduplicator(contextDeduplicator))) { + ep.sendEvent(fe1); + ep.sendEvent(fe2); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isIndexEvent(fe1, userJson), + isFeatureEvent(fe1, userJson), + isFeatureEvent(fe2, userJson), + isSummaryEvent(fe1.getCreationDate(), fe2.getCreationDate()) + )); + } + + @SuppressWarnings("unchecked") + @Test + public void identifyEventMakesIndexEventUnnecessary() throws Exception { + MockEventSender es = new MockEventSender(); + Event ie = new Event.Identify(FAKE_TIME, user); + Event.FeatureRequest fe = featureEvent(user, FLAG_KEY).trackEvents(true).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(ie); + ep.sendEvent(fe); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isIdentifyEvent(ie, userJson), + isFeatureEvent(fe, userJson), + isSummaryEvent() + )); + } + + + @SuppressWarnings("unchecked") + @Test + public void nonTrackedEventsAreSummarized() throws Exception { + MockEventSender es = new MockEventSender(); + String flagkey1 = "flagkey1", flagkey2 = "flagkey2"; + int version1 = 11, version2 = 22; + LDValue value1 = LDValue.of("value1"), value2 = LDValue.of("value2"); + LDValue default1 = LDValue.of("default1"), default2 = LDValue.of("default2"); + Event fe1a = featureEvent(user, flagkey1).flagVersion(version1) + .variation(1).value(value1).defaultValue(default1).build(); + Event fe1b = featureEvent(user, flagkey1).flagVersion(version1) + .variation(1).value(value1).defaultValue(default1).build(); + Event fe1c = featureEvent(user, flagkey1).flagVersion(version1) + .variation(2).value(value2).defaultValue(default1).build(); + Event fe2 = featureEvent(user, flagkey2).flagVersion(version2) + .variation(2).value(value2).defaultValue(default2).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(fe1a); + ep.sendEvent(fe1b); + ep.sendEvent(fe1c); + ep.sendEvent(fe2); + } + + assertThat(es.getEventsFromLastRequest(), contains( + allOf( + isSummaryEvent(fe1a.getCreationDate(), fe2.getCreationDate()), + hasSummaryFlag(flagkey1, default1, + Matchers.containsInAnyOrder( + isSummaryEventCounter(version1, 1, value1, 2), + isSummaryEventCounter(version1, 2, value2, 1) + )), + hasSummaryFlag(flagkey2, default2, + contains(isSummaryEventCounter(version2, 2, value2, 1))) + ) + )); + } + + @Test + public void customEventIsQueuedWithUser() throws Exception { + MockEventSender es = new MockEventSender(); + LDValue data = LDValue.buildObject().put("thing", LDValue.of("stuff")).build(); + double metric = 1.5; + Event.Custom ce = customEvent(user, "eventkey").data(data).metricValue(metric).build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(ce); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isCustomEvent(ce) + )); + } + + @Test + public void customEventWithNullContextOrInvalidContextDoesNotCauseError() throws Exception { + // This should never happen because LDClient rejects such a user, but just in case, + // we want to make sure it doesn't blow up the event processor. 
+ MockEventSender es = new MockEventSender(); + Event.Custom event1 = customEvent(invalidContext, "eventkey").build(); + Event.Custom event2 = customEvent(null, "eventkey").build(); + Event.Custom event3 = customEvent(user, "eventkey").build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(event1); + ep.sendEvent(event2); + ep.sendEvent(event3); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isCustomEvent(event3) + )); + } + + @Test + public void migrationEventIsQueued() throws Exception { + MockEventSender es = new MockEventSender(); + Event.MigrationOp event = new Event.MigrationOp( + 0, + user, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 1, + "read", + new Event.MigrationOp.InvokedMeasurement(true, false), + null, + null, + null + ); + + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(event); + } + + List events = es.getEventsFromLastRequest(); + assertThat(events, contains( + isMigrationEvent(event, userJson) + )); + // Migration events should not trigger any other events (index, debug, etc.) + Assert.assertEquals(1, events.size()); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorTest.java new file mode 100644 index 0000000..0350275 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventProcessorTest.java @@ -0,0 +1,352 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.testhelpers.JsonTestValue; + +import org.hamcrest.Matchers; +import org.junit.Test; + +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * These tests cover all of the basic DefaultEventProcessor behavior that is not covered by + * DefaultEventProcessorOutputTest or DefaultEventProcessorDiagnosticTest. 
+ */ +@SuppressWarnings("javadoc") +public class DefaultEventProcessorTest extends BaseEventTest { + @SuppressWarnings("unchecked") + @Test + public void eventsAreFlushedAutomatically() throws Exception { + MockEventSender es = new MockEventSender(); + long briefFlushInterval = 50; + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).flushIntervalMillis(briefFlushInterval))) { + Event.Custom event1 = customEvent(user, "event1").build(); + Event.Custom event2 = customEvent(user, "event2").build(); + ep.sendEvent(event1); + ep.sendEvent(event2); + + // getEventsFromLastRequest will block until the MockEventSender receives a payload - we expect + // both events to be in one payload, but if some unusual delay happened in between the two + // sendEvent calls, they might be in two + List payload1 = es.getEventsFromLastRequest(); + if (payload1.size() == 1) { + assertThat(payload1, contains(isCustomEvent(event1))); + assertThat(es.getEventsFromLastRequest(), contains(isCustomEvent(event2))); + } else { + assertThat(payload1, contains(isCustomEvent(event1), isCustomEvent(event2))); + } + + Event.Custom event3 = customEvent(user, "event3").build(); + ep.sendEvent(event3); + assertThat(es.getEventsFromLastRequest(), contains(isCustomEvent(event3))); + } + + Event.Custom ce = customEvent(user, "eventkey").build(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(ce); + } + + assertThat(es.getEventsFromLastRequest(), contains( + isCustomEvent(ce) + )); + } + + @SuppressWarnings("unchecked") + @Test + public void eventsAreNotFlushedWhenNotConnected() throws Exception { + MockEventSender es = new MockEventSender(); + long briefFlushInterval = 50; + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es) + .flushIntervalMillis(briefFlushInterval) + .initiallyOffline(true))) { + Event.Custom event1 = customEvent(user, "event1").build(); + Event.Custom event2 = customEvent(user, "event2").build(); + ep.sendEvent(event1); + ep.sendEvent(event2); + + es.expectNoRequests(200); + + ep.setOffline(false); + + List payload1 = es.getEventsFromLastRequest(); + assertThat(payload1, contains(isCustomEvent(event1), isCustomEvent(event2))); + } + } + + @Test + public void closingEventProcessorForcesSynchronousFlush() throws Exception { + MockEventSender es = new MockEventSender(); + Event e = identifyEvent(user); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(e); + } + + assertThat(es.getEventsFromLastRequest(), contains(isIdentifyEvent(e, userJson))); + } + + @Test + public void nothingIsSentIfThereAreNoEvents() throws Exception { + MockEventSender es = new MockEventSender(); + DefaultEventProcessor ep = makeEventProcessor(baseConfig(es)); + ep.close(); + + assertEquals(0, es.receivedParams.size()); + } + + @Test + public void contextKeysAreFlushedAutomatically() throws Exception { + // This test sets the context key flush interval to a small value and verifies that the + // context deduplicator receives a flush call. 
+ MockEventSender es = new MockEventSender(); + long briefContextFlushIntervalMillis = 60; + Semaphore flushCalled = new Semaphore(0); + EventContextDeduplicator contextDeduplicator = new EventContextDeduplicator() { + @Override + public Long getFlushInterval() { + return briefContextFlushIntervalMillis; + } + + @Override + public boolean processContext(LDContext context) { + return false; + } + + @Override + public void flush() { + flushCalled.release(); + } + }; + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).contextDeduplicator(contextDeduplicator))) { + boolean called = flushCalled.tryAcquire(briefContextFlushIntervalMillis * 2, TimeUnit.MILLISECONDS); + assertTrue("expected context deduplicator flush method to be called, but it was not", called); + } + } + + @Test + public void eventSenderIsClosedWithEventProcessor() throws Exception { + MockEventSender es = new MockEventSender(); + assertThat(es.closed, is(false)); + DefaultEventProcessor ep = makeEventProcessor(baseConfig(es)); + ep.close(); + assertThat(es.closed, is(true)); + } + + @Test + public void eventProcessorCatchesExceptionWhenClosingEventSender() throws Exception { + MockEventSender es = new MockEventSender(); + es.fakeErrorOnClose = new IOException("sorry"); + assertThat(es.closed, is(false)); + DefaultEventProcessor ep = makeEventProcessor(baseConfig(es)); + ep.close(); + assertThat(es.closed, is(true)); + } + + @Test + public void customBaseUriIsPassedToEventSenderForAnalyticsEvents() throws Exception { + MockEventSender es = new MockEventSender(); + Event e = identifyEvent(user); + URI uri = URI.create("fake-uri"); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es).eventsUri(uri))) { + ep.sendEvent(e); + } + + CapturedPayload p = es.awaitRequest(); + assertThat(p.eventsBaseUri, equalTo(uri)); + } + + @Test + public void eventCapacityIsEnforced() throws Exception { + int capacity = 10; + MockEventSender es = new MockEventSender(); + EventsConfigurationBuilder config = baseConfig(es).capacity(capacity) + .flushIntervalMillis(1000); + // The flush interval setting is a failsafe in case we do get a queue overflow due to the tiny buffer size - + // that might cause the special message that's generated by ep.flush() to be missed, so we just want to make + // sure a flush will happen within a few seconds so getEventsFromLastRequest() won't time out. + + try (DefaultEventProcessor ep = makeEventProcessor(config)) { + for (int i = 0; i < capacity + 2; i++) { + ep.sendEvent(identifyEvent(user)); + + // Using such a tiny buffer means there's also a tiny inbox queue, so we'll add a slight + // delay to keep EventDispatcher from being overwhelmed + Thread.sleep(10); + } + ep.flushAsync(); + assertThat(es.getEventsFromLastRequest(), Matchers.iterableWithSize(capacity)); + } + } + + @Test + public void eventCapacityDoesNotPreventSummaryEventFromBeingSent() throws Exception { + int capacity = 10; + MockEventSender es = new MockEventSender(); + EventsConfigurationBuilder config = baseConfig(es).capacity(capacity) + .flushIntervalMillis(1000); + // The flush interval setting is a failsafe in case we do get a queue overflow due to the tiny buffer size - + // that might cause the special message that's generated by ep.flush() to be missed, so we just want to make + // sure a flush will happen within a few seconds so getEventsFromLastRequest() won't time out. 
+ + try (DefaultEventProcessor ep = makeEventProcessor(config)) { + Event.FeatureRequest fe = featureEvent(user, "flagkey").build(); + ep.sendEvent(fe); + + for (int i = 0; i < capacity; i++) { + Event.Custom ce = customEvent(user, "event-key").build(); + ep.sendEvent(ce); + + // Using such a tiny buffer means there's also a tiny inbox queue, so we'll add a slight + // delay to keep EventDispatcher from being overwhelmed + Thread.sleep(10); + } + + ep.flushAsync(); + List eventsReceived = es.getEventsFromLastRequest(); + + assertThat(eventsReceived, Matchers.iterableWithSize(capacity + 1)); + assertThat(eventsReceived.get(capacity), isSummaryEvent()); + } + } + + @Test + public void noMoreEventsAreProcessedAfterUnrecoverableError() throws Exception { + MockEventSender es = new MockEventSender(); + es.result = new EventSender.Result(false, true, null); // mustShutdown == true + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.sendEvent(identifyEvent(user)); + ep.flushAsync(); + es.awaitRequest(); + + // allow a little time for the event processor to pass the "must shut down" signal back from the sender + Thread.sleep(50); + + ep.sendEvent(identifyEvent(user)); + ep.flushAsync(); + es.expectNoRequests(100); + } + } + + @Test + public void noMoreEventsAreProcessedAfterClosingEventProcessor() throws Exception { + MockEventSender es = new MockEventSender(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + ep.close(); + + ep.sendEvent(identifyEvent(user)); + ep.flushAsync(); + + es.expectNoRequests(100); + } + } + + @Test + public void uncheckedExceptionFromEventSenderDoesNotStopWorkerThread() throws Exception { + MockEventSender es = new MockEventSender(); + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + es.fakeError = new RuntimeException("sorry"); + + ep.sendEvent(identifyEvent(user)); + ep.flushAsync(); + es.awaitRequest(); + // MockEventSender now throws an unchecked exception up to EventProcessor's flush worker - + // verify that a subsequent flush still works + + es.fakeError = null; + ep.sendEvent(identifyEvent(user)); + ep.flushAsync(); + es.awaitRequest(); + } + } + + @SuppressWarnings("unchecked") + @Test + public void eventsAreKeptInBufferIfAllFlushWorkersAreBusy() throws Exception { + // Note that in the current implementation, although the intention was that we would cancel a flush + // if there's not an available flush worker, instead what happens is that we will queue *one* flush + // in that case, and then cancel the *next* flush if the workers are still busy. This is because we + // used a BlockingQueue with a size of 1, rather than a SynchronousQueue. The test below verifies + // the current behavior. 
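+ // (Illustration of the difference: new ArrayBlockingQueue<Runnable>(1).offer(task) still succeeds once while
+ // every worker is busy and only the next offer() is rejected, whereas new SynchronousQueue<Runnable>().offer(task)
+ // is rejected immediately unless a worker is already blocked in take().)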
+ + int numWorkers = 5; // must equal EventDispatcher.MAX_FLUSH_THREADS + LDContext testUser1 = LDContext.create("me"); + LDValue testUserJson1 = LDValue.buildObject().put("kind", "user").put("key", "me").build(); + LDContext testUser2 = LDContext.create("you"); + LDValue testUserJson2 = LDValue.buildObject().put("kind", "user").put("key", "you").build(); + LDContext testUser3 = LDContext.create("everyone we know"); + LDValue testUserJson3 = LDValue.buildObject().put("kind", "user").put("key", "everyone we know").build(); + + Object sendersWaitOnThis = new Object(); + CountDownLatch sendersSignalThisWhenWaiting = new CountDownLatch(numWorkers); + MockEventSender es = new MockEventSender(); + es.waitSignal = sendersWaitOnThis; + es.receivedCounter = sendersSignalThisWhenWaiting; + + try (DefaultEventProcessor ep = makeEventProcessor(baseConfig(es))) { + for (int i = 0; i < 5; i++) { + ep.sendEvent(identifyEvent(user)); + ep.flushAsync(); + es.awaitRequest(); // we don't need to see this payload, just throw it away + } + + // When our CountDownLatch reaches zero, it means all of the worker threads are blocked in MockEventSender + sendersSignalThisWhenWaiting.await(); + es.waitSignal = null; + es.receivedCounter = null; + + // Now, put an event in the buffer and try to flush again. In the current implementation (see + // above) this payload gets queued in a holding area, and will be flushed after a worker + // becomes free. + Event.Identify event1 = identifyEvent(testUser1); + ep.sendEvent(event1); + ep.flushAsync(); + + // Do an additional flush with another event. This time, the event processor should see that there's + // no space available and simply ignore the flush request. There's no way to verify programmatically + // that this has happened, so just give it a short delay. + Event.Identify event2 = identifyEvent(testUser2); + ep.sendEvent(event2); + ep.flushAsync(); + Thread.sleep(100); + + // Enqueue a third event. The current payload should now be event2 + event3. + Event.Identify event3 = identifyEvent(testUser3); + ep.sendEvent(event3); + + // Now allow the workers to unblock + synchronized (sendersWaitOnThis) { + sendersWaitOnThis.notifyAll(); + } + + // The first unblocked worker should pick up the queued payload with event1. + assertThat(es.getEventsFromLastRequest(), contains(isIdentifyEvent(event1, testUserJson1))); + + // Now a flush should succeed and send the current payload. 
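+ // ("current payload" here means event2 and event3, which stayed in the buffer while the holding area was
+ // occupied by the payload containing event1)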
+ ep.flushAsync(); + assertThat(es.getEventsFromLastRequest(), contains( + isIdentifyEvent(event2, testUserJson2), + isIdentifyEvent(event3, testUserJson3))); + } + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventSenderTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventSenderTest.java new file mode 100644 index 0000000..3084475 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DefaultEventSenderTest.java @@ -0,0 +1,403 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.internal.http.HeadersTransformer; +import com.launchdarkly.sdk.internal.http.HttpProperties; +import com.launchdarkly.testhelpers.httptest.Handler; +import com.launchdarkly.testhelpers.httptest.Handlers; +import com.launchdarkly.testhelpers.httptest.HttpServer; +import com.launchdarkly.testhelpers.httptest.RequestInfo; + +import org.junit.Test; + +import java.net.URI; +import java.nio.charset.Charset; +import java.text.SimpleDateFormat; +import java.time.Duration; +import java.util.Date; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.UUID; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.equalToIgnoringCase; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class DefaultEventSenderTest extends BaseEventTest { + private static final String FAKE_DATA = "some data"; + private static final byte[] FAKE_DATA_BYTES = FAKE_DATA.getBytes(Charset.forName("UTF-8")); + private static final SimpleDateFormat httpDateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", + Locale.US); + private static final long BRIEF_RETRY_DELAY_MILLIS = 50; + + @Override + protected boolean enableTestInAndroid() { + // Currently our use of com.launchdarkly.testhelpers.httptest makes this test file + // unusable in our Android CI test job. This is the only test file in the events + // package that performs end-to-end HTTP. 
+ return false; + } + + private EventSender makeEventSender() { + return makeEventSender(HttpProperties.defaults()); + } + + private EventSender makeEventSender(HttpProperties httpProperties) { + return new DefaultEventSender(httpProperties, null, null, BRIEF_RETRY_DELAY_MILLIS, testLogger); + } + + @Test + public void analyticsDataIsDelivered() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + + assertTrue(result.isSuccess()); + assertFalse(result.isMustShutDown()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals(DefaultEventSender.DEFAULT_ANALYTICS_REQUEST_PATH, req.getPath()); + assertThat(req.getHeader("content-type"), equalToIgnoringCase("application/json; charset=utf-8")); + assertEquals(FAKE_DATA, req.getBody()); + } + } + + @Test + public void diagnosticDataIsDelivered() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + EventSender.Result result = es.sendDiagnosticEvent(FAKE_DATA_BYTES, server.getUri()); + + assertTrue(result.isSuccess()); + assertFalse(result.isMustShutDown()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals(DefaultEventSender.DEFAULT_DIAGNOSTIC_REQUEST_PATH, req.getPath()); + assertThat(req.getHeader("content-type"), equalToIgnoringCase("application/json; charset=utf-8")); + assertEquals(FAKE_DATA, req.getBody()); + } + } + + @Test + public void customRequestPaths() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = new DefaultEventSender(HttpProperties.defaults(), + "/custom/path/a", "/custom/path/d", BRIEF_RETRY_DELAY_MILLIS, testLogger)) { + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + assertTrue(result.isSuccess()); + result = es.sendDiagnosticEvent(FAKE_DATA_BYTES, server.getUri()); + assertTrue(result.isSuccess()); + } + + RequestInfo req1 = server.getRecorder().requireRequest(); + assertEquals("/custom/path/a", req1.getPath()); + RequestInfo req2 = server.getRecorder().requireRequest(); + assertEquals("/custom/path/d", req2.getPath()); + } + } + + @Test + public void headersAreSentForAnalytics() throws Exception { + Map headers = new HashMap<>(); + headers.put("name1", "value1"); + headers.put("name2", "value2"); + HttpProperties httpProperties = new HttpProperties(0, headers, null, null, null, null, 0, null, null); + + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender(httpProperties)) { + es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + for (Map.Entry kv: headers.entrySet()) { + assertThat(req.getHeader(kv.getKey()), equalTo(kv.getValue())); + } + } + } + + @Test + public void headersAreSentForDiagnostics() throws Exception { + Map headers = new HashMap<>(); + headers.put("name1", "value1"); + headers.put("name2", "value2"); + HttpProperties httpProperties = new HttpProperties(0, headers, null, null, null, null, 0, null, null); + + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender(httpProperties)) { + es.sendDiagnosticEvent(FAKE_DATA_BYTES, server.getUri()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + for 
(Map.Entry kv: headers.entrySet()) { + assertThat(req.getHeader(kv.getKey()), equalTo(kv.getValue())); + } + } + } + + @Test + public void headersTransformerIsApplied() throws Exception { + Map headers = new HashMap<>(); + headers.put("name1", "value1"); + headers.put("name2", "value2"); + HeadersTransformer headersTransformer = new HeadersTransformer() { + @Override + public void updateHeaders(Map h) { + h.put("name1", h.get("name1") + "a"); + } + }; + HttpProperties httpProperties = new HttpProperties(0, headers, headersTransformer, null, null, null, 0, null, null); + + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender(httpProperties)) { + es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertThat(req.getHeader("name1"), equalTo("value1a")); + assertThat(req.getHeader("name2"), equalTo("value2")); + } + } + + @Test + public void eventSchemaIsSentForAnalytics() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertThat(req.getHeader("X-LaunchDarkly-Event-Schema"), equalTo("4")); + } + } + + @Test + public void eventPayloadIdIsSentForAnalytics() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + String payloadHeaderValue = req.getHeader("X-LaunchDarkly-Payload-ID"); + assertThat(payloadHeaderValue, notNullValue(String.class)); + assertThat(UUID.fromString(payloadHeaderValue), notNullValue(UUID.class)); + } + } + + @Test + public void eventPayloadIdReusedOnRetry() throws Exception { + Handler errorResponse = Handlers.status(429); + Handler errorThenSuccess = Handlers.sequential(errorResponse, eventsSuccessResponse(), eventsSuccessResponse()); + + try (HttpServer server = HttpServer.start(errorThenSuccess)) { + try (EventSender es = makeEventSender()) { + es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + } + + // Failed response request + RequestInfo req = server.getRecorder().requireRequest(); + String payloadId = req.getHeader("X-LaunchDarkly-Payload-ID"); + // Retry request has same payload ID as failed request + req = server.getRecorder().requireRequest(); + String retryId = req.getHeader("X-LaunchDarkly-Payload-ID"); + assertThat(retryId, equalTo(payloadId)); + // Second request has different payload ID from first request + req = server.getRecorder().requireRequest(); + payloadId = req.getHeader("X-LaunchDarkly-Payload-ID"); + assertThat(retryId, not(equalTo(payloadId))); + } + } + + @Test + public void eventSchemaNotSetOnDiagnosticEvents() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + es.sendDiagnosticEvent(FAKE_DATA_BYTES, server.getUri()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertNull(req.getHeader("X-LaunchDarkly-Event-Schema")); + } + } + + @Test + public void http400ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(400); + } + + @Test + public void http401ErrorIsUnrecoverable() throws Exception { + 
testUnrecoverableHttpError(401); + } + + @Test + public void http403ErrorIsUnrecoverable() throws Exception { + testUnrecoverableHttpError(403); + } + + // Cannot test our retry logic for 408, because OkHttp insists on doing its own retry on 408 so that + // we never actually see that response status. +// @Test +// public void http408ErrorIsRecoverable() throws Exception { +// testRecoverableHttpError(408); +// } + + @Test + public void http429ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(429); + } + + @Test + public void http500ErrorIsRecoverable() throws Exception { + testRecoverableHttpError(500); + } + + @Test + public void serverDateIsParsed() throws Exception { + long fakeTime = ((new Date().getTime() - 100000) / 1000) * 1000; // don't expect millisecond precision + Handler resp = Handlers.all(eventsSuccessResponse(), addDateHeader(new Date(fakeTime))); + + try (HttpServer server = HttpServer.start(resp)) { + try (EventSender es = makeEventSender()) { + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + + assertNotNull(result.getTimeFromServer()); + assertEquals(fakeTime, result.getTimeFromServer().getTime()); + } + } + } + + @Test + public void invalidServerDateIsIgnored() throws Exception { + Handler resp = Handlers.all(eventsSuccessResponse(), Handlers.header("Date", "not a date")); + + try (HttpServer server = HttpServer.start(resp)) { + try (EventSender es = makeEventSender()) { + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + + assertTrue(result.isSuccess()); + assertNull(result.getTimeFromServer()); + } + } + } + + @Test + public void baseUriDoesNotNeedTrailingSlash() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + URI uriWithoutSlash = URI.create(server.getUri().toString().replaceAll("/$", "")); + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, uriWithoutSlash); + + assertTrue(result.isSuccess()); + assertFalse(result.isMustShutDown()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("/bulk", req.getPath()); + assertThat(req.getHeader("content-type"), equalToIgnoringCase("application/json; charset=utf-8")); + assertEquals(FAKE_DATA, req.getBody()); + } + } + + @Test + public void baseUriCanHaveContextPath() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + URI baseUri = server.getUri().resolve("/context/path"); + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, baseUri); + + assertTrue(result.isSuccess()); + assertFalse(result.isMustShutDown()); + } + + RequestInfo req = server.getRecorder().requireRequest(); + assertEquals("/context/path/bulk", req.getPath()); + assertThat(req.getHeader("content-type"), equalToIgnoringCase("application/json; charset=utf-8")); + assertEquals(FAKE_DATA, req.getBody()); + } + } + + @Test + public void nothingIsSentForNullData() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + EventSender.Result result1 = es.sendAnalyticsEvents(null, 0, server.getUri()); + EventSender.Result result2 = es.sendDiagnosticEvent(null, server.getUri()); + + assertTrue(result1.isSuccess()); + assertTrue(result2.isSuccess()); + assertEquals(0, server.getRecorder().count()); + } + } + } + + @Test + public void 
nothingIsSentForEmptyData() throws Exception { + try (HttpServer server = HttpServer.start(eventsSuccessResponse())) { + try (EventSender es = makeEventSender()) { + EventSender.Result result1 = es.sendAnalyticsEvents(new byte[0], 0, server.getUri()); + EventSender.Result result2 = es.sendDiagnosticEvent(new byte[0], server.getUri()); + + assertTrue(result1.isSuccess()); + assertTrue(result2.isSuccess()); + assertEquals(0, server.getRecorder().count()); + } + } + } + + private void testUnrecoverableHttpError(int status) throws Exception { + Handler errorResponse = Handlers.status(status); + + try (HttpServer server = HttpServer.start(errorResponse)) { + try (EventSender es = makeEventSender()) { + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + + assertFalse(result.isSuccess()); + assertTrue(result.isMustShutDown()); + } + + server.getRecorder().requireRequest(); + + // it does not retry after this type of error, so there are no more requests + server.getRecorder().requireNoRequests(Duration.ofMillis(100)); + } + } + + private void testRecoverableHttpError(int status) throws Exception { + Handler errorResponse = Handlers.status(status); + Handler errorsThenSuccess = Handlers.sequential(errorResponse, errorResponse, eventsSuccessResponse()); + // send two errors in a row, because the flush will be retried one time + + try (HttpServer server = HttpServer.start(errorsThenSuccess)) { + try (EventSender es = makeEventSender()) { + EventSender.Result result = es.sendAnalyticsEvents(FAKE_DATA_BYTES, 1, server.getUri()); + + assertFalse(result.isSuccess()); + assertFalse(result.isMustShutDown()); + } + + server.getRecorder().requireRequest(); + server.getRecorder().requireRequest(); + server.getRecorder().requireNoRequests(Duration.ofMillis(100)); // only 2 requests total + } + } + + private Handler eventsSuccessResponse() { + return Handlers.status(202); + } + + private Handler addDateHeader(Date date) { + return Handlers.header("Date", httpDateFormat.format(date)); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticEventTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticEventTest.java new file mode 100644 index 0000000..2cb9194 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticEventTest.java @@ -0,0 +1,46 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.launchdarkly.sdk.internal.BaseTest; + +import org.junit.Test; + +import java.util.Collections; +import java.util.List; +import java.util.UUID; + +import static com.launchdarkly.sdk.internal.GsonHelpers.gsonInstance; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class DiagnosticEventTest extends BaseTest { + private static List testStreamInits = + Collections.singletonList(new DiagnosticEvent.StreamInit(1500, 100, true)); + + @Test + public void testSerialization() { + DiagnosticId diagnosticId = new DiagnosticId("SDK_KEY"); + DiagnosticEvent diagnosticStatisticsEvent = DiagnosticEvent.makeStatistics(2000, diagnosticId, 1000, 1, 2, 3, testStreamInits); + JsonObject jsonObject = gsonInstance().toJsonTree(diagnosticStatisticsEvent.value).getAsJsonObject(); + assertEquals(8, jsonObject.size()); + assertEquals("diagnostic", jsonObject.getAsJsonPrimitive("kind").getAsString()); + assertEquals(2000, 
jsonObject.getAsJsonPrimitive("creationDate").getAsLong()); + JsonObject idObject = jsonObject.getAsJsonObject("id"); + assertEquals("DK_KEY", idObject.getAsJsonPrimitive("sdkKeySuffix").getAsString()); + // Throws InvalidArgumentException on invalid UUID + @SuppressWarnings("unused") + UUID uuid = UUID.fromString(idObject.getAsJsonPrimitive("diagnosticId").getAsString()); + assertEquals(1000, jsonObject.getAsJsonPrimitive("dataSinceDate").getAsLong()); + assertEquals(1, jsonObject.getAsJsonPrimitive("droppedEvents").getAsLong()); + assertEquals(2, jsonObject.getAsJsonPrimitive("deduplicatedUsers").getAsLong()); + assertEquals(3, jsonObject.getAsJsonPrimitive("eventsInLastBatch").getAsLong()); + JsonArray initsJson = jsonObject.getAsJsonArray("streamInits"); + assertEquals(1, initsJson.size()); + JsonObject initJson = initsJson.get(0).getAsJsonObject(); + assertEquals(1500, initJson.getAsJsonPrimitive("timestamp").getAsInt()); + assertEquals(100, initJson.getAsJsonPrimitive("durationMillis").getAsInt()); + assertTrue(initJson.getAsJsonPrimitive("failed").getAsBoolean()); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticIdTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticIdTest.java new file mode 100644 index 0000000..49f249d --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticIdTest.java @@ -0,0 +1,53 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import com.launchdarkly.sdk.internal.BaseTest; + +import org.junit.Test; + +import java.util.UUID; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +@SuppressWarnings("javadoc") +public class DiagnosticIdTest extends BaseTest { + private static final Gson gson = new Gson(); + + @Test + public void hasUUID() { + DiagnosticId diagnosticId = new DiagnosticId("SDK_KEY"); + assertNotNull(diagnosticId.diagnosticId); + assertNotNull(UUID.fromString(diagnosticId.diagnosticId)); + } + + @Test + public void nullKeyIsSafe() { + // We can't send diagnostics without a key anyway, so we're just validating that the + // constructor won't crash with a null key + new DiagnosticId(null); + } + + @Test + public void shortKeyIsSafe() { + DiagnosticId diagnosticId = new DiagnosticId("foo"); + assertEquals("foo", diagnosticId.sdkKeySuffix); + } + + @Test + public void keyIsSuffix() { + DiagnosticId diagnosticId = new DiagnosticId("this_is_a_fake_key"); + assertEquals("ke_key", diagnosticId.sdkKeySuffix); + } + + @Test + public void gsonSerialization() { + DiagnosticId diagnosticId = new DiagnosticId("this_is_a_fake_key"); + JsonObject jsonObject = gson.toJsonTree(diagnosticId).getAsJsonObject(); + assertEquals(2, jsonObject.size()); + String id = jsonObject.getAsJsonPrimitive("diagnosticId").getAsString(); + assertNotNull(UUID.fromString(id)); + assertEquals("ke_key", jsonObject.getAsJsonPrimitive("sdkKeySuffix").getAsString()); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticStoreTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticStoreTest.java new file mode 100644 index 0000000..097ed57 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/DiagnosticStoreTest.java @@ -0,0 +1,197 @@ +package com.launchdarkly.sdk.internal.events; + +import com.launchdarkly.sdk.LDValue; +import 
com.launchdarkly.sdk.internal.BaseTest; +import com.launchdarkly.sdk.internal.events.DiagnosticStore.SdkDiagnosticParams; + +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; + +import static com.launchdarkly.testhelpers.JsonAssertions.isJsonArray; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonEqualsValue; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonProperty; +import static com.launchdarkly.testhelpers.JsonAssertions.jsonUndefined; +import static com.launchdarkly.testhelpers.JsonTestValue.jsonFromValue; +import static java.util.Collections.singletonMap; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertNotEquals; + +@SuppressWarnings("javadoc") +public class DiagnosticStoreTest extends BaseTest { + private static final String SDK_KEY = "key-abcdefg"; + private static final String SDK_NAME = "fake-sdk"; + private static final String SDK_VERSION = "1.2.3"; + private static final String PLATFORM_NAME = "fake-platform"; + + @Test + public void initEventBasicProperties() { + long now = System.currentTimeMillis(); + DiagnosticStore store = makeSimpleStore(); + DiagnosticEvent ie = store.getInitEvent(); + assertThat(ie.initEvent, is(true)); + assertThat(ie.value.get("creationDate").longValue(), greaterThanOrEqualTo(now)); + assertThat(ie.value.get("id").get("diagnosticId"), not(equalTo(LDValue.ofNull()))); + assertThat(ie.value.get("id").get("sdkKeySuffix").stringValue(), equalTo("bcdefg")); + } + + @Test + public void initEventSdkData() { + DiagnosticStore store = makeSimpleStore(); + DiagnosticEvent ie = store.getInitEvent(); + assertThat(jsonFromValue(ie.value), + jsonProperty("sdk", allOf( + jsonProperty("name", SDK_NAME), + jsonProperty("version", SDK_VERSION), + jsonProperty("wrapperName", jsonUndefined()), + jsonProperty("wrapperVersion", jsonUndefined()) + ))); + } + + @Test + public void initEventSdkDataWithWrapperName() { + DiagnosticStore store = new DiagnosticStore(new SdkDiagnosticParams( + SDK_KEY, SDK_NAME, SDK_VERSION, PLATFORM_NAME, null, + singletonMap("X-LaunchDarkly-Wrapper", "Scala"), + null + )); + DiagnosticEvent ie = store.getInitEvent(); + assertThat(jsonFromValue(ie.value), + jsonProperty("sdk", allOf( + jsonProperty("name", SDK_NAME), + jsonProperty("version", SDK_VERSION), + jsonProperty("wrapperName", "Scala"), + jsonProperty("wrapperVersion", jsonUndefined()) + ))); + } + + @Test + public void initEventSdkDataWithWrapperNameAndVersion() { + DiagnosticStore store = new DiagnosticStore(new SdkDiagnosticParams( + SDK_KEY, SDK_NAME, SDK_VERSION, PLATFORM_NAME, null, + singletonMap("X-LaunchDarkly-Wrapper", "Scala/0.1"), + null + )); + DiagnosticEvent ie = store.getInitEvent(); + assertThat(jsonFromValue(ie.value), + jsonProperty("sdk", allOf( + jsonProperty("name", SDK_NAME), + jsonProperty("version", SDK_VERSION), + jsonProperty("wrapperName", "Scala"), + jsonProperty("wrapperVersion", "0.1") + ))); + } + + @Test + public void platformDataFromSdk() { + DiagnosticStore store = new DiagnosticStore(new SdkDiagnosticParams( + SDK_KEY, SDK_NAME, SDK_VERSION, PLATFORM_NAME, + LDValue.buildObject().put("prop1", 2).put("prop2", 3).build(), + null, null 
+ )); + DiagnosticEvent ie = store.getInitEvent(); + assertThat(jsonFromValue(ie.value), + jsonProperty("platform", allOf( + jsonProperty("name", PLATFORM_NAME), + jsonProperty("prop1", 2), + jsonProperty("prop2", 3) + ))); + } + + @Test + public void configurationData() { + List configValues = Arrays.asList( + LDValue.buildObject() + .put(DiagnosticConfigProperty.EVENTS_CAPACITY.name, 1000) + .put(DiagnosticConfigProperty.USER_KEYS_CAPACITY.name, 2000) + .put(DiagnosticConfigProperty.ALL_ATTRIBUTES_PRIVATE.name, "yes") // ignored because of wrong type + .build(), + LDValue.of("abcdef"), // ignored because it's not an object + null, // no-op + LDValue.buildObject().put(DiagnosticConfigProperty.DATA_STORE_TYPE.name, "custom").build() + ); + DiagnosticStore store = new DiagnosticStore(new SdkDiagnosticParams( + SDK_KEY, SDK_NAME, SDK_VERSION, PLATFORM_NAME, null, null, + configValues + )); + DiagnosticEvent ie = store.getInitEvent(); + assertThat(jsonFromValue(ie.value), + jsonProperty("configuration", jsonEqualsValue( + LDValue.buildObject() + .put(DiagnosticConfigProperty.EVENTS_CAPACITY.name, 1000) + .put(DiagnosticConfigProperty.USER_KEYS_CAPACITY.name, 2000) + .put(DiagnosticConfigProperty.DATA_STORE_TYPE.name, "custom") + .build() + ))); + } + + @Test + public void createsDiagnosticStatisticsEvent() { + DiagnosticStore store = makeSimpleStore(); + long startDate = store.getDataSinceDate(); + DiagnosticEvent statsEvent = store.createEventAndReset(10, 15); + + assertThat(jsonFromValue(statsEvent.value), allOf( + jsonProperty("id", jsonProperty("diagnosticId", store.getDiagnosticId().diagnosticId)), + jsonProperty("droppedEvents", 10), + jsonProperty("deduplicatedUsers", 15), + jsonProperty("eventsInLastBatch", 0), + jsonProperty("dataSinceDate", startDate) + )); + } + + @Test + public void canRecordStreamInit() { + DiagnosticStore store = makeSimpleStore(); + store.recordStreamInit(1000, 200, false); + DiagnosticEvent statsEvent = store.createEventAndReset(0, 0); + + assertThat(jsonFromValue(statsEvent.value), + jsonProperty("streamInits", isJsonArray( + contains( + allOf( + jsonProperty("timestamp", 1000), + jsonProperty("durationMillis", 200), + jsonProperty("failed", false) + ) + ) + ))); + } + + @Test + public void canRecordEventsInBatch() { + DiagnosticStore store = makeSimpleStore(); + store.recordEventsInBatch(100); + DiagnosticEvent statsEvent = store.createEventAndReset(0, 0); + assertThat(jsonFromValue(statsEvent.value), + jsonProperty("eventsInLastBatch", 100)); + } + + @Test + public void resetsStatsOnCreate() throws InterruptedException { + DiagnosticStore store = makeSimpleStore(); + store.recordStreamInit(1000, 200, false); + store.recordEventsInBatch(100); + long startDate = store.getDataSinceDate(); + Thread.sleep(2); // so that dataSinceDate will be different + store.createEventAndReset(0, 0); + assertNotEquals(startDate, store.getDataSinceDate()); + DiagnosticEvent statsEvent = store.createEventAndReset(0, 0); + assertThat(jsonFromValue(statsEvent.value), allOf( + jsonProperty("eventsInLastBatch", 0), + jsonProperty("streamInits", isJsonArray(emptyIterable())) + )); + } + + private static DiagnosticStore makeSimpleStore() { + return new DiagnosticStore(new SdkDiagnosticParams(SDK_KEY, SDK_NAME, SDK_VERSION, PLATFORM_NAME, null, null, null)); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventContextFormatterTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventContextFormatterTest.java new 
file mode 100644 index 0000000..8e685f6 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventContextFormatterTest.java @@ -0,0 +1,181 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.common.collect.ImmutableList; +import com.google.gson.stream.JsonWriter; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.internal.BaseTest; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static com.launchdarkly.testhelpers.JsonAssertions.assertJsonEquals; + +@SuppressWarnings("javadoc") +@RunWith(Parameterized.class) +public class EventContextFormatterTest extends BaseTest { + private final LDContext context; + private final boolean allAttributesPrivate; + private final AttributeRef[] globalPrivateAttributes; + private final String expectedJson; + + public EventContextFormatterTest( + String name, + LDContext context, + boolean allAttributesPrivate, + AttributeRef[] globalPrivateAttributes, + String expectedJson + ) { + this.context = context; + this.allAttributesPrivate = allAttributesPrivate; + this.globalPrivateAttributes = globalPrivateAttributes; + this.expectedJson = expectedJson; + } + + // Note, due to a known issue with the Android test orchestrator (https://github.com/android/android-test/issues/837), + // none of the parameters for this parameterized test can be null. + + @Parameterized.Parameters(name = "{0}") + public static Iterable data() { + return ImmutableList.of( + new Object[] { + "no attributes private - single kind", + LDContext.builder("my-key").kind("org") + .name("my-name") + .set("attr1", "value1") + .build(), + false, + new AttributeRef[0], + "{\"kind\": \"org\", \"key\": \"my-key\", \"name\": \"my-name\", \"attr1\": \"value1\"}" + }, + new Object[] { + "no attributes private - multi-kind", + LDContext.createMulti( + LDContext.builder("org-key").kind("org") + .name("org-name") + .build(), + LDContext.builder("user-key") + .name("user-name") + .set("attr1", "value1") + .build() + ), + false, + new AttributeRef[0], + "{" + + "\"kind\": \"multi\"," + + "\"org\": {\"key\": \"org-key\", \"name\": \"org-name\"}," + + "\"user\": {\"key\": \"user-key\", \"name\": \"user-name\", \"attr1\": \"value1\"}" + + "}" + }, + new Object[] { + "anonymous", + LDContext.builder("my-key").kind("org").anonymous(true).build(), + false, + new AttributeRef[0], + "{\"kind\": \"org\", \"key\": \"my-key\", \"anonymous\": true}" + }, + new Object[] { + "all attributes private globally", + LDContext.builder("my-key").kind("org") + .name("my-name") + .set("attr1", "value1") + .build(), + true, + new AttributeRef[0], + "{" + + "\"kind\": \"org\"," + + "\"key\": \"my-key\"," + + "\"_meta\": {" + + "\"redactedAttributes\": [\"attr1\", \"name\"]" + + "}" + + "}" + }, + new Object[] { + "some top-level attributes private", + LDContext.builder("my-key").kind("org") + .name("my-name") + .set("attr1", "value1") + .set("attr2", "value2") + .privateAttributes("attr2") + .build(), + false, + new AttributeRef[] { AttributeRef.fromLiteral("name") }, + "{" + + "\"kind\": \"org\"," + + "\"key\": \"my-key\"," + + "\"attr1\": \"value1\"," + + "\"_meta\": {" + + "\"redactedAttributes\": [\"attr2\", \"name\"]" + + "}" + + "}" + }, + new Object[] { + 
"partially redacting object attributes", + LDContext.builder("my-key") + .set("address", LDValue.parse("{\"street\": \"17 Highbrow St.\", \"city\": \"London\"}")) + .set("complex", LDValue.parse("{\"a\": {\"b\": {\"c\": 1, \"d\": 2}, \"e\": 3}, \"f\": 4, \"g\": 5}")) + .privateAttributes("/complex/a/b/d", "/complex/a/b/nonexistent-prop", "/complex/f", "/complex/g/g-is-not-an-object") + .build(), + false, + new AttributeRef[] { AttributeRef.fromPath("/address/street") }, + "{" + + "\"kind\": \"user\"," + + "\"key\": \"my-key\"," + + "\"address\": {\"city\": \"London\"}," + + "\"complex\": {\"a\": {\"b\": {\"c\": 1}, \"e\": 3}, \"g\": 5}," + + "\"_meta\": {" + + "\"redactedAttributes\": [\"/address/street\", \"/complex/a/b/d\", \"/complex/f\"]" + + "}" + + "}" + } + ); + } + + @Test + public void testOutput() throws Exception { + EventContextFormatter f = new EventContextFormatter(allAttributesPrivate, globalPrivateAttributes); + StringWriter sw = new StringWriter(); + JsonWriter jw = new JsonWriter(sw); + + f.write(context, jw, false); + jw.flush(); + + String canonicalizedOutput = canonicalizeOutputJson(sw.toString()); + assertJsonEquals(expectedJson, canonicalizedOutput); + } + + private static String canonicalizeOutputJson(String json) { + return valueWithRedactedAttributesSorted(LDValue.parse(json)).toJsonString(); + } + + private static LDValue valueWithRedactedAttributesSorted(LDValue value) { + switch (value.getType()) { + case OBJECT: + ObjectBuilder ob = LDValue.buildObject(); + for (String key: value.keys()) { + LDValue propValue = value.get(key); + if (key.equals("redactedAttributes")) { + List strings = new ArrayList<>(); + for (LDValue element: propValue.values()) { + strings.add(element.stringValue()); + } + Collections.sort(strings); + ob.put(key, LDValue.Convert.String.arrayFrom(strings)); + } else { + ob.put(key, valueWithRedactedAttributesSorted(propValue)); + } + } + return ob.build(); + default: + return value; + } + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventOutputTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventOutputTest.java new file mode 100644 index 0000000..3e08a1a --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventOutputTest.java @@ -0,0 +1,738 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.common.collect.ImmutableSet; +import com.google.gson.Gson; +import com.launchdarkly.sdk.AttributeRef; +import com.launchdarkly.sdk.ContextBuilder; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.EvaluationReason; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.ObjectBuilder; +import com.launchdarkly.sdk.internal.events.Event.FeatureRequest; +import com.launchdarkly.sdk.internal.events.EventSummarizer.EventSummary; + +import org.junit.Test; + +import java.io.IOException; +import java.io.StringWriter; + +import static com.launchdarkly.sdk.EvaluationDetail.NO_VARIATION; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.junit.Assert.assertEquals; + +@SuppressWarnings("javadoc") +public class EventOutputTest extends BaseEventTest { + private static final Gson gson = new Gson(); + + private final ContextBuilder contextBuilderWithAllAttributes = LDContext.builder("userkey") + .name("me") + .set("custom1", "value1") + 
.set("custom2", "value2"); + private static final LDValue contextJsonWithAllAttributes = parseValue("{" + + "\"kind\":\"user\"," + + "\"key\":\"userkey\"," + + "\"custom1\":\"value1\"," + + "\"custom2\":\"value2\"," + + "\"name\":\"me\"" + + "}"); + + @Test + public void allAttributesAreSerialized() throws Exception { + testInlineContextSerialization(contextBuilderWithAllAttributes.build(), contextJsonWithAllAttributes, + defaultEventsConfig()); + } + + @Test + public void contextKeysAreSetInsteadOfContextWhenNotInlined() throws Exception { + testContextKeysSerialization( + LDContext.create("userkey"), + LDValue.buildObject().put("user", "userkey").build() + ); + + testContextKeysSerialization( + LDContext.create(ContextKind.of("kind1"), "key1"), + LDValue.buildObject().put("kind1", "key1").build() + ); + + testContextKeysSerialization( + LDContext.createMulti( + LDContext.create(ContextKind.of("kind1"), "key1"), + LDContext.create(ContextKind.of("kind2"), "key2")), + LDValue.buildObject().put("kind1", "key1").put("kind2", "key2").build() + ); + } + + @Test + public void allAttributesPrivateMakesAttributesPrivate() throws Exception { + // We test this behavior in more detail in EventContextFormatterTest, but here we're verifying that the + // EventOutputFormatter is actually using EventContextFormatter and configuring it correctly. + LDContext context = LDContext.builder("userkey") + .name("me") + .build(); + LDValue expectedJson = LDValue.buildObject() + .put("kind", "user") + .put("key", context.getKey()) + .put("_meta", LDValue.parse("{\"redactedAttributes\":[\"name\"]}")) + .build(); + EventsConfiguration config = makeEventsConfig(true, null); + testInlineContextSerialization(context, expectedJson, config); + } + + @Test + public void globalPrivateAttributeNamesMakeAttributesPrivate() throws Exception { + // See comment in allAttributesPrivateMakesAttributesPrivate + LDContext context = LDContext.builder("userkey") + .name("me") + .set("attr1", "value1") + .build(); + LDValue expectedJson = LDValue.buildObject() + .put("kind", "user") + .put("key", context.getKey()) + .put("name", "me") + .put("_meta", LDValue.parse("{\"redactedAttributes\":[\"attr1\"]}")) + .build(); + EventsConfiguration config = makeEventsConfig(false, ImmutableSet.of(AttributeRef.fromLiteral("attr1"))); + testInlineContextSerialization(context, expectedJson, config); + } + + @Test + public void perContextPrivateAttributesMakeAttributePrivate() throws Exception { + // See comment in allAttributesPrivateMakesAttributesPrivate + LDContext context = LDContext.builder("userkey") + .name("me") + .set("attr1", "value1") + .privateAttributes("attr1") + .build(); + LDValue expectedJson = LDValue.buildObject() + .put("kind", "user") + .put("key", context.getKey()) + .put("name", "me") + .put("_meta", LDValue.parse("{\"redactedAttributes\":[\"attr1\"]}")) + .build(); + EventsConfiguration config = makeEventsConfig(false, null); + testInlineContextSerialization(context, expectedJson, config); + } + + private ObjectBuilder buildFeatureEventProps(String key, String userKey) { + return LDValue.buildObject() + .put("kind", "feature") + .put("key", key) + .put("creationDate", 100000) + .put("context", LDValue.buildObject().put("kind", "user").put("key", userKey).build()); + } + + private ObjectBuilder buildFeatureEventProps(String key) { + return buildFeatureEventProps(key, "userkey"); + } + + @Test + public void featureEventIsSerialized() throws Exception { + LDContext context = LDContext.builder("userkey").name("me").build(); + 
LDValue value = LDValue.of("flagvalue"), defaultVal = LDValue.of("defaultvalue"); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + FeatureRequest feWithVariation = featureEvent(context, FLAG_KEY).flagVersion(FLAG_VERSION).variation(1) + .value(value).defaultValue(defaultVal).build(); + LDValue feJson1 = buildFeatureEventProps(FLAG_KEY) + .put("version", FLAG_VERSION) + .put("variation", 1) + .put("value", value) + .put("default", defaultVal) + .put("context", LDValue.buildObject().put("kind", "user").put("key", "userkey").put("name", "me").build()) + .build(); + assertJsonEquals(feJson1, getSingleOutputEvent(f, feWithVariation)); + + FeatureRequest feWithoutVariationOrDefault = featureEvent(context, FLAG_KEY).flagVersion(FLAG_VERSION) + .variation(NO_VARIATION).value(value).defaultValue(null).build(); + LDValue feJson2 = buildFeatureEventProps(FLAG_KEY) + .put("version", FLAG_VERSION) + .put("value", value) + .put("context", LDValue.buildObject().put("kind", "user").put("key", "userkey").put("name", "me").build()) + .build(); + assertJsonEquals(feJson2, getSingleOutputEvent(f, feWithoutVariationOrDefault)); + + FeatureRequest feWithReason = featureEvent(context, FLAG_KEY).flagVersion(FLAG_VERSION).variation(1) + .value(value).defaultValue(defaultVal).reason(EvaluationReason.fallthrough()).build(); + LDValue feJson3 = buildFeatureEventProps(FLAG_KEY) + .put("version", FLAG_VERSION) + .put("variation", 1) + .put("value", value) + .put("default", defaultVal) + .put("reason", LDValue.buildObject().put("kind", "FALLTHROUGH").build()) + .put("context", LDValue.buildObject().put("kind", "user").put("key", "userkey").put("name", "me").build()) + .build(); + assertJsonEquals(feJson3, getSingleOutputEvent(f, feWithReason)); + + Event.FeatureRequest debugEvent = feWithVariation.toDebugEvent(); + LDValue feJson5 = LDValue.buildObject() + .put("kind", "debug") + .put("key", FLAG_KEY) + .put("creationDate", 100000) + .put("version", FLAG_VERSION) + .put("variation", 1) + .put("context", LDValue.buildObject().put("kind", "user").put("key", "userkey").put("name", "me").build()) + .put("value", value) + .put("default", defaultVal) + .build(); + assertJsonEquals(feJson5, getSingleOutputEvent(f, debugEvent)); + + Event.FeatureRequest prereqEvent = featureEvent(context, FLAG_KEY).flagVersion(FLAG_VERSION) + .variation(1).value(value).defaultValue(null).prereqOf("parent").build(); + LDValue feJson6 = buildFeatureEventProps(FLAG_KEY) + .put("version", 11) + .put("variation", 1) + .put("value", "flagvalue") + .put("prereqOf", "parent") + .put("context", LDValue.buildObject().put("kind", "user").put("key", "userkey").put("name", "me").build()) + .build(); + assertJsonEquals(feJson6, getSingleOutputEvent(f, prereqEvent)); + } + + @Test + public void featureEventRedactsAnonymousContextAttributes() throws Exception { + LDValue value = LDValue.of("flagvalue"), defaultVal = LDValue.of("defaultvalue"); + + // Single-kind context redaction + LDContext user_context = LDContext.builder("userkey").anonymous(true).name("me").set("age", 42).build(); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + FeatureRequest feWithVariation1 = featureEvent(user_context, FLAG_KEY).flagVersion(FLAG_VERSION).variation(1) + .value(value).defaultValue(defaultVal).build(); + LDValue contextJson = LDValue.buildObject() + .put("kind", "user") + .put("key", "userkey") + .put("anonymous", true) + .put("_meta", LDValue.parse("{\"redactedAttributes\":[\"name\", \"age\"]}")) + .build(); + 
LDValue feJson1 = buildFeatureEventProps(FLAG_KEY) + .put("version", FLAG_VERSION) + .put("variation", 1) + .put("value", value) + .put("default", defaultVal) + .put("context", contextJson) + .build(); + assertJsonEquals(feJson1, getSingleOutputEvent(f, feWithVariation1)); + + // Multi-kind context redaction + LDContext org_context = LDContext.builder("orgkey").anonymous(false).kind("org").name("me").set("age", 42).build(); + LDContext multi_context = LDContext.createMulti(user_context, org_context); + + FeatureRequest feWithVariation2 = featureEvent(multi_context, FLAG_KEY).flagVersion(FLAG_VERSION).variation(1) + .value(value).defaultValue(defaultVal).build(); + LDValue userJson = LDValue.buildObject() + .put("key", "userkey") + .put("anonymous", true) + .put("_meta", LDValue.parse("{\"redactedAttributes\":[\"name\", \"age\"]}")) + .build(); + LDValue orgJson = LDValue.buildObject() + .put("key", "orgkey") + .put("name", "me") + .put("age", 42) + .build(); + contextJson = LDValue.buildObject() + .put("kind", "multi") + .put("user", userJson) + .put("org", orgJson) + .build(); + + LDValue feJson2 = buildFeatureEventProps(FLAG_KEY) + .put("version", FLAG_VERSION) + .put("variation", 1) + .put("value", value) + .put("default", defaultVal) + .put("context", contextJson) + .build(); + assertJsonEquals(feJson2, getSingleOutputEvent(f, feWithVariation2)); + } + + @Test + public void identifyEventIsSerialized() throws IOException { + LDContext context = LDContext.builder("userkey").name("me").build(); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + Event.Identify ie = identifyEvent(context); + LDValue ieJson = parseValue("{" + + "\"kind\":\"identify\"," + + "\"creationDate\":100000," + + "\"context\":{\"kind\":\"user\",\"key\":\"userkey\",\"name\":\"me\"}" + + "}"); + assertJsonEquals(ieJson, getSingleOutputEvent(f, ie)); + } + + @Test + public void customEventIsSerialized() throws IOException { + LDContext context = LDContext.builder("userkey").name("me").build(); + LDValue contextKeysJson = LDValue.buildObject().put("user", context.getKey()).build(); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + Event.Custom ceWithoutData = customEvent(context, "customkey").build(); + LDValue ceJson1 = parseValue("{" + + "\"kind\":\"custom\"," + + "\"creationDate\":100000," + + "\"key\":\"customkey\"," + + "\"contextKeys\":" + contextKeysJson + + "}"); + assertJsonEquals(ceJson1, getSingleOutputEvent(f, ceWithoutData)); + + Event.Custom ceWithData = customEvent(context, "customkey").data(LDValue.of("thing")).build(); + LDValue ceJson2 = parseValue("{" + + "\"kind\":\"custom\"," + + "\"creationDate\":100000," + + "\"key\":\"customkey\"," + + "\"contextKeys\":" + contextKeysJson + "," + + "\"data\":\"thing\"" + + "}"); + assertJsonEquals(ceJson2, getSingleOutputEvent(f, ceWithData)); + + Event.Custom ceWithMetric = customEvent(context, "customkey").metricValue(2.5).build(); + LDValue ceJson3 = parseValue("{" + + "\"kind\":\"custom\"," + + "\"creationDate\":100000," + + "\"key\":\"customkey\"," + + "\"contextKeys\":" + contextKeysJson + "," + + "\"metricValue\":2.5" + + "}"); + assertJsonEquals(ceJson3, getSingleOutputEvent(f, ceWithMetric)); + + Event.Custom ceWithDataAndMetric = customEvent(context, "customkey").data(LDValue.of("thing")) + .metricValue(2.5).build(); + LDValue ceJson4 = parseValue("{" + + "\"kind\":\"custom\"," + + "\"creationDate\":100000," + + "\"key\":\"customkey\"," + + "\"contextKeys\":" + contextKeysJson + "," + + 
"\"data\":\"thing\"," + + "\"metricValue\":2.5" + + "}"); + assertJsonEquals(ceJson4, getSingleOutputEvent(f, ceWithDataAndMetric)); + } + + @Test + public void summaryEventIsSerialized() throws Exception { + LDValue value1a = LDValue.of("value1a"), value2a = LDValue.of("value2a"), value2b = LDValue.of("value2b"), + default1 = LDValue.of("default1"), default2 = LDValue.of("default2"), default3 = LDValue.of("default3"); + LDContext context1 = LDContext.create("key1"); + LDContext context2 = LDContext.createMulti(context1, LDContext.create(ContextKind.of("kind2"), "key2")); + + EventSummarizer es = new EventSummarizer(); + + es.summarizeEvent(1000, "first", 11, 1, value1a, default1, context1); // context1 has kind "user" + + es.summarizeEvent(1000, "second", 21, 1, value2a, default2, context1); + + es.summarizeEvent(1001, "first", 11, 1, value1a, default1, context1); + es.summarizeEvent(1001, "first", 12, 1, value1a, default1, context2); // context2 has kind "user" and kind "kind2" + + es.summarizeEvent(1001, "second", 21, 2, value2b, default2, context1); + es.summarizeEvent(1002, "second", 21, -1, default2, default2, context1); + + es.summarizeEvent(1002, "third", -1, -1, default3, default3, context1); + + EventSummary summary = es.getSummaryAndReset(); + + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + StringWriter w = new StringWriter(); + int count = f.writeOutputEvents(new Event[0], summary, w); + assertEquals(1, count); + LDValue outputEvent = parseValue(w.toString()).get(0); + + assertEquals("summary", outputEvent.get("kind").stringValue()); + assertEquals(1000, outputEvent.get("startDate").intValue()); + assertEquals(1002, outputEvent.get("endDate").intValue()); + + LDValue featuresJson = outputEvent.get("features"); + assertEquals(3, featuresJson.size()); + + LDValue firstJson = featuresJson.get("first"); + assertEquals("default1", firstJson.get("default").stringValue()); + assertThat(firstJson.get("contextKinds").values(), containsInAnyOrder( + LDValue.of("user"), LDValue.of("kind2"))); + assertThat(firstJson.get("counters").values(), containsInAnyOrder( + parseValue("{\"value\":\"value1a\",\"variation\":1,\"version\":11,\"count\":2}"), + parseValue("{\"value\":\"value1a\",\"variation\":1,\"version\":12,\"count\":1}") + )); + + LDValue secondJson = featuresJson.get("second"); + assertEquals("default2", secondJson.get("default").stringValue()); + assertThat(secondJson.get("contextKinds").values(), contains(LDValue.of("user"))); + assertThat(secondJson.get("counters").values(), containsInAnyOrder( + parseValue("{\"value\":\"value2a\",\"variation\":1,\"version\":21,\"count\":1}"), + parseValue("{\"value\":\"value2b\",\"variation\":2,\"version\":21,\"count\":1}"), + parseValue("{\"value\":\"default2\",\"version\":21,\"count\":1}") + )); + + LDValue thirdJson = featuresJson.get("third"); + assertEquals("default3", thirdJson.get("default").stringValue()); + assertThat(thirdJson.get("contextKinds").values(), contains(LDValue.of("user"))); + assertThat(thirdJson.get("counters").values(), contains( + parseValue("{\"unknown\":true,\"value\":\"default3\",\"count\":1}") + )); + } + + @Test + public void migrationOpEventIsSerialized() throws IOException { + LDContext context = LDContext.builder("user-key").name("me").build(); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + Event.MigrationOp event = new Event.MigrationOp( + 0, + context, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + 
EvaluationReason.fallthrough(false), + 2, + "read", + new Event.MigrationOp.InvokedMeasurement(false, true), + new Event.MigrationOp.ConsistencyMeasurement(true, 1), + new Event.MigrationOp.LatencyMeasurement(100l, 50l), + new Event.MigrationOp.ErrorMeasurement(false, true) + ); + + LDValue received = getSingleOutputEvent(f, event); + LDValue expected = LDValue.buildObject() + .put("operation", "read") + .put("kind", "migration_op") + .put("creationDate", 0) + .put("evaluation", LDValue.buildObject() + .put("key", "migration-key") + .put("variation", 1) + .put("version", 2) + .put("value", "live") + .put("default", "off") + .put("reason", LDValue.buildObject() + .put("kind", "FALLTHROUGH") + .build()).build()) + .put("contextKeys", LDValue.buildObject() + .put("user", "user-key") + .build()) + .put("samplingRatio", 2) + .put("measurements", LDValue.buildArray() + .add(LDValue.buildObject() + .put("key", "invoked") + .put("values", LDValue.buildObject() + .put("new", true) + .build()) + .build()) + .add(LDValue.buildObject() + .put("key", "consistent") + .put("value", true) + .build()) + .add(LDValue.buildObject() + .put("key", "latency_ms") + .put("values", LDValue.buildObject() + .put("old", 100) + .put("new", 50) + .build()) + .build()) + .add(LDValue.buildObject() + .put("key", "error") + .put("values", LDValue.buildObject() + .put("new", true) + .build()) + .build()) + .build()) + .build(); + + assertJsonEquals(expected, received); + } + + @Test + public void migrationOpEventSerializationCanExcludeOptionalItems() throws IOException { + LDContext context = LDContext.builder("user-key").name("me").build(); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + Event.MigrationOp event = new Event.MigrationOp( + 0, + context, + "migration-key", + -1, + -1, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 1, + "read", + new Event.MigrationOp.InvokedMeasurement(true, false), + null, + null, + null + ); + + LDValue received1 = getSingleOutputEvent(f, event); + Event.MigrationOp event2 = new Event.MigrationOp( + 0, + context, + "migration-key", + -1, + -1, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 1, + "read", + new Event.MigrationOp.InvokedMeasurement(true, false), + null, + // Null measurement, versus a measurement containing no values, should behave the same. 
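+ // (event2 below uses empty LatencyMeasurement/ErrorMeasurement instances and is expected to serialize to the
+ // same output as the nulls above, with no latency or error entries in "measurements")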
+ new Event.MigrationOp.LatencyMeasurement(null, null), + new Event.MigrationOp.ErrorMeasurement(false, false) + ); + LDValue received2 = getSingleOutputEvent(f, event2); + + LDValue expected = LDValue.buildObject() + .put("operation", "read") + .put("kind", "migration_op") + .put("creationDate", 0) + .put("evaluation", LDValue.buildObject() + .put("key", "migration-key") + .put("value", "live") + .put("default", "off") + .put("reason", LDValue.buildObject() + .put("kind", "FALLTHROUGH") + .build()).build()) + .put("contextKeys", LDValue.buildObject() + .put("user", "user-key") + .build()) + .put("measurements", LDValue.buildArray() + .add(LDValue.buildObject() + .put("key", "invoked") + .put("values", LDValue.buildObject() + .put("old", true) + .build()) + .build()) + .build()) + .build(); + + assertJsonEquals(expected, received1); + assertJsonEquals(expected, received2); + } + + @Test + public void migrationOpEventCanSerializeDifferentLatencyPermutations() throws IOException { + LDContext context = LDContext.builder("user-key").name("me").build(); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + Event.MigrationOp event1 = new Event.MigrationOp( + 0, + context, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 2, + "read", + new Event.MigrationOp.InvokedMeasurement(false, true), + null, + new Event.MigrationOp.LatencyMeasurement(null, 50l), + null + ); + + LDValue received1 = getSingleOutputEvent(f, event1); + assertJsonEquals(LDValue.buildObject() + .put("key", "latency_ms") + .put("values", LDValue.buildObject() + .put("new", 50) + .build()) + .build(), received1.get("measurements").get(1)); + + Event.MigrationOp event2 = new Event.MigrationOp( + 0, + context, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 2, + "read", + new Event.MigrationOp.InvokedMeasurement(false, true), + null, + new Event.MigrationOp.LatencyMeasurement(50l, null), + null + ); + + LDValue received2 = getSingleOutputEvent(f, event2); + assertJsonEquals(LDValue.buildObject() + .put("key", "latency_ms") + .put("values", LDValue.buildObject() + .put("old", 50) + .build()) + .build(), received2.get("measurements").get(1)); + + Event.MigrationOp event3 = new Event.MigrationOp( + 0, + context, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 2, + "read", + new Event.MigrationOp.InvokedMeasurement(false, true), + null, + new Event.MigrationOp.LatencyMeasurement(50l, 150l), + null + ); + + LDValue received3 = getSingleOutputEvent(f, event3); + assertJsonEquals(LDValue.buildObject() + .put("key", "latency_ms") + .put("values", LDValue.buildObject() + .put("old", 50) + .put("new", 150) + .build()) + .build(), received3.get("measurements").get(1)); + } + + @Test + public void migrationOpEventCanSerializeDifferentErrorPermutations() throws IOException { + LDContext context = LDContext.builder("user-key").name("me").build(); + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + + Event.MigrationOp event1 = new Event.MigrationOp( + 0, + context, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 2, + "read", + new Event.MigrationOp.InvokedMeasurement(false, true), + null, + null, + new Event.MigrationOp.ErrorMeasurement(true, false) + ); + + LDValue received1 = getSingleOutputEvent(f, event1); + 
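+ // With only the "old" error flag set, the serialized error measurement should contain just {"old": true}.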
assertJsonEquals(LDValue.buildObject() + .put("key", "error") + .put("values", LDValue.buildObject() + .put("old", true) + .build()) + .build(), received1.get("measurements").get(1)); + + Event.MigrationOp event2 = new Event.MigrationOp( + 0, + context, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 2, + "read", + new Event.MigrationOp.InvokedMeasurement(false, true), + null, + null, + new Event.MigrationOp.ErrorMeasurement(false, true) + ); + + LDValue received2 = getSingleOutputEvent(f, event2); + assertJsonEquals(LDValue.buildObject() + .put("key", "error") + .put("values", LDValue.buildObject() + .put("new", true) + .build()) + .build(), received2.get("measurements").get(1)); + + Event.MigrationOp event3 = new Event.MigrationOp( + 0, + context, + "migration-key", + 1, + 2, + LDValue.of("live"), + LDValue.of("off"), + EvaluationReason.fallthrough(false), + 2, + "read", + new Event.MigrationOp.InvokedMeasurement(false, true), + null, + null, + new Event.MigrationOp.ErrorMeasurement(true, true) + ); + + LDValue received3 = getSingleOutputEvent(f, event3); + assertJsonEquals(LDValue.buildObject() + .put("key", "error") + .put("values", LDValue.buildObject() + .put("old", true) + .put("new", true) + .build()) + .build(), received3.get("measurements").get(1)); + } + + @Test + public void unknownEventClassIsNotSerialized() throws Exception { + // This shouldn't be able to happen in reality. + Event event = new FakeEventClass(1000, LDContext.create("user")); + + EventOutputFormatter f = new EventOutputFormatter(defaultEventsConfig()); + StringWriter w = new StringWriter(); + f.writeOutputEvents(new Event[] { event }, new EventSummary(), w); + + assertEquals("[]", w.toString()); + } + + private static class FakeEventClass extends Event { + public FakeEventClass(long creationDate, LDContext context) { + super(creationDate, context); + } + } + + private static LDValue parseValue(String json) { + return gson.fromJson(json, LDValue.class); + } + + private LDValue getSingleOutputEvent(EventOutputFormatter f, Event event) throws IOException { + StringWriter w = new StringWriter(); + int count = f.writeOutputEvents(new Event[] { event }, new EventSummary(), w); + assertEquals(1, count); + return parseValue(w.toString()).get(0); + } + + private void testContextKeysSerialization(LDContext context, LDValue expectedJsonValue) throws IOException { + EventsConfiguration config = makeEventsConfig(false, null); + EventOutputFormatter f = new EventOutputFormatter(config); + + Event.Custom customEvent = customEvent(context, "eventkey").build(); + LDValue outputEvent = getSingleOutputEvent(f, customEvent); + assertJsonEquals(expectedJsonValue, outputEvent.get("contextKeys")); + assertJsonEquals(LDValue.ofNull(), outputEvent.get("context")); + } + + private void testInlineContextSerialization(LDContext context, LDValue expectedJsonValue, EventsConfiguration baseConfig) throws IOException { + EventsConfiguration config = makeEventsConfig(baseConfig.allAttributesPrivate, baseConfig.privateAttributes); + EventOutputFormatter f = new EventOutputFormatter(config); + + Event.FeatureRequest featureEvent = featureEvent(context, FLAG_KEY).build(); + LDValue outputEvent = getSingleOutputEvent(f, featureEvent); + assertJsonEquals(LDValue.ofNull(), outputEvent.get("contextKeys")); + assertJsonEquals(expectedJsonValue, outputEvent.get("context")); + + + Event.Identify identifyEvent = identifyEvent(context); + outputEvent = getSingleOutputEvent(f, 
identifyEvent); + assertJsonEquals(LDValue.ofNull(), outputEvent.get("contextKeys")); + assertJsonEquals(expectedJsonValue, outputEvent.get("context")); + + Event.Index indexEvent = new Event.Index(0, context); + outputEvent = getSingleOutputEvent(f, indexEvent); + assertJsonEquals(LDValue.ofNull(), outputEvent.get("contextKeys")); + assertJsonEquals(expectedJsonValue, outputEvent.get("context")); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventSummarizerTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventSummarizerTest.java new file mode 100644 index 0000000..8426512 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/EventSummarizerTest.java @@ -0,0 +1,199 @@ +package com.launchdarkly.sdk.internal.events; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.launchdarkly.sdk.ContextKind; +import com.launchdarkly.sdk.LDContext; +import com.launchdarkly.sdk.LDValue; +import com.launchdarkly.sdk.internal.BaseTest; +import com.launchdarkly.sdk.internal.events.EventSummarizer.CounterValue; +import com.launchdarkly.sdk.internal.events.EventSummarizer.EventSummary; +import com.launchdarkly.sdk.internal.events.EventSummarizer.FlagInfo; +import com.launchdarkly.sdk.internal.events.EventSummarizer.SimpleIntKeyedMap; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +@SuppressWarnings("javadoc") +public class EventSummarizerTest extends BaseTest { + private static final LDContext context = LDContext.create("key"); + + @Test + public void summarizerCanBeCleared() { + EventSummarizer es = new EventSummarizer(); + assertTrue(es.isEmpty()); + + es.summarizeEvent(1000, "flagkey", 1, 0, LDValue.ofNull(), LDValue.ofNull(), context); + + assertFalse(es.isEmpty()); + + es.clear(); + + assertTrue(es.isEmpty()); + } + + @Test + public void summarizeEventSetsStartAndEndDates() { + EventSummarizer es = new EventSummarizer(); + + for (long timestamp: new long[] { 2000, 1000, 1500 }) { + es.summarizeEvent(timestamp, "flagkey", 1, 0, LDValue.ofNull(), LDValue.ofNull(), context); + } + + EventSummarizer.EventSummary data = es.getSummaryAndReset(); + + assertEquals(1000, data.startDate); + assertEquals(2000, data.endDate); + } + + @Test + public void summarizeEventIncrementsCounters() { + EventSummarizer es = new EventSummarizer(); + String flagKey1 = "key1", flagKey2 = "key2", unknownFlagKey = "badkey"; + int flagVersion1 = 11, flagVersion2 = 22; + LDValue value1 = LDValue.of("value1"), value2 = LDValue.of("value2"), value99 = LDValue.of("value99"), + default1 = LDValue.of("default1"), default2 = LDValue.of("default2"), default3 = LDValue.of("default3"); + LDContext multiKindContext = LDContext.createMulti( + context, LDContext.create(ContextKind.of("kind2"), "key2")); + long timestamp = 1000; + + es.summarizeEvent(timestamp, flagKey1, flagVersion1, 1, value1, default1, context); + es.summarizeEvent(timestamp, flagKey1, flagVersion1, 2, value2, default1, context); + es.summarizeEvent(timestamp, flagKey2, flagVersion2, 1, value99, default2, multiKindContext); + es.summarizeEvent(timestamp, flagKey1, flagVersion1, 1, value1, default1, 
context); + es.summarizeEvent(timestamp, unknownFlagKey, -1, -1, default3, default3, context); + + EventSummarizer.EventSummary data = es.getSummaryAndReset(); + + assertThat(data.counters, equalTo(ImmutableMap.builder() + .put(flagKey1, new FlagInfo(default1, + new SimpleIntKeyedMap<SimpleIntKeyedMap<CounterValue>>() + .put(flagVersion1, new SimpleIntKeyedMap<CounterValue>() + .put(1, new CounterValue(2, value1)) + .put(2, new CounterValue(1, value2)) + ), + ImmutableSet.of("user"))) + .put(flagKey2, new FlagInfo(default2, + new SimpleIntKeyedMap<SimpleIntKeyedMap<CounterValue>>() + .put(flagVersion2, new SimpleIntKeyedMap<CounterValue>() + .put(1, new CounterValue(1, value99)) + ), + ImmutableSet.of("user", "kind2"))) + .put(unknownFlagKey, new FlagInfo(default3, + new SimpleIntKeyedMap<SimpleIntKeyedMap<CounterValue>>() + .put(-1, new SimpleIntKeyedMap<CounterValue>() + .put(-1, new CounterValue(1, default3)) + ), + ImmutableSet.of("user"))) + .build())); + } + + // The following implementations are used only in debug/test code, but may as well test them + + @Test + public void counterValueEquality() { + CounterValue value1 = new CounterValue(1, LDValue.of("a")); + CounterValue value2 = new CounterValue(1, LDValue.of("a")); + assertEquals(value1, value2); + assertEquals(value2, value1); + + for (CounterValue notEqualValue: new CounterValue[] { + new CounterValue(2, LDValue.of("a")), + new CounterValue(1, LDValue.of("b")) + }) { + assertNotEquals(value1, notEqualValue); + assertNotEquals(notEqualValue, value1); + + assertNotEquals(value1, null); + assertNotEquals(value1, "x"); + } + } + + @Test + public void counterValueToString() { + assertEquals("(1,\"a\")", new CounterValue(1, LDValue.of("a")).toString()); + } + + @Test + public void eventSummaryEquality() { + String key1 = "key1", key2 = "key2"; + int variation1 = 0, variation2 = 1, variation3 = 2, version1 = 10, version2 = 20; + LDValue value1 = LDValue.of(1), value2 = LDValue.of(2), value3 = LDValue.of(3), + default1 = LDValue.of(-1), default2 = LDValue.of(-2); + + EventSummarizer es1 = new EventSummarizer(); + es1.summarizeEvent(1000, key1, version1, variation1, value1, default1, context); + es1.summarizeEvent(1000, key1, version1, variation1, value1, default1, context); + es1.summarizeEvent(1000, key1, version2, variation2, value2, default1, context); + es1.summarizeEvent(2000, key2, version2, variation3, value3, default2, context); + + EventSummarizer es2 = new EventSummarizer(); // same operations in different order + es2.summarizeEvent(2000, key2, version2, variation3, value3, default2, context); + es2.summarizeEvent(1000, key1, version1, variation1, value1, default1, context); + es2.summarizeEvent(1000, key1, version2, variation2, value2, default1, context); + es2.summarizeEvent(1000, key1, version1, variation1, value1, default1, context); + + EventSummarizer es3 = new EventSummarizer(); // same operations with different start time + es3.summarizeEvent(1100, key1, version1, variation1, value1, default1, context); + es3.summarizeEvent(1100, key1, version1, variation1, value1, default1, context); + es3.summarizeEvent(1100, key1, version2, variation2, value2, default1, context); + es3.summarizeEvent(2000, key2, version2, variation3, value3, default2, context); + + EventSummarizer es4 = new EventSummarizer(); // same operations with different end time + es4.summarizeEvent(1000, key1, version1, variation1, value1, default1, context); + es4.summarizeEvent(1000, key1, version1, variation1, value1, default1, context); + es4.summarizeEvent(1000, key1, version2, variation2, value2, default1, context); + es4.summarizeEvent(2100, key2, version2, variation3, value3,
default2, context); + + EventSummary summary1 = es1.getSummaryAndReset(); + EventSummary summary2 = es2.getSummaryAndReset(); + EventSummary summary3 = es3.getSummaryAndReset(); + EventSummary summary4 = es4.getSummaryAndReset(); + + assertEquals(summary1, summary2); + assertEquals(summary2, summary1); + + assertEquals(0, summary1.hashCode()); // see comment on hashCode + + assertNotEquals(summary1, summary3); + assertNotEquals(summary1, summary4); + + assertNotEquals(summary1, null); + assertNotEquals(summary1, "x"); + } + + @Test + public void simpleIntKeyedMapBehavior() { + // Tests the behavior of the inner class that we use instead of a Map. + SimpleIntKeyedMap<String> m = new SimpleIntKeyedMap<>(); + int initialCapacity = m.capacity(); + + assertEquals(0, m.size()); + assertNotEquals(0, initialCapacity); + assertNull(m.get(1)); + + for (int i = 0; i < initialCapacity; i++) { + m.put(i * 100, "value" + i); + } + + assertEquals(initialCapacity, m.size()); + assertEquals(initialCapacity, m.capacity()); + + for (int i = 0; i < initialCapacity; i++) { + assertEquals("value" + i, m.get(i * 100)); + } + assertNull(m.get(33)); + + m.put(33, "other"); + assertNotEquals(initialCapacity, m.capacity()); + assertEquals(initialCapacity + 1, m.size()); + assertEquals("other", m.get(33)); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/package-info.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/package-info.java new file mode 100644 index 0000000..c901013 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/events/package-info.java @@ -0,0 +1,4 @@ +/** + * Test classes and methods for testing SDK event functionality. + */ +package com.launchdarkly.sdk.internal.events; diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpHelpersAddQueryParamToUriTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpHelpersAddQueryParamToUriTest.java new file mode 100644 index 0000000..b051297 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpHelpersAddQueryParamToUriTest.java @@ -0,0 +1,61 @@ +package com.launchdarkly.sdk.internal.http; + +import static org.junit.Assert.assertEquals; + +import java.net.URI; +import java.util.Arrays; +import java.util.Collection; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; + +import com.launchdarkly.sdk.internal.BaseTest; + +@RunWith(Parameterized.class) +public class HttpHelpersAddQueryParamToUriTest extends BaseTest { + + @Parameter(0) + public URI inputURI; + @Parameter(1) + public String inputKey; + @Parameter(2) + public String inputValue; + @Parameter(3) + public URI expectedURI; + + @Parameterized.Parameters + public static Collection<Object[]> data() { + // parameters and expected output + return Arrays.asList(new Object[][] { + // nice case + { URI.create("http://1.1.1.1"), "filter", "myFilter", URI.create("http://1.1.1.1/?filter=myFilter") }, + // encoding check + { URI.create("http://1.1.1.1"), "filter", "encoding necessary +! %& ( )", URI.create("http://1.1.1.1/?filter=encoding%20necessary%20%2B%21%20%25%26%20%28%20%29") }, + // existing param + { URI.create("http://1.1.1.1/?withReasons=true"), "filter", "myFilter", URI.create("http://1.1.1.1/?withReasons=true&filter=myFilter") }, + // order affects result (just including this for determinism, not a spec point) + { URI.create("http://1.1.1.1/?filter=myFilter"), "withReasons", "true", URI.create("http://1.1.1.1/?filter=myFilter&withReasons=true") }, + // existing path + { URI.create("http://1.1.1.1/a/path"), "filter", "myFilter", URI.create("http://1.1.1.1/a/path?filter=myFilter") }, + + // below are weird cases that we aren't expecting to encounter, just including for documentation of behavior + // adding param again + { URI.create("http://1.1.1.1/?filter=myFilter"), "filter", "anotherFilter", URI.create("http://1.1.1.1/?filter=myFilter&filter=anotherFilter") }, + // adding empty params and values + { URI.create("http://1.1.1.1/?filter=myFilter"), "", "", URI.create("http://1.1.1.1/?filter=myFilter&=") }, + }); + } + + @Test + public void TestParametricAddQueryParam() { + assertEquals(this.expectedURI, HttpHelpers.addQueryParam(this.inputURI, this.inputKey, this.inputValue)); + } + + @Test(expected = IllegalArgumentException.class) + public void TestImproperURIThrowsException() { + URI uriUnderTest = URI.create("ImARidiculousURI/?existingparam=existingvalue"); + HttpHelpers.addQueryParam(uriUnderTest, "notImportant", "notImportant"); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpHelpersConcatUriPathTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpHelpersConcatUriPathTest.java new file mode 100644 index 0000000..9cbc170 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpHelpersConcatUriPathTest.java @@ -0,0 +1,80 @@ +package com.launchdarkly.sdk.internal.http; + +import static org.junit.Assert.assertEquals; + +import java.net.URI; +import java.util.Arrays; +import java.util.Collection; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; + +import com.launchdarkly.sdk.internal.BaseTest; + +@RunWith(Parameterized.class) +public class HttpHelpersConcatUriPathTest extends BaseTest { + + @Parameter(0) + public URI inputURI; + @Parameter(1) + public String inputPath; + @Parameter(2) + public URI expectedURI; + + @Parameterized.Parameters + public static Collection<Object[]> data() { + + // parameters and expected output + return Arrays.asList(new Object[][] { + { URI.create("http://1.1.1.1"), "/status", URI.create("http://1.1.1.1/status") }, + { URI.create("http://1.1.1.1/"), "/status", URI.create("http://1.1.1.1/status") }, + { URI.create("http://1.1.1.1/"), "//status", URI.create("http://1.1.1.1/status") }, + { URI.create("http://google.com"), "/status", URI.create("http://google.com/status") }, + { URI.create("http://google.com"), "//status", URI.create("http://google.com/status") }, + { URI.create("http://google.com"), "///status", URI.create("http://google.com/status") }, + { URI.create("http://google.com/"), "/status", URI.create("http://google.com/status") }, + { URI.create("http://google.com/"), "//status", URI.create("http://google.com/status") }, + { URI.create("http://google.com/"), "///status", URI.create("http://google.com/status") }, + { URI.create("http://google.com//"), "/status", URI.create("http://google.com/status") }, + {
URI.create("http://google.com//"), "//status", URI.create("http://google.com/status") }, + { URI.create("http://google.com//"), "///status", URI.create("http://google.com/status") }, + { URI.create("http://google.com///"), "/status", URI.create("http://google.com/status") }, + { URI.create("http://google.com///"), "//status", URI.create("http://google.com/status") }, + { URI.create("http://google.com///"), "///status", URI.create("http://google.com/status") }, + { URI.create("https://google.com"), "/status", URI.create("https://google.com/status") }, + { URI.create("https://google.com"), "//status", URI.create("https://google.com/status") }, + { URI.create("https://google.com"), "///status", URI.create("https://google.com/status") }, + { URI.create("https://google.com/"), "/status", URI.create("https://google.com/status") }, + { URI.create("https://google.com/"), "//status", URI.create("https://google.com/status") }, + { URI.create("https://google.com/"), "///status", URI.create("https://google.com/status") }, + { URI.create("https://google.com//"), "/status", URI.create("https://google.com/status") }, + { URI.create("https://google.com//"), "//status", URI.create("https://google.com/status") }, + { URI.create("https://google.com//"), "///status", URI.create("https://google.com/status") }, + { URI.create("https://google.com///"), "/status", URI.create("https://google.com/status") }, + { URI.create("https://google.com///"), "//status", URI.create("https://google.com/status") }, + { URI.create("https://google.com///"), "///status", URI.create("https://google.com/status") }, + { URI.create("https://google.com:1234"), "/status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234"), "//status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234"), "///status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234/"), "/status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234/"), "//status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234/"), "///status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234//"), "/status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234//"), "//status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234//"), "///status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234///"), "/status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234///"), "//status", URI.create("https://google.com:1234/status") }, + { URI.create("https://google.com:1234///"), "///status", URI.create("https://google.com:1234/status") }, + + // test to make sure query params don't get removed by append + { URI.create("https://google.com:1234/some/root/path/?filter=myFilter"), "/toAppend", URI.create("https://google.com:1234/some/root/path/toAppend?filter=myFilter") }, + }); + } + + @Test + public void TestConcatenateUriPath() { + assertEquals(this.expectedURI, HttpHelpers.concatenateUriPath(this.inputURI, this.inputPath)); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpPropertiesTest.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpPropertiesTest.java new file mode 100644 index 0000000..b73111a --- /dev/null +++ 
b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/HttpPropertiesTest.java @@ -0,0 +1,98 @@ +package com.launchdarkly.sdk.internal.http; + +import com.google.common.collect.ImmutableMap; +import com.launchdarkly.sdk.internal.BaseTest; + +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertSame; + +import okhttp3.Headers; +import okhttp3.OkHttpClient; + +@SuppressWarnings("javadoc") +public class HttpPropertiesTest extends BaseTest { + @Test + public void testConnectTimeout() { + HttpProperties hp = new HttpProperties( + 100000, + null, null, null, null, null, 0, null, null); + OkHttpClient httpClient = hp.toHttpClientBuilder().build(); + try { + assertEquals(100000, httpClient.connectTimeoutMillis()); + } finally { + HttpProperties.shutdownHttpClient(httpClient); + } + } + + @Test + public void testSocketTimeout() { + HttpProperties hp = new HttpProperties( + 0, null, null, null, null, null, + 100000, + null, null); + OkHttpClient httpClient = hp.toHttpClientBuilder().build(); + try { + assertEquals(100000, httpClient.readTimeoutMillis()); + } finally { + HttpProperties.shutdownHttpClient(httpClient); + } + } + + @Test + public void testDefaultHeaders() { + Map<String, String> headers = new HashMap<>(); + headers.put("name1", "value1"); + headers.put("name2", "value2"); + HttpProperties hp = new HttpProperties( + 0, + headers, + null, null, null, null, 0, null, null); + + Map<String, String> configured = ImmutableMap.copyOf(hp.getDefaultHeaders()); + assertEquals(headers, configured); + + Headers built = hp.toHeadersBuilder().build(); + assertEquals("value1", built.get("name1")); + assertEquals("value2", built.get("name2")); + } + + @Test + public void testTransformedDefaultHeaders() { + Map<String, String> headers = new HashMap<>(); + headers.put("name1", "value1"); + headers.put("name2", "value2"); + HeadersTransformer headersTransformer = new HeadersTransformer() { + @Override + public void updateHeaders(Map<String, String> h) { + h.put("name1", h.get("name1") + "a"); + } + }; + HttpProperties hp = new HttpProperties( + 0, + headers, headersTransformer, + null, null, null, 0, null, null); + + Map<String, String> configured = ImmutableMap.copyOf(hp.getDefaultHeaders()); + assertEquals(headers, configured); + + Map<String, String> transformed = ImmutableMap.copyOf(hp.getTransformedDefaultHeaders()); + assertEquals("value1a", transformed.get("name1")); + assertEquals("value2", transformed.get("name2")); + + Headers built = hp.toHeadersBuilder().build(); + assertEquals("value1a", built.get("name1")); + assertEquals("value2", built.get("name2")); + } + + @Test + public void testSharedHttpClient() { + OkHttpClient httpClient = new OkHttpClient(); + HttpProperties hp = new HttpProperties(httpClient, null, null); + assertSame(httpClient, hp.getSharedHttpClient()); + } +} diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/package-info.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/package-info.java new file mode 100644 index 0000000..51998e9 --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/http/package-info.java @@ -0,0 +1,4 @@ +/** + * Test classes and methods for testing SDK HTTP functionality.
+ */ +package com.launchdarkly.sdk.internal.http; diff --git a/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/package-info.java b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/package-info.java new file mode 100644 index 0000000..6dcdf1f --- /dev/null +++ b/lib/shared/internal/src/test/java/com/launchdarkly/sdk/internal/package-info.java @@ -0,0 +1,4 @@ +/** + * Test classes and methods for testing general SDK functionality. + */ +package com.launchdarkly.sdk.internal; diff --git a/release-please-config.json b/release-please-config.json index 9e1e896..c76408f 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -11,6 +11,14 @@ "gradle.properties" ] }, + "lib/shared/internal": { + "release-type": "simple", + "bump-minor-pre-major": true, + "include-v-in-tag": false, + "extra-files": [ + "gradle.properties" + ] + }, "lib/shared/common": { "release-type": "simple", "bump-minor-pre-major": true,
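Editorial addendum, not part of this patch: the parameterized tests above pin down the observable behavior of the two HttpHelpers methods they exercise. As a minimal illustration of how those methods compose, the sketch below uses only the signatures called in those tests; the class name HttpHelpersUsageSketch and the example.invalid base URI are invented for the example, and the outputs noted in the comments are inferred by analogy with the test tables rather than taken from documentation.

import java.net.URI;

import com.launchdarkly.sdk.internal.http.HttpHelpers;

public class HttpHelpersUsageSketch {
  public static void main(String[] args) {
    // concatenateUriPath collapses redundant slashes between the base URI and the
    // appended path (the ConcatUriPath table maps "http://google.com///" + "///status"
    // to "http://google.com/status").
    URI base = URI.create("https://example.invalid///");
    URI status = HttpHelpers.concatenateUriPath(base, "//status");
    System.out.println(status); // by analogy with the tests: https://example.invalid/status

    // addQueryParam appends the parameter and percent-encodes the value (the
    // AddQueryParam table encodes "encoding necessary +! %& ( )" as
    // "encoding%20necessary%20%2B%21%20%25%26%20%28%20%29") while preserving any
    // existing path and query parameters.
    URI filtered = HttpHelpers.addQueryParam(status, "filter", "my filter");
    System.out.println(filtered); // by analogy: https://example.invalid/status?filter=my%20filter
  }
}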