From 216de9f9c58553d120cfd04b869009ebabf5e1bf Mon Sep 17 00:00:00 2001
From: Florian Hussonnois
Date: Thu, 25 Jul 2024 01:06:06 +0200
Subject: [PATCH] feat: add plugin-transforms-grok and plugin-transforms-json

---
 .editorconfig | 25 ++
 .gitattributes | 9 +
 .github/ISSUE_TEMPLATE/bug.yml | 55 ++++
 .github/ISSUE_TEMPLATE/config.yml | 7 +
 .github/ISSUE_TEMPLATE/feature.yml | 15 ++
 .github/ISSUE_TEMPLATE/other.yml | 8 +
 .github/dependabot.yml | 23 ++
 .github/pull_request_template.md | 37 +++
 .github/workflows/main.yml | 150 +++++++++++
 .gitignore | 16 ++
 LICENSE | 191 +++++++++++++
 README.md | 58 ++++
 build.gradle | 254 ++++++++++++++++++
 gradle.properties | 2 +
 gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 43462 bytes
 gradle/wrapper/gradle-wrapper.properties | 7 +
 gradlew | 249 +++++++++++++++++
 gradlew.bat | 92 +++++++
 lombok.config | 5 +
 plugin-transforms-grok/build.gradle | 17 ++
 .../plugin/transforms/grok/GrokTransform.java | 220 +++++++++++++++
 .../plugin/transforms/grok/data/Type.java | 65 +++++
 .../grok/data/internal/TypeConverter.java | 97 +++++++
 .../grok/pattern/GrokCaptureExtractor.java | 38 +++
 .../grok/pattern/GrokCaptureGroup.java | 76 ++++++
 .../grok/pattern/GrokException.java | 14 +
 .../transforms/grok/pattern/GrokMatcher.java | 116 ++++++++
 .../transforms/grok/pattern/GrokPattern.java | 32 +++
 .../grok/pattern/GrokPatternCompiler.java | 91 +++++++
 .../grok/pattern/GrokPatternResolver.java | 214 +++++++++++++++
 .../src/main/resources/patterns/aws | 40 +++
 .../src/main/resources/patterns/bacula | 60 +++++
 .../src/main/resources/patterns/bind | 20 ++
 .../src/main/resources/patterns/bro | 37 +++
 .../src/main/resources/patterns/exim | 33 +++
 .../src/main/resources/patterns/firewalls | 118 ++++++++
 .../src/main/resources/patterns/grok-patterns | 102 +++++++
 .../src/main/resources/patterns/haproxy | 46 ++++
 .../src/main/resources/patterns/httpd | 24 ++
 .../src/main/resources/patterns/java | 41 +++
 .../src/main/resources/patterns/junos | 20 ++
 .../src/main/resources/patterns/linux-syslog | 23 ++
 .../src/main/resources/patterns/maven | 8 +
 .../src/main/resources/patterns/mcollective | 11 +
 .../src/main/resources/patterns/mongodb | 14 +
 .../src/main/resources/patterns/nagios | 131 +++++++++
 .../src/main/resources/patterns/postgresql | 9 +
 .../src/main/resources/patterns/rails | 20 ++
 .../src/main/resources/patterns/redis | 10 +
 .../src/main/resources/patterns/ruby | 9 +
 .../src/main/resources/patterns/squid | 13 +
 .../src/main/resources/patterns/zeek | 40 +++
 .../transforms/grok/GrokTransformTest.java | 133 +++++++++
 .../grok/pattern/GrokMatcherTest.java | 36 +++
 .../grok/pattern/GrokPatternCompilerTest.java | 76 ++++++
 .../grok/pattern/GrokPatternResolverTest.java | 23 ++
 .../src/test/resources/allure.properties | 1 +
 .../src/test/resources/application.yml | 9 +
 .../src/test/resources/logback.xml | 11 +
 plugin-transforms-json/build.gradle | 17 ++
 .../transforms/grok/JSONataTransform.java | 181 +++++++++++++
 .../transforms/grok/JSONataTransformTest.java | 181 +++++++++++++
 .../src/test/resources/allure.properties | 1 +
 .../src/test/resources/application.yml | 9 +
 .../src/test/resources/logback.xml | 11 +
 settings.gradle | 4 +
 66 files changed, 3705 insertions(+)
 create mode 100644 .editorconfig
 create mode 100644 .gitattributes
 create mode 100644 .github/ISSUE_TEMPLATE/bug.yml
 create mode 100644 .github/ISSUE_TEMPLATE/config.yml
 create mode 100644 .github/ISSUE_TEMPLATE/feature.yml
 create mode 100644 .github/ISSUE_TEMPLATE/other.yml
 create mode 100644 .github/dependabot.yml
 create mode 100644 .github/pull_request_template.md
 create mode 100644 .github/workflows/main.yml
 create mode 100644 .gitignore
 create mode 100644 LICENSE
 create mode 100644 README.md
 create mode 100644 build.gradle
 create mode 100644 gradle.properties
 create mode 100644 gradle/wrapper/gradle-wrapper.jar
 create mode 100644 gradle/wrapper/gradle-wrapper.properties
 create mode 100755 gradlew
 create mode 100644 gradlew.bat
 create mode 100644 lombok.config
 create mode 100644 plugin-transforms-grok/build.gradle
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/GrokTransform.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/Type.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/internal/TypeConverter.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureExtractor.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureGroup.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokException.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcher.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPattern.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompiler.java
 create mode 100644 plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolver.java
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/aws
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/bacula
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/bind
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/bro
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/exim
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/firewalls
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/grok-patterns
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/haproxy
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/httpd
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/java
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/junos
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/linux-syslog
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/maven
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/mcollective
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/mongodb
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/nagios
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/postgresql
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/rails
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/redis
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/ruby
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/squid
 create mode 100644 plugin-transforms-grok/src/main/resources/patterns/zeek
 create mode 100644 plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/GrokTransformTest.java
 create mode 100644 plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcherTest.java
 create mode 100644 plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompilerTest.java
 create mode 100644 plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolverTest.java
 create mode 100644 plugin-transforms-grok/src/test/resources/allure.properties
 create mode 100644 plugin-transforms-grok/src/test/resources/application.yml
 create mode 100644 plugin-transforms-grok/src/test/resources/logback.xml
 create mode 100644 plugin-transforms-json/build.gradle
 create mode 100644 plugin-transforms-json/src/main/java/io/kestra/plugin/transforms/grok/JSONataTransform.java
 create mode 100644 plugin-transforms-json/src/test/java/io/kestra/plugin/transforms/grok/JSONataTransformTest.java
 create mode 100644 plugin-transforms-json/src/test/resources/allure.properties
 create mode 100644 plugin-transforms-json/src/test/resources/application.yml
 create mode 100644 plugin-transforms-json/src/test/resources/logback.xml
 create mode 100644 settings.gradle

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..83014c3
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,25 @@
+root = true
+
+[*]
+charset=utf-8
+end_of_line=lf
+insert_final_newline=false
+trim_trailing_whitespace=true
+indent_style=space
+indent_size=4
+continuation_indent_size=4
+
+[*.yml]
+indent_size=2
+
+[*.md]
+indent_size=2
+
+[*.yaml]
+indent_size=2
+
+[*.json]
+indent_size=2
+
+[*.css]
+indent_size=2
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..ff74ae5
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,9 @@
+# Match the .editorconfig
+* text=auto eol=lf
+
+# Scripts
+*.bat text eol=crlf
+*.sh text eol=lf
+
+# Gradle wrapper
+/gradlew text eol=lf
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
new file mode 100644
index 0000000..61afa8d
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -0,0 +1,55 @@
+name: Bug Report
+description: File a bug report
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Thanks for reporting an issue. Please review the task list below before submitting; incomplete reports that skip these tasks will be closed.
+
+        NOTE: If you are unsure about something, or the issue is more of a question, a better place to ask is [GitHub Discussions](https://github.com/kestra-io/kestra/discussions) or [Slack](https://api.kestra.io/v1/communities/slack/redirect).
+  - type: textarea
+    attributes:
+      label: Expected Behavior
+      description: A concise description of what you expected to happen.
+      placeholder: Tell us what should happen
+    validations:
+      required: false
+  - type: textarea
+    attributes:
+      label: Actual Behavior
+      description: A concise description of what you're experiencing.
+      placeholder: Tell us what happens instead
+    validations:
+      required: false
+  - type: textarea
+    attributes:
+      label: Steps To Reproduce
+      description: Steps to reproduce the behavior.
+      placeholder: |
+        1. In this environment...
+        2. With this config...
+        3. Run '...'
+        4. See error...
+    validations:
+      required: false
+  - type: textarea
+    attributes:
+      label: Environment Information
+      description: Environment information where the problem occurs.
+      value: |
+        - Kestra Version:
+        - Plugin version:
+        - Operating System (OS / Docker / Kubernetes):
+        - Java Version (if not Docker):
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Example flow
+      description: Example flow source.
+      placeholder: |
+        If relevant, an example flow
+    validations:
+      required: false
+labels:
+  - bug
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..a6d0656
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,7 @@
+contact_links:
+  - name: GitHub Discussions
+    url: https://github.com/kestra-io/kestra/discussions
+    about: Ask questions about Kestra on GitHub.
+  - name: Chat
+    url: https://api.kestra.io/v1/communities/slack/redirect
+    about: Chat with us on Slack.
diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml
new file mode 100644
index 0000000..11f8a1a
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature.yml
@@ -0,0 +1,15 @@
+name: Feature request
+description: Create a new feature request
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Please describe the feature you would like Kestra to implement; before filing, check whether an existing issue already requests it.
+  - type: textarea
+    attributes:
+      label: Feature description
+      placeholder: Tell us what feature you would like Kestra to have and what problem it is going to solve
+    validations:
+      required: true
+labels:
+  - enhancement
diff --git a/.github/ISSUE_TEMPLATE/other.yml b/.github/ISSUE_TEMPLATE/other.yml
new file mode 100644
index 0000000..20f261f
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/other.yml
@@ -0,0 +1,8 @@
+name: Other
+description: Something different
+body:
+  - type: textarea
+    attributes:
+      label: Issue description
+    validations:
+      required: true
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..cd5db57
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,23 @@
+# See GitHub's docs for more information on this file:
+# https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates
+version: 2
+updates:
+  # Maintain dependencies for GitHub Actions
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      # Check for updates to GitHub Actions every week
+      interval: "weekly"
+    labels:
+      - "dependency-upgrade"
+    open-pull-requests-limit: 50
+
+  # Maintain dependencies for Gradle modules
+  - package-ecosystem: "gradle"
+    directory: "/"
+    schedule:
+      # Check for updates to Gradle modules every week
+      interval: "weekly"
+    labels:
+      - "dependency-upgrade"
+    open-pull-requests-limit: 50
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 0000000..68570ae
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,37 @@
+
+### What changes are being made and why?
+
+---
+
+### How have the changes been QAed?
+
+---
+
+### Setup Instructions
+
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..0c22a28
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,150 @@
+name: Main
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      - v*
+
+  pull_request:
+    branches:
+      - main
+
+  workflow_dispatch:
+    inputs:
+      skip-test:
+        description: 'Skip test'
+        required: false
+        type: string
+        default: "false"
+
+jobs:
+  check:
+    name: Check & Publish
+    runs-on: ubuntu-latest
+    env:
+      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
+    steps:
+      - uses: actions/checkout@v4
+
+      # Caches
+      - name: Gradle cache
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.gradle/caches
+            ~/.gradle/wrapper
+          key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle*.properties') }}
+          restore-keys: |
+            ${{ runner.os }}-gradle-
+
+      # JDK
+      - name: Set up JDK
+        uses: actions/setup-java@v4
+        with:
+          distribution: 'temurin'
+          java-version: 21
+
+      - name: Validate Gradle wrapper
+        uses: gradle/actions/wrapper-validation@v3
+
+      # Gradle check
+      - name: Build with Gradle
+        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
+        run: ./gradlew check --parallel
+
+      # Allure check
+      - name: Auth to Google Cloud
+        id: auth
+        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 }}
+        uses: 'google-github-actions/auth@v2'
+        with:
+          credentials_json: '${{ secrets.GOOGLE_SERVICE_ACCOUNT }}'
+
+      - uses: rlespinasse/github-slug-action@v4
+
+      - name: Publish allure report
+        uses: andrcuns/allure-publish-action@v2.6.0
+        if: ${{ always() && env.GOOGLE_SERVICE_ACCOUNT != 0 && (github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '') }}
+        env:
+          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          JAVA_HOME: /usr/lib/jvm/default-jvm/
+        with:
+          storageType: gcs
+          resultsGlob: "**/build/allure-results"
+          bucket: internal-kestra-host
+          baseUrl: "https://internal.kestra.io"
+          prefix: ${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'allure/java') }}
+          copyLatest: true
+          ignoreMissingResults: true
+
+      # Jacoco
+      - name: 'Set up Cloud SDK'
+        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 }}
+        uses: 'google-github-actions/setup-gcloud@v2'
+
+      - name: 'Copy jacoco files'
+        if: ${{ env.GOOGLE_SERVICE_ACCOUNT != 0 }}
+        run: |
+          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
+          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/jacocoTestReport.xml
+          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}/{2}', github.repository, env.GITHUB_HEAD_REF_SLUG != '' && env.GITHUB_HEAD_REF_SLUG || github.ref_name, 'jacoco') }}
+
+      # Publish
+      - name: Publish package to Sonatype
+        if: github.ref == 'refs/heads/main'
+        env:
+          ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
+          ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
+          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
+          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
+          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
+        run: |
+          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
+          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
+          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
+          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
+          ./gradlew publishToSonatype
+
+      # Release
+      - name: Release package to Maven Central
+        if: startsWith(github.ref, 'refs/tags/v')
+        env:
+          ORG_GRADLE_PROJECT_sonatypeUsername: ${{ secrets.SONATYPE_USER }}
+          ORG_GRADLE_PROJECT_sonatypePassword: ${{ secrets.SONATYPE_PASSWORD }}
+          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
+          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
+          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
+        run: |
+          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
+          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
+          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
+          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
+          ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository
+
+      # GitHub Release
+      - name: Create GitHub release
+        uses: "marvinpinto/action-automatic-releases@latest"
+        if: startsWith(github.ref, 'refs/tags/v')
+        with:
+          repo_token: "${{ secrets.GITHUB_TOKEN }}"
+          prerelease: false
+          files: |
+            build/libs/*.jar
+
+      # Slack
+      - name: Slack notification
+        uses: 8398a7/action-slack@v3
+        if: ${{ always() && env.SLACK_WEBHOOK_URL != 0 }}
+        with:
+          status: ${{ job.status }}
+          job_name: Check & Publish
+          fields: repo,message,commit,author,action,eventName,ref,workflow,job,took
+          username: GitHub Actions
+          icon_emoji: ':github-actions:'
+          channel: 'C02DQ1A7JLR'
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4be6e5d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+Thumbs.db
+.DS_Store
+.gradle
+build/
+target/
+out/
+.idea
+.vscode
+*.iml
+*.ipr
+*.iws
+.project
+.settings
+.classpath
+.attach*
+src/test/resources/application-test.yml
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..b7ad1a9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,191 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        https://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   Copyright 2019 Nigh Tech.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       https://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
\ No newline at end of file

diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b419105
--- /dev/null
+++ b/README.md
@@ -0,0 +1,58 @@
+<!-- Banner: "Kestra workflow orchestrator" / "Event-Driven Declarative Orchestrator" / "Kestra infinitely scalable orchestration and scheduling platform"; badges: Last Version, License, Github star, Slack; social links: twitter, linkedin, youtube; video link: "Get started in 4 minutes with Kestra" / "Get started with Kestra in 4 minutes." -->
+
+# Kestra Plugin JSON
+
+> A template for creating Kestra plugins
+
+This repository serves as a general template for creating a new [Kestra](https://github.com/kestra-io/kestra) plugin. It should take only a few minutes! Use this repository as a scaffold to ensure that you've set up the plugin correctly, including unit tests and CI/CD workflows.
+
+![Kestra orchestrator](https://kestra.io/video.gif)
+
+
+## Documentation
+* Full documentation can be found under: [kestra.io/docs](https://kestra.io/docs)
+* Documentation for developing a plugin is included in the [Plugin Developer Guide](https://kestra.io/docs/plugin-developer-guide/)
+
+
+## License
+Apache 2.0 © [Kestra Technologies](https://kestra.io)
+
+
+## Stay up to date
+
+We release new versions every month. Give the [main repository](https://github.com/kestra-io/kestra) a star to stay up to date with the latest releases and get notified about future updates.
+
+![Star the repo](https://kestra.io/star.gif)
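The first plugin module in this patch ships the task class `io.kestra.plugin.transforms.grok.GrokTransform` (fully-qualified name taken from the file list above). As a rough sketch of how that task might be declared in a flow — the `from` and `pattern` property names are illustrative assumptions, not confirmed by this patch; only the task type and the bundled pattern names (`TIMESTAMP_ISO8601`, `LOGLEVEL`, and `GREEDYDATA` ship in `src/main/resources/patterns/grok-patterns`) come from it:

```yaml
id: grok_transform_example
namespace: example

tasks:
  - id: parse_log
    # Task type from the diffstat above; the properties below are assumed for illustration.
    type: io.kestra.plugin.transforms.grok.GrokTransform
    # Hypothetical input property holding the raw line to parse.
    from: "2024-07-25T01:06:06 INFO plugin published"
    # Grok expression composed from patterns bundled in src/main/resources/patterns.
    pattern: "%{TIMESTAMP_ISO8601:timestamp} %{LOGLEVEL:level} %{GREEDYDATA:message}"
```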
diff --git a/build.gradle b/build.gradle
new file mode 100644
index 0000000..82e854d
--- /dev/null
+++ b/build.gradle
@@ -0,0 +1,254 @@
+plugins {
+    id 'java-library'
+    id "idea"
+    id 'jacoco-report-aggregation'
+    id "com.adarshr.test-logger" version "4.0.0" apply(false)
+    id "com.github.johnrengelman.shadow" version "8.1.1" apply(false)
+    id 'ru.vyarus.java-lib' version '3.0.0' apply(false)
+    id 'ru.vyarus.github-info' version '2.0.0' apply(false)
+    id 'signing'
+    id "io.github.gradle-nexus.publish-plugin" version "2.0.0"
+    id "com.github.ben-manes.versions" version "0.51.0"
+    id 'net.researchgate.release' version '3.0.2'
+}
+
+def isBuildSnapshot = version.toString().endsWith("-SNAPSHOT")
+
+tasks.withType(JavaCompile) {
+    options.encoding = "UTF-8"
+    options.compilerArgs.add("-parameters")
+}
+
+group "io.kestra.plugin"
+
+allprojects {
+    repositories {
+        mavenLocal()
+        mavenCentral()
+        if (isBuildSnapshot) {
+            maven { url "https://s01.oss.sonatype.org/content/repositories/snapshots/" }
+        }
+    }
+}
+
+subprojects {
+    group "io.kestra.plugin"
+
+    apply plugin: "java-library"
+
+    sourceCompatibility = 21
+    targetCompatibility = 21
+
+    apply plugin: "idea"
+
+    configurations {
+        scriptsDriver
+        api.extendsFrom(scriptsDriver)
+    }
+
+    dependencies {
+        // Platform
+        annotationProcessor enforcedPlatform("io.kestra:platform:$kestraVersion")
+        implementation enforcedPlatform("io.kestra:platform:$kestraVersion")
+        api enforcedPlatform("io.kestra:platform:$kestraVersion")
+
+        // lombok
+        annotationProcessor "org.projectlombok:lombok"
+        compileOnly "org.projectlombok:lombok"
+
+        compileOnly 'io.micronaut:micronaut-context'
+        compileOnly "io.micronaut.reactor:micronaut-reactor"
+
+        // Kestra
+        compileOnly group: "io.kestra", name: "core", version: kestraVersion
+        compileOnly group: "io.kestra", name: "script", version: kestraVersion
+        annotationProcessor group: "io.kestra", name: "processor", version: kestraVersion
+
+        // Logs
+        compileOnly 'org.slf4j:slf4j-api'
+        compileOnly 'ch.qos.logback:logback-classic:1.5.6'
+    }
+}
+
+/**********************************************************************************************************************\
+ * Test
+ **********************************************************************************************************************/
+subprojects {
+    apply plugin: "com.adarshr.test-logger"
+
+    test {
+        useJUnitPlatform()
+    }
+
+    testlogger {
+        theme "mocha-parallel"
+        showExceptions true
+        showFullStackTraces true
+        showStandardStreams true
+        showPassedStandardStreams false
+        showSkippedStandardStreams true
+    }
+
+    dependencies {
+        // Platform
+        testAnnotationProcessor enforcedPlatform("io.kestra:platform:$kestraVersion")
+        testImplementation enforcedPlatform("io.kestra:platform:$kestraVersion")
+
+        // Kestra
+        testAnnotationProcessor group: "io.kestra", name: "processor", version: kestraVersion
+
+        // lombok
+        testAnnotationProcessor "org.projectlombok:lombok"
+        testCompileOnly 'org.projectlombok:lombok'
+
+        // micronaut
+        testAnnotationProcessor "io.micronaut:micronaut-inject-java"
+        testAnnotationProcessor "io.micronaut.validation:micronaut-validation-processor"
+        testImplementation "io.micronaut.test:micronaut-test-junit5"
+        testImplementation "io.micronaut.reactor:micronaut-reactor"
+
+        // test deps needed only to have a runner
+        testImplementation group: "io.kestra", name: "core", version: kestraVersion
+        testImplementation group: "io.kestra", name: "tests", version: kestraVersion
+        testImplementation group: "io.kestra", name: "script", version: kestraVersion
+        testImplementation group: "io.kestra", name: "repository-memory", version: kestraVersion
+        testImplementation group: "io.kestra", name: "runner-memory", version: kestraVersion
+        testImplementation group: "io.kestra", name: "storage-local", version: kestraVersion
+
+        // test
+        testImplementation "org.junit.jupiter:junit-jupiter-engine"
+        testImplementation 'org.mockito:mockito-junit-jupiter'
+        testImplementation "org.hamcrest:hamcrest"
+        testImplementation "org.hamcrest:hamcrest-library"
+        testImplementation "org.junit-pioneer:junit-pioneer"
+        testImplementation "org.junit.jupiter:junit-jupiter-params"
+    }
+}
+
+/**********************************************************************************************************************\
+ * Allure Reports
+ **********************************************************************************************************************/
+subprojects {
+    dependencies {
+        testImplementation enforcedPlatform("io.kestra:platform:$kestraVersion")
+        testImplementation "io.qameta.allure:allure-junit5"
+    }
+
+    configurations {
+        agent {
+            canBeResolved = true
+            canBeConsumed = true
+        }
+    }
+
+    dependencies {
+        agent "org.aspectj:aspectjweaver:1.9.22.1"
+    }
+
+    test {
+        jvmArgs = [ "-javaagent:${configurations.agent.singleFile}" ]
+    }
+}
+
+/**********************************************************************************************************************\
+ * Jacoco
+ **********************************************************************************************************************/
+subprojects {
+    apply plugin: 'jacoco'
+
+    test {
+        finalizedBy jacocoTestReport
+    }
+}
+
+dependencies {
+    jacocoAggregation subprojects
+}
+
+tasks.named('testCodeCoverageReport') {
+    dependsOn = subprojects.test
+}
+
+tasks.named('check') {
+    dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
+}
+
+/**********************************************************************************************************************\
+ * Publish
+ **********************************************************************************************************************/
+nexusPublishing {
+    repositoryDescription = "${project.group}:${rootProject.name}:${project.version}"
+    useStaging = !isBuildSnapshot
+    repositories {
+        sonatype {
+            nexusUrl.set(uri("https://s01.oss.sonatype.org/service/local/"))
+            snapshotRepositoryUrl.set(uri("https://s01.oss.sonatype.org/content/repositories/snapshots/"))
+        }
+    }
+}
+
+subprojects { Project subproject ->
+    apply plugin: "com.adarshr.test-logger"
+    apply plugin: "maven-publish"
+    apply plugin: "signing"
+    apply plugin: 'ru.vyarus.github-info'
+    apply plugin: 'ru.vyarus.java-lib'
+
+    github {
+        user 'kestra-io'
+        repository 'plugin-transforms'
+        license 'Apache'
+    }
+
+    apply plugin: "com.github.johnrengelman.shadow"
+
+    shadowJar {
+        archiveClassifier.set(null)
+        mergeServiceFiles()
+        dependencies {
+            exclude "com/fasterxml/jackson/**"
+            exclude "org/slf4j/**"
+        }
+    }
+
+    maven.pom {
+        developers {
+            developer {
+                id = "tchiotludo"
+                name = "Ludovic Dehon"
+            }
+        }
+    }
+
+    tasks.withType(GenerateModuleMetadata).configureEach {
+        // Suppress this validation error as we want to enforce the Kestra platform
+        suppressedValidationErrors.add('enforced-platform')
+    }
+}
+
+/**********************************************************************************************************************\
+ * Version
+ **********************************************************************************************************************/
+release {
+    preCommitText = 'chore(version):'
+    preTagCommitMessage = 'update to version'
+    tagCommitMessage = 'tag version'
+    newVersionCommitMessage = 'update snapshot version'
+    tagTemplate = 'v${version}'
+    buildTasks = ['classes']
+    git {
+        requireBranch.set('master')
+    }
+}
+
+/**********************************************************************************************************************\
+ * Dev
+ **********************************************************************************************************************/
+idea {
+    module {
+        downloadJavadoc = true
+        downloadSources = true
+    }
+}
diff --git a/gradle.properties b/gradle.properties
new file mode 100644
index 0000000..6b04bf0
--- /dev/null
+++ b/gradle.properties
@@ -0,0 +1,2 @@
+version=0.18.0-SNAPSHOT
+kestraVersion=[0.18,)
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000000000000000000000000000000000000..d64cd4917707c1f8861d8cb53dd15194d4248596
GIT binary patch
literal 43462
[base85-encoded payload of the 43462-byte gradle-wrapper.jar omitted]
z(%QsB5fo*FUZxK$oX~X^69;x~j7ms8xlzpt-T15e9}$4T-pC z6PFg@;B-j|Ywajpe4~bk#S6(fO^|mm1hKOPfA%8-_iGCfICE|=P_~e;Wz6my&)h_~ zkv&_xSAw7AZ%ThYF(4jADW4vg=oEdJGVOs>FqamoL3Np8>?!W#!R-0%2Bg4h?kz5I zKV-rKN2n(vUL%D<4oj@|`eJ>0i#TmYBtYmfla;c!ATW%;xGQ0*TW@PTlGG><@dxUI zg>+3SiGdZ%?5N=8uoLA|$4isK$aJ%i{hECP$bK{J#0W2gQ3YEa zZQ50Stn6hqdfxJ*9#NuSLwKFCUGk@c=(igyVL;;2^wi4o30YXSIb2g_ud$ zgpCr@H0qWtk2hK8Q|&wx)}4+hTYlf;$a4#oUM=V@Cw#!$(nOFFpZ;0lc!qd=c$S}Z zGGI-0jg~S~cgVT=4Vo)b)|4phjStD49*EqC)IPwyeKBLcN;Wu@Aeph;emROAwJ-0< z_#>wVm$)ygH|qyxZaet&(Vf%pVdnvKWJn9`%DAxj3ot;v>S$I}jJ$FLBF*~iZ!ZXE zkvui&p}fI0Y=IDX)mm0@tAd|fEHl~J&K}ZX(Mm3cm1UAuwJ42+AO5@HwYfDH7ipIc zmI;1J;J@+aCNG1M`Btf>YT>~c&3j~Qi@Py5JT6;zjx$cvOQW@3oQ>|}GH?TW-E z1R;q^QFjm5W~7f}c3Ww|awg1BAJ^slEV~Pk`Kd`PS$7;SqJZNj->it4DW2l15}xP6 zoCl$kyEF%yJni0(L!Z&14m!1urXh6Btj_5JYt1{#+H8w?5QI%% zo-$KYWNMJVH?Hh@1n7OSu~QhSswL8x0=$<8QG_zepi_`y_79=nK=_ZP_`Em2UI*tyQoB+r{1QYZCpb?2OrgUw#oRH$?^Tj!Req>XiE#~B|~ z+%HB;=ic+R@px4Ld8mwpY;W^A%8%l8$@B@1m5n`TlKI6bz2mp*^^^1mK$COW$HOfp zUGTz-cN9?BGEp}5A!mDFjaiWa2_J2Iq8qj0mXzk; z66JBKRP{p%wN7XobR0YjhAuW9T1Gw3FDvR5dWJ8ElNYF94eF3ebu+QwKjtvVu4L zI9ip#mQ@4uqVdkl-TUQMb^XBJVLW(-$s;Nq;@5gr4`UfLgF$adIhd?rHOa%D);whv z=;krPp~@I+-Z|r#s3yCH+c1US?dnm+C*)r{m+86sTJusLdNu^sqLrfWed^ndHXH`m zd3#cOe3>w-ga(Dus_^ppG9AC>Iq{y%%CK+Cro_sqLCs{VLuK=dev>OL1dis4(PQ5R zcz)>DjEkfV+MO;~>VUlYF00SgfUo~@(&9$Iy2|G0T9BSP?&T22>K46D zL*~j#yJ?)^*%J3!16f)@Y2Z^kS*BzwfAQ7K96rFRIh>#$*$_Io;z>ux@}G98!fWR@ zGTFxv4r~v)Gsd|pF91*-eaZ3Qw1MH$K^7JhWIdX%o$2kCbvGDXy)a?@8T&1dY4`;L z4Kn+f%SSFWE_rpEpL9bnlmYq`D!6F%di<&Hh=+!VI~j)2mfil03T#jJ_s?}VV0_hp z7T9bWxc>Jm2Z0WMU?`Z$xE74Gu~%s{mW!d4uvKCx@WD+gPUQ zV0vQS(Ig++z=EHN)BR44*EDSWIyT~R4$FcF*VEY*8@l=218Q05D2$|fXKFhRgBIEE zdDFB}1dKkoO^7}{5crKX!p?dZWNz$m>1icsXG2N+((x0OIST9Zo^DW_tytvlwXGpn zs8?pJXjEG;T@qrZi%#h93?FP$!&P4JA(&H61tqQi=opRzNpm zkrG}$^t9&XduK*Qa1?355wd8G2CI6QEh@Ua>AsD;7oRUNLPb76m4HG3K?)wF~IyS3`fXuNM>${?wmB zpVz;?6_(Fiadfd{vUCBM*_kt$+F3J+IojI;9L(gc9n3{sEZyzR9o!_mOwFC#tQ{Q~ zP3-`#uK#tP3Q7~Q;4H|wjZHO8h7e4IuBxl&vz2w~D8)w=Wtg31zpZhz%+kzSzL*dV zwp@{WU4i;hJ7c2f1O;7Mz6qRKeASoIv0_bV=i@NMG*l<#+;INk-^`5w@}Dj~;k=|}qM1vq_P z|GpBGe_IKq|LNy9SJhKOQ$c=5L{Dv|Q_lZl=-ky*BFBJLW9&y_C|!vyM~rQx=!vun z?rZJQB5t}Dctmui5i31C_;_}CEn}_W%>oSXtt>@kE1=JW*4*v4tPp;O6 zmAk{)m!)}34pTWg8{i>($%NQ(Tl;QC@J@FfBoc%Gr&m560^kgSfodAFrIjF}aIw)X zoXZ`@IsMkc8_=w%-7`D6Y4e*CG8k%Ud=GXhsTR50jUnm+R*0A(O3UKFg0`K;qp1bl z7``HN=?39ic_kR|^R^~w-*pa?Vj#7|e9F1iRx{GN2?wK!xR1GW!qa=~pjJb-#u1K8 zeR?Y2i-pt}yJq;SCiVHODIvQJX|ZJaT8nO+(?HXbLefulKKgM^B(UIO1r+S=7;kLJ zcH}1J=Px2jsh3Tec&v8Jcbng8;V-`#*UHt?hB(pmOipKwf3Lz8rG$heEB30Sg*2rx zV<|KN86$soN(I!BwO`1n^^uF2*x&vJ$2d$>+`(romzHP|)K_KkO6Hc>_dwMW-M(#S zK(~SiXT1@fvc#U+?|?PniDRm01)f^#55;nhM|wi?oG>yBsa?~?^xTU|fX-R(sTA+5 zaq}-8Tx7zrOy#3*JLIIVsBmHYLdD}!0NP!+ITW+Thn0)8SS!$@)HXwB3tY!fMxc#1 zMp3H?q3eD?u&Njx4;KQ5G>32+GRp1Ee5qMO0lZjaRRu&{W<&~DoJNGkcYF<5(Ab+J zgO>VhBl{okDPn78<%&e2mR{jwVCz5Og;*Z;;3%VvoGo_;HaGLWYF7q#jDX=Z#Ml`H z858YVV$%J|e<1n`%6Vsvq7GmnAV0wW4$5qQ3uR@1i>tW{xrl|ExywIc?fNgYlA?C5 zh$ezAFb5{rQu6i7BSS5*J-|9DQ{6^BVQ{b*lq`xS@RyrsJN?-t=MTMPY;WYeKBCNg z^2|pN!Q^WPJuuO4!|P@jzt&tY1Y8d%FNK5xK(!@`jO2aEA*4 zkO6b|UVBipci?){-Ke=+1;mGlND8)6+P;8sq}UXw2hn;fc7nM>g}GSMWu&v&fqh

iViYT=fZ(|3Ox^$aWPp4a8h24tD<|8-!aK0lHgL$N7Efw}J zVIB!7=T$U`ao1?upi5V4Et*-lTG0XvExbf!ya{cua==$WJyVG(CmA6Of*8E@DSE%L z`V^$qz&RU$7G5mg;8;=#`@rRG`-uS18$0WPN@!v2d{H2sOqP|!(cQ@ zUHo!d>>yFArLPf1q`uBvY32miqShLT1B@gDL4XoVTK&@owOoD)OIHXrYK-a1d$B{v zF^}8D3Y^g%^cnvScOSJR5QNH+BI%d|;J;wWM3~l>${fb8DNPg)wrf|GBP8p%LNGN# z3EaIiItgwtGgT&iYCFy9-LG}bMI|4LdmmJt@V@% zb6B)1kc=T)(|L@0;wr<>=?r04N;E&ef+7C^`wPWtyQe(*pD1pI_&XHy|0gIGHMekd zF_*M4yi6J&Z4LQj65)S zXwdM{SwUo%3SbPwFsHgqF@V|6afT|R6?&S;lw=8% z3}@9B=#JI3@B*#4s!O))~z zc>2_4Q_#&+5V`GFd?88^;c1i7;Vv_I*qt!_Yx*n=;rj!82rrR2rQ8u5(Ejlo{15P% zs~!{%XJ>FmJ})H^I9bn^Re&38H{xA!0l3^89k(oU;bZWXM@kn$#aoS&Y4l^-WEn-fH39Jb9lA%s*WsKJQl?n9B7_~P z-XM&WL7Z!PcoF6_D>V@$CvUIEy=+Z&0kt{szMk=f1|M+r*a43^$$B^MidrT0J;RI` z(?f!O<8UZkm$_Ny$Hth1J#^4ni+im8M9mr&k|3cIgwvjAgjH z8`N&h25xV#v*d$qBX5jkI|xOhQn!>IYZK7l5#^P4M&twe9&Ey@@GxYMxBZq2e7?`q z$~Szs0!g{2fGcp9PZEt|rdQ6bhAgpcLHPz?f-vB?$dc*!9OL?Q8mn7->bFD2Si60* z!O%y)fCdMSV|lkF9w%x~J*A&srMyYY3{=&$}H zGQ4VG_?$2X(0|vT0{=;W$~icCI{b6W{B!Q8xdGhF|D{25G_5_+%s(46lhvNLkik~R z>nr(&C#5wwOzJZQo9m|U<;&Wk!_#q|V>fsmj1g<6%hB{jGoNUPjgJslld>xmODzGjYc?7JSuA?A_QzjDw5AsRgi@Y|Z0{F{!1=!NES-#*f^s4l0Hu zz468))2IY5dmD9pa*(yT5{EyP^G>@ZWumealS-*WeRcZ}B%gxq{MiJ|RyX-^C1V=0 z@iKdrGi1jTe8Ya^x7yyH$kBNvM4R~`fbPq$BzHum-3Zo8C6=KW@||>zsA8-Y9uV5V z#oq-f5L5}V<&wF4@X@<3^C%ptp6+Ce)~hGl`kwj)bsAjmo_GU^r940Z-|`<)oGnh7 zFF0Tde3>ui?8Yj{sF-Z@)yQd~CGZ*w-6p2U<8}JO-sRsVI5dBji`01W8A&3$?}lxBaC&vn0E$c5tW* zX>5(zzZ=qn&!J~KdsPl;P@bmA-Pr8T*)eh_+Dv5=Ma|XSle6t(k8qcgNyar{*ReQ8 zTXwi=8vr>!3Ywr+BhggHDw8ke==NTQVMCK`$69fhzEFB*4+H9LIvdt-#IbhZvpS}} zO3lz;P?zr0*0$%-Rq_y^k(?I{Mk}h@w}cZpMUp|ucs55bcloL2)($u%mXQw({Wzc~ z;6nu5MkjP)0C(@%6Q_I_vsWrfhl7Zpoxw#WoE~r&GOSCz;_ro6i(^hM>I$8y>`!wW z*U^@?B!MMmb89I}2(hcE4zN2G^kwyWCZp5JG>$Ez7zP~D=J^LMjSM)27_0B_X^C(M z`fFT+%DcKlu?^)FCK>QzSnV%IsXVcUFhFdBP!6~se&xxrIxsvySAWu++IrH;FbcY$ z2DWTvSBRfLwdhr0nMx+URA$j3i7_*6BWv#DXfym?ZRDcX9C?cY9sD3q)uBDR3uWg= z(lUIzB)G$Hr!){>E{s4Dew+tb9kvToZp-1&c?y2wn@Z~(VBhqz`cB;{E4(P3N2*nJ z_>~g@;UF2iG{Kt(<1PyePTKahF8<)pozZ*xH~U-kfoAayCwJViIrnqwqO}7{0pHw$ zs2Kx?s#vQr7XZ264>5RNKSL8|Ty^=PsIx^}QqOOcfpGUU4tRkUc|kc7-!Ae6!+B{o~7nFpm3|G5^=0#Bnm6`V}oSQlrX(u%OWnC zoLPy&Q;1Jui&7ST0~#+}I^&?vcE*t47~Xq#YwvA^6^} z`WkC)$AkNub|t@S!$8CBlwbV~?yp&@9h{D|3z-vJXgzRC5^nYm+PyPcgRzAnEi6Q^gslXYRv4nycsy-SJu?lMps-? zV`U*#WnFsdPLL)Q$AmD|0`UaC4ND07+&UmOu!eHruzV|OUox<+Jl|Mr@6~C`T@P%s zW7sgXLF2SSe9Fl^O(I*{9wsFSYb2l%-;&Pi^dpv!{)C3d0AlNY6!4fgmSgj_wQ*7Am7&$z;Jg&wgR-Ih;lUvWS|KTSg!&s_E9_bXBkZvGiC6bFKDWZxsD$*NZ#_8bl zG1P-#@?OQzED7@jlMJTH@V!6k;W>auvft)}g zhoV{7$q=*;=l{O>Q4a@ ziMjf_u*o^PsO)#BjC%0^h>Xp@;5$p{JSYDt)zbb}s{Kbt!T*I@Pk@X0zds6wsefuU zW$XY%yyRGC94=6mf?x+bbA5CDQ2AgW1T-jVAJbm7K(gp+;v6E0WI#kuACgV$r}6L? 
zd|Tj?^%^*N&b>Dd{Wr$FS2qI#Ucs1yd4N+RBUQiSZGujH`#I)mG&VKoDh=KKFl4=G z&MagXl6*<)$6P}*Tiebpz5L=oMaPrN+caUXRJ`D?=K9!e0f{@D&cZLKN?iNP@X0aF zE(^pl+;*T5qt?1jRC=5PMgV!XNITRLS_=9{CJExaQj;lt!&pdzpK?8p>%Mb+D z?yO*uSung=-`QQ@yX@Hyd4@CI^r{2oiu`%^bNkz+Nkk!IunjwNC|WcqvX~k=><-I3 zDQdbdb|!v+Iz01$w@aMl!R)koD77Xp;eZwzSl-AT zr@Vu{=xvgfq9akRrrM)}=!=xcs+U1JO}{t(avgz`6RqiiX<|hGG1pmop8k6Q+G_mv zJv|RfDheUp2L3=^C=4aCBMBn0aRCU(DQwX-W(RkRwmLeuJYF<0urcaf(=7)JPg<3P zQs!~G)9CT18o!J4{zX{_e}4eS)U-E)0FAt}wEI(c0%HkxgggW;(1E=>J17_hsH^sP z%lT0LGgbUXHx-K*CI-MCrP66UP0PvGqM$MkeLyqHdbgP|_Cm!7te~b8p+e6sQ_3k| zVcwTh6d83ltdnR>D^)BYQpDKlLk3g0Hdcgz2}%qUs9~~Rie)A-BV1mS&naYai#xcZ z(d{8=-LVpTp}2*y)|gR~;qc7fp26}lPcLZ#=JpYcn3AT9(UIdOyg+d(P5T7D&*P}# zQCYplZO5|7+r19%9e`v^vfSS1sbX1c%=w1;oyruXB%Kl$ACgKQ6=qNWLsc=28xJjg zwvsI5-%SGU|3p>&zXVl^vVtQT3o-#$UT9LI@Npz~6=4!>mc431VRNN8od&Ul^+G_kHC`G=6WVWM z%9eWNyy(FTO|A+@x}Ou3CH)oi;t#7rAxdIXfNFwOj_@Y&TGz6P_sqiB`Q6Lxy|Q{`|fgmRG(k+!#b*M+Z9zFce)f-7;?Km5O=LHV9f9_87; zF7%R2B+$?@sH&&-$@tzaPYkw0;=i|;vWdI|Wl3q_Zu>l;XdIw2FjV=;Mq5t1Q0|f< zs08j54Bp`3RzqE=2enlkZxmX6OF+@|2<)A^RNQpBd6o@OXl+i)zO%D4iGiQNuXd+zIR{_lb96{lc~bxsBveIw6umhShTX+3@ZJ=YHh@ zWY3(d0azg;7oHn>H<>?4@*RQbi>SmM=JrHvIG(~BrvI)#W(EAeO6fS+}mxxcc+X~W6&YVl86W9WFSS}Vz-f9vS?XUDBk)3TcF z8V?$4Q)`uKFq>xT=)Y9mMFVTUk*NIA!0$?RP6Ig0TBmUFrq*Q-Agq~DzxjStQyJ({ zBeZ;o5qUUKg=4Hypm|}>>L=XKsZ!F$yNTDO)jt4H0gdQ5$f|d&bnVCMMXhNh)~mN z@_UV6D7MVlsWz+zM+inZZp&P4fj=tm6fX)SG5H>OsQf_I8c~uGCig$GzuwViK54bcgL;VN|FnyQl>Ed7(@>=8$a_UKIz|V6CeVSd2(P z0Uu>A8A+muM%HLFJQ9UZ5c)BSAv_zH#1f02x?h9C}@pN@6{>UiAp>({Fn(T9Q8B z^`zB;kJ5b`>%dLm+Ol}ty!3;8f1XDSVX0AUe5P#@I+FQ-`$(a;zNgz)4x5hz$Hfbg z!Q(z26wHLXko(1`;(BAOg_wShpX0ixfWq3ponndY+u%1gyX)_h=v1zR#V}#q{au6; z!3K=7fQwnRfg6FXtNQmP>`<;!N137paFS%y?;lb1@BEdbvQHYC{976l`cLqn;b8lp zIDY>~m{gDj(wfnK!lpW6pli)HyLEiUrNc%eXTil|F2s(AY+LW5hkKb>TQ3|Q4S9rr zpDs4uK_co6XPsn_z$LeS{K4jFF`2>U`tbgKdyDne`xmR<@6AA+_hPNKCOR-Zqv;xk zu5!HsBUb^!4uJ7v0RuH-7?l?}b=w5lzzXJ~gZcxRKOovSk@|#V+MuX%Y+=;14i*%{)_gSW9(#4%)AV#3__kac1|qUy!uyP{>?U#5wYNq}y$S9pCc zFc~4mgSC*G~j0u#qqp9 z${>3HV~@->GqEhr_Xwoxq?Hjn#=s2;i~g^&Hn|aDKpA>Oc%HlW(KA1?BXqpxB;Ydx)w;2z^MpjJ(Qi(X!$5RC z*P{~%JGDQqojV>2JbEeCE*OEu!$XJ>bWA9Oa_Hd;y)F%MhBRi*LPcdqR8X`NQ&1L# z5#9L*@qxrx8n}LfeB^J{%-?SU{FCwiWyHp682F+|pa+CQa3ZLzBqN1{)h4d6+vBbV zC#NEbQLC;}me3eeYnOG*nXOJZEU$xLZ1<1Y=7r0(-U0P6-AqwMAM`a(Ed#7vJkn6plb4eI4?2y3yOTGmmDQ!z9`wzbf z_OY#0@5=bnep;MV0X_;;SJJWEf^E6Bd^tVJ9znWx&Ks8t*B>AM@?;D4oWUGc z!H*`6d7Cxo6VuyS4Eye&L1ZRhrRmN6Lr`{NL(wDbif|y&z)JN>Fl5#Wi&mMIr5i;x zBx}3YfF>>8EC(fYnmpu~)CYHuHCyr5*`ECap%t@y=jD>!_%3iiE|LN$mK9>- zHdtpy8fGZtkZF?%TW~29JIAfi2jZT8>OA7=h;8T{{k?c2`nCEx9$r zS+*&vt~2o^^J+}RDG@+9&M^K*z4p{5#IEVbz`1%`m5c2};aGt=V?~vIM}ZdPECDI)47|CWBCfDWUbxBCnmYivQ*0Nu_xb*C>~C9(VjHM zxe<*D<#dQ8TlpMX2c@M<9$w!RP$hpG4cs%AI){jp*Sj|*`m)5(Bw*A0$*i-(CA5#%>a)$+jI2C9r6|(>J8InryENI z$NohnxDUB;wAYDwrb*!N3noBTKPpPN}~09SEL18tkG zxgz(RYU_;DPT{l?Q$+eaZaxnsWCA^ds^0PVRkIM%bOd|G2IEBBiz{&^JtNsODs;5z zICt_Zj8wo^KT$7Bg4H+y!Df#3mbl%%?|EXe!&(Vmac1DJ*y~3+kRKAD=Ovde4^^%~ zw<9av18HLyrf*_>Slp;^i`Uy~`mvBjZ|?Ad63yQa#YK`4+c6;pW4?XIY9G1(Xh9WO8{F-Aju+nS9Vmv=$Ac0ienZ+p9*O%NG zMZKy5?%Z6TAJTE?o5vEr0r>f>hb#2w2U3DL64*au_@P!J!TL`oH2r*{>ffu6|A7tv zL4juf$DZ1MW5ZPsG!5)`k8d8c$J$o;%EIL0va9&GzWvkS%ZsGb#S(?{!UFOZ9<$a| zY|a+5kmD5N&{vRqkgY>aHsBT&`rg|&kezoD)gP0fsNYHsO#TRc_$n6Lf1Z{?+DLziXlHrq4sf(!>O{?Tj;Eh@%)+nRE_2VxbN&&%%caU#JDU%vL3}Cb zsb4AazPI{>8H&d=jUaZDS$-0^AxE@utGs;-Ez_F(qC9T=UZX=>ok2k2 ziTn{K?y~a5reD2A)P${NoI^>JXn>`IeArow(41c-Wm~)wiryEP(OS{YXWi7;%dG9v zI?mwu1MxD{yp_rrk!j^cKM)dc4@p4Ezyo%lRN|XyD}}>v=Xoib0gOcdXrQ^*61HNj z=NP|pd>@yfvr-=m{8$3A8TQGMTE7g=z!%yt`8`Bk-0MMwW~h^++;qyUP!J~ykh1GO 
z(FZ59xuFR$(WE;F@UUyE@Sp>`aVNjyj=Ty>_Vo}xf`e7`F;j-IgL5`1~-#70$9_=uBMq!2&1l zomRgpD58@)YYfvLtPW}{C5B35R;ZVvB<<#)x%srmc_S=A7F@DW8>QOEGwD6suhwCg z>Pa+YyULhmw%BA*4yjDp|2{!T98~<6Yfd(wo1mQ!KWwq0eg+6)o1>W~f~kL<-S+P@$wx*zeI|1t7z#Sxr5 zt6w+;YblPQNplq4Z#T$GLX#j6yldXAqj>4gAnnWtBICUnA&-dtnlh=t0Ho_vEKwV` z)DlJi#!@nkYV#$!)@>udAU*hF?V`2$Hf=V&6PP_|r#Iv*J$9)pF@X3`k;5})9^o4y z&)~?EjX5yX12O(BsFy-l6}nYeuKkiq`u9145&3Ssg^y{5G3Pse z9w(YVa0)N-fLaBq1`P!_#>SS(8fh_5!f{UrgZ~uEdeMJIz7DzI5!NHHqQtm~#CPij z?=N|J>nPR6_sL7!f4hD_|KH`vf8(Wpnj-(gPWH+ZvID}%?~68SwhPTC3u1_cB`otq z)U?6qo!ZLi5b>*KnYHWW=3F!p%h1;h{L&(Q&{qY6)_qxNfbP6E3yYpW!EO+IW3?@J z);4>g4gnl^8klu7uA>eGF6rIGSynacogr)KUwE_R4E5Xzi*Qir@b-jy55-JPC8c~( zo!W8y9OGZ&`xmc8;=4-U9=h{vCqfCNzYirONmGbRQlR`WWlgnY+1wCXbMz&NT~9*| z6@FrzP!LX&{no2!Ln_3|I==_4`@}V?4a;YZKTdw;vT<+K+z=uWbW(&bXEaWJ^W8Td z-3&1bY^Z*oM<=M}LVt>_j+p=2Iu7pZmbXrhQ_k)ysE9yXKygFNw$5hwDn(M>H+e1&9BM5!|81vd%r%vEm zqxY3?F@fb6O#5UunwgAHR9jp_W2zZ}NGp2%mTW@(hz7$^+a`A?mb8|_G*GNMJ) zjqegXQio=i@AINre&%ofexAr95aop5C+0MZ0m-l=MeO8m3epm7U%vZB8+I+C*iNFM z#T3l`gknX;D$-`2XT^Cg*vrv=RH+P;_dfF++cP?B_msQI4j+lt&rX2)3GaJx%W*Nn zkML%D{z5tpHH=dksQ*gzc|}gzW;lwAbxoR07VNgS*-c3d&8J|;@3t^ zVUz*J*&r7DFRuFVDCJDK8V9NN5hvpgGjwx+5n)qa;YCKe8TKtdnh{I7NU9BCN!0dq zczrBk8pE{{@vJa9ywR@mq*J=v+PG;?fwqlJVhijG!3VmIKs>9T6r7MJpC)m!Tc#>g zMtVsU>wbwFJEfwZ{vB|ZlttNe83)$iz`~#8UJ^r)lJ@HA&G#}W&ZH*;k{=TavpjWE z7hdyLZPf*X%Gm}i`Y{OGeeu^~nB8=`{r#TUrM-`;1cBvEd#d!kPqIgYySYhN-*1;L z^byj%Yi}Gx)Wnkosi337BKs}+5H5dth1JA{Ir-JKN$7zC)*}hqeoD(WfaUDPT>0`- z(6sa0AoIqASwF`>hP}^|)a_j2s^PQn*qVC{Q}htR z5-)duBFXT_V56-+UohKXlq~^6uf!6sA#ttk1o~*QEy_Y-S$gAvq47J9Vtk$5oA$Ct zYhYJ@8{hsC^98${!#Ho?4y5MCa7iGnfz}b9jE~h%EAAv~Qxu)_rAV;^cygV~5r_~?l=B`zObj7S=H=~$W zPtI_m%g$`kL_fVUk9J@>EiBH zOO&jtn~&`hIFMS5S`g8w94R4H40mdNUH4W@@XQk1sr17b{@y|JB*G9z1|CrQjd+GX z6+KyURG3;!*BQrentw{B2R&@2&`2}n(z-2&X7#r!{yg@Soy}cRD~j zj9@UBW+N|4HW4AWapy4wfUI- zZ`gSL6DUlgj*f1hSOGXG0IVH8HxK?o2|3HZ;KW{K+yPAlxtb)NV_2AwJm|E)FRs&& z=c^e7bvUsztY|+f^k7NXs$o1EUq>cR7C0$UKi6IooHWlK_#?IWDkvywnzg&ThWo^? z2O_N{5X39#?eV9l)xI(>@!vSB{DLt*oY!K1R8}_?%+0^C{d9a%N4 zoxHVT1&Lm|uDX%$QrBun5e-F`HJ^T$ zmzv)p@4ZHd_w9!%Hf9UYNvGCw2TTTbrj9pl+T9%-_-}L(tES>Or-}Z4F*{##n3~L~TuxjirGuIY#H7{%$E${?p{Q01 zi6T`n;rbK1yIB9jmQNycD~yZq&mbIsFWHo|ZAChSFPQa<(%d8mGw*V3fh|yFoxOOiWJd(qvVb!Z$b88cg->N=qO*4k~6;R==|9ihg&riu#P~s4Oap9O7f%crSr^rljeIfXDEg>wi)&v*a%7zpz<9w z*r!3q9J|390x`Zk;g$&OeN&ctp)VKRpDSV@kU2Q>jtok($Y-*x8_$2piTxun81@vt z!Vj?COa0fg2RPXMSIo26T=~0d`{oGP*eV+$!0I<(4azk&Vj3SiG=Q!6mX0p$z7I}; z9BJUFgT-K9MQQ-0@Z=^7R<{bn2Fm48endsSs`V7_@%8?Bxkqv>BDoVcj?K#dV#uUP zL1ND~?D-|VGKe3Rw_7-Idpht>H6XRLh*U7epS6byiGvJpr%d}XwfusjH9g;Z98H`x zyde%%5mhGOiL4wljCaWCk-&uE4_OOccb9c!ZaWt4B(wYl!?vyzl%7n~QepN&eFUrw zFIOl9c({``6~QD+43*_tzP{f2x41h(?b43^y6=iwyB)2os5hBE!@YUS5?N_tXd=h( z)WE286Fbd>R4M^P{!G)f;h<3Q>Fipuy+d2q-)!RyTgt;wr$(?9ox3;q+{E*ZQHhOn;lM`cjnu9 zXa48ks-v(~b*;MAI<>YZH(^NV8vjb34beE<_cwKlJoR;k6lJNSP6v}uiyRD?|0w+X@o1ONrH8a$fCxXpf? 
z?$DL0)7|X}Oc%h^zrMKWc-NS9I0Utu@>*j}b@tJ=ixQSJ={4@854wzW@E>VSL+Y{i z#0b=WpbCZS>kUCO_iQz)LoE>P5LIG-hv9E+oG}DtlIDF>$tJ1aw9^LuhLEHt?BCj& z(O4I8v1s#HUi5A>nIS-JK{v!7dJx)^Yg%XjNmlkWAq2*cv#tHgz`Y(bETc6CuO1VkN^L-L3j_x<4NqYb5rzrLC-7uOv z!5e`GZt%B782C5-fGnn*GhDF$%(qP<74Z}3xx+{$4cYKy2ikxI7B2N+2r07DN;|-T->nU&!=Cm#rZt%O_5c&1Z%nlWq3TKAW0w zQqemZw_ue--2uKQsx+niCUou?HjD`xhEjjQd3%rrBi82crq*~#uA4+>vR<_S{~5ce z-2EIl?~s z1=GVL{NxP1N3%=AOaC}j_Fv=ur&THz zyO!d9kHq|c73kpq`$+t+8Bw7MgeR5~`d7ChYyGCBWSteTB>8WAU(NPYt2Dk`@#+}= zI4SvLlyk#pBgVigEe`?NG*vl7V6m+<}%FwPV=~PvvA)=#ths==DRTDEYh4V5}Cf$z@#;< zyWfLY_5sP$gc3LLl2x+Ii)#b2nhNXJ{R~vk`s5U7Nyu^3yFg&D%Txwj6QezMX`V(x z=C`{76*mNb!qHHs)#GgGZ_7|vkt9izl_&PBrsu@}L`X{95-2jf99K)0=*N)VxBX2q z((vkpP2RneSIiIUEnGb?VqbMb=Zia+rF~+iqslydE34cSLJ&BJW^3knX@M;t*b=EA zNvGzv41Ld_T+WT#XjDB840vovUU^FtN_)G}7v)1lPetgpEK9YS^OWFkPoE{ovj^=@ zO9N$S=G$1ecndT_=5ehth2Lmd1II-PuT~C9`XVePw$y8J#dpZ?Tss<6wtVglm(Ok7 z3?^oi@pPio6l&!z8JY(pJvG=*pI?GIOu}e^EB6QYk$#FJQ%^AIK$I4epJ+9t?KjqA+bkj&PQ*|vLttme+`9G=L% ziadyMw_7-M)hS(3E$QGNCu|o23|%O+VN7;Qggp?PB3K-iSeBa2b}V4_wY`G1Jsfz4 z9|SdB^;|I8E8gWqHKx!vj_@SMY^hLEIbSMCuE?WKq=c2mJK z8LoG-pnY!uhqFv&L?yEuxo{dpMTsmCn)95xanqBrNPTgXP((H$9N${Ow~Is-FBg%h z53;|Y5$MUN)9W2HBe2TD`ct^LHI<(xWrw}$qSoei?}s)&w$;&!14w6B6>Yr6Y8b)S z0r71`WmAvJJ`1h&poLftLUS6Ir zC$bG9!Im_4Zjse)#K=oJM9mHW1{%l8sz$1o?ltdKlLTxWWPB>Vk22czVt|1%^wnN@*!l)}?EgtvhC>vlHm^t+ogpgHI1_$1ox9e;>0!+b(tBrmXRB`PY1vp-R**8N7 zGP|QqI$m(Rdu#=(?!(N}G9QhQ%o!aXE=aN{&wtGP8|_qh+7a_j_sU5|J^)vxq;# zjvzLn%_QPHZZIWu1&mRAj;Sa_97p_lLq_{~j!M9N^1yp3U_SxRqK&JnR%6VI#^E12 z>CdOVI^_9aPK2eZ4h&^{pQs}xsijXgFYRIxJ~N7&BB9jUR1fm!(xl)mvy|3e6-B3j zJn#ajL;bFTYJ2+Q)tDjx=3IklO@Q+FFM}6UJr6km7hj7th9n_&JR7fnqC!hTZoM~T zBeaVFp%)0cbPhejX<8pf5HyRUj2>aXnXBqDJe73~J%P(2C?-RT{c3NjE`)om! zl$uewSgWkE66$Kb34+QZZvRn`fob~Cl9=cRk@Es}KQm=?E~CE%spXaMO6YmrMl%9Q zlA3Q$3|L1QJ4?->UjT&CBd!~ru{Ih^in&JXO=|<6J!&qp zRe*OZ*cj5bHYlz!!~iEKcuE|;U4vN1rk$xq6>bUWD*u(V@8sG^7>kVuo(QL@Ki;yL zWC!FT(q{E8#on>%1iAS0HMZDJg{Z{^!De(vSIq&;1$+b)oRMwA3nc3mdTSG#3uYO_ z>+x;7p4I;uHz?ZB>dA-BKl+t-3IB!jBRgdvAbW!aJ(Q{aT>+iz?91`C-xbe)IBoND z9_Xth{6?(y3rddwY$GD65IT#f3<(0o#`di{sh2gm{dw*#-Vnc3r=4==&PU^hCv$qd zjw;>i&?L*Wq#TxG$mFIUf>eK+170KG;~+o&1;Tom9}}mKo23KwdEM6UonXgc z!6N(@k8q@HPw{O8O!lAyi{rZv|DpgfU{py+j(X_cwpKqcalcqKIr0kM^%Br3SdeD> zHSKV94Yxw;pjzDHo!Q?8^0bb%L|wC;4U^9I#pd5O&eexX+Im{ z?jKnCcsE|H?{uGMqVie_C~w7GX)kYGWAg%-?8|N_1#W-|4F)3YTDC+QSq1s!DnOML3@d`mG%o2YbYd#jww|jD$gotpa)kntakp#K;+yo-_ZF9qrNZw<%#C zuPE@#3RocLgPyiBZ+R_-FJ_$xP!RzWm|aN)S+{$LY9vvN+IW~Kf3TsEIvP+B9Mtm! 
zpfNNxObWQpLoaO&cJh5>%slZnHl_Q~(-Tfh!DMz(dTWld@LG1VRF`9`DYKhyNv z2pU|UZ$#_yUx_B_|MxUq^glT}O5Xt(Vm4Mr02><%C)@v;vPb@pT$*yzJ4aPc_FZ3z z3}PLoMBIM>q_9U2rl^sGhk1VUJ89=*?7|v`{!Z{6bqFMq(mYiA?%KbsI~JwuqVA9$H5vDE+VocjX+G^%bieqx->s;XWlKcuv(s%y%D5Xbc9+ zc(_2nYS1&^yL*ey664&4`IoOeDIig}y-E~_GS?m;D!xv5-xwz+G`5l6V+}CpeJDi^ z%4ed$qowm88=iYG+(`ld5Uh&>Dgs4uPHSJ^TngXP_V6fPyl~>2bhi20QB%lSd#yYn zO05?KT1z@?^-bqO8Cg`;ft>ilejsw@2%RR7;`$Vs;FmO(Yr3Fp`pHGr@P2hC%QcA|X&N2Dn zYf`MqXdHi%cGR@%y7Rg7?d3?an){s$zA{!H;Ie5exE#c~@NhQUFG8V=SQh%UxUeiV zd7#UcYqD=lk-}sEwlpu&H^T_V0{#G?lZMxL7ih_&{(g)MWBnCZxtXg znr#}>U^6!jA%e}@Gj49LWG@*&t0V>Cxc3?oO7LSG%~)Y5}f7vqUUnQ;STjdDU}P9IF9d9<$;=QaXc zL1^X7>fa^jHBu_}9}J~#-oz3Oq^JmGR#?GO7b9a(=R@fw@}Q{{@`Wy1vIQ#Bw?>@X z-_RGG@wt|%u`XUc%W{J z>iSeiz8C3H7@St3mOr_mU+&bL#Uif;+Xw-aZdNYUpdf>Rvu0i0t6k*}vwU`XNO2he z%miH|1tQ8~ZK!zmL&wa3E;l?!!XzgV#%PMVU!0xrDsNNZUWKlbiOjzH-1Uoxm8E#r`#2Sz;-o&qcqB zC-O_R{QGuynW14@)7&@yw1U}uP(1cov)twxeLus0s|7ayrtT8c#`&2~Fiu2=R;1_4bCaD=*E@cYI>7YSnt)nQc zohw5CsK%m?8Ack)qNx`W0_v$5S}nO|(V|RZKBD+btO?JXe|~^Qqur%@eO~<8-L^9d z=GA3-V14ng9L29~XJ>a5k~xT2152zLhM*@zlp2P5Eu}bywkcqR;ISbas&#T#;HZSf z2m69qTV(V@EkY(1Dk3`}j)JMo%ZVJ*5eB zYOjIisi+igK0#yW*gBGj?@I{~mUOvRFQR^pJbEbzFxTubnrw(Muk%}jI+vXmJ;{Q6 zrSobKD>T%}jV4Ub?L1+MGOD~0Ir%-`iTnWZN^~YPrcP5y3VMAzQ+&en^VzKEb$K!Q z<7Dbg&DNXuow*eD5yMr+#08nF!;%4vGrJI++5HdCFcGLfMW!KS*Oi@=7hFwDG!h2< zPunUEAF+HncQkbfFj&pbzp|MU*~60Z(|Ik%Tn{BXMN!hZOosNIseT?R;A`W?=d?5X zK(FB=9mZusYahp|K-wyb={rOpdn=@;4YI2W0EcbMKyo~-#^?h`BA9~o285%oY zfifCh5Lk$SY@|2A@a!T2V+{^!psQkx4?x0HSV`(w9{l75QxMk!)U52Lbhn{8ol?S) zCKo*7R(z!uk<6*qO=wh!Pul{(qq6g6xW;X68GI_CXp`XwO zxuSgPRAtM8K7}5E#-GM!*ydOOG_{A{)hkCII<|2=ma*71ci_-}VPARm3crFQjLYV! z9zbz82$|l01mv`$WahE2$=fAGWkd^X2kY(J7iz}WGS z@%MyBEO=A?HB9=^?nX`@nh;7;laAjs+fbo!|K^mE!tOB>$2a_O0y-*uaIn8k^6Y zSbuv;5~##*4Y~+y7Z5O*3w4qgI5V^17u*ZeupVGH^nM&$qmAk|anf*>r zWc5CV;-JY-Z@Uq1Irpb^O`L_7AGiqd*YpGUShb==os$uN3yYvb`wm6d=?T*it&pDk zo`vhw)RZX|91^^Wa_ti2zBFyWy4cJu#g)_S6~jT}CC{DJ_kKpT`$oAL%b^!2M;JgT zM3ZNbUB?}kP(*YYvXDIH8^7LUxz5oE%kMhF!rnPqv!GiY0o}NR$OD=ITDo9r%4E>E0Y^R(rS^~XjWyVI6 zMOR5rPXhTp*G*M&X#NTL`Hu*R+u*QNoiOKg4CtNPrjgH>c?Hi4MUG#I917fx**+pJfOo!zFM&*da&G_x)L(`k&TPI*t3e^{crd zX<4I$5nBQ8Ax_lmNRa~E*zS-R0sxkz`|>7q_?*e%7bxqNm3_eRG#1ae3gtV9!fQpY z+!^a38o4ZGy9!J5sylDxZTx$JmG!wg7;>&5H1)>f4dXj;B+@6tMlL=)cLl={jLMxY zbbf1ax3S4>bwB9-$;SN2?+GULu;UA-35;VY*^9Blx)Jwyb$=U!D>HhB&=jSsd^6yw zL)?a|>GxU!W}ocTC(?-%z3!IUhw^uzc`Vz_g>-tv)(XA#JK^)ZnC|l1`@CdX1@|!| z_9gQ)7uOf?cR@KDp97*>6X|;t@Y`k_N@)aH7gY27)COv^P3ya9I{4z~vUjLR9~z1Z z5=G{mVtKH*&$*t0@}-i_v|3B$AHHYale7>E+jP`ClqG%L{u;*ff_h@)al?RuL7tOO z->;I}>%WI{;vbLP3VIQ^iA$4wl6@0sDj|~112Y4OFjMs`13!$JGkp%b&E8QzJw_L5 zOnw9joc0^;O%OpF$Qp)W1HI!$4BaXX84`%@#^dk^hFp^pQ@rx4g(8Xjy#!X%+X5Jd@fs3amGT`}mhq#L97R>OwT5-m|h#yT_-v@(k$q7P*9X~T*3)LTdzP!*B} z+SldbVWrrwQo9wX*%FyK+sRXTa@O?WM^FGWOE?S`R(0P{<6p#f?0NJvnBia?k^fX2 zNQs7K-?EijgHJY}&zsr;qJ<*PCZUd*x|dD=IQPUK_nn)@X4KWtqoJNHkT?ZWL_hF? zS8lp2(q>;RXR|F;1O}EE#}gCrY~#n^O`_I&?&z5~7N;zL0)3Tup`%)oHMK-^r$NT% zbFg|o?b9w(q@)6w5V%si<$!U<#}s#x@0aX-hP>zwS#9*75VXA4K*%gUc>+yzupTDBOKH8WR4V0pM(HrfbQ&eJ79>HdCvE=F z|J>s;;iDLB^3(9}?biKbxf1$lI!*Z%*0&8UUq}wMyPs_hclyQQi4;NUY+x2qy|0J; zhn8;5)4ED1oHwg+VZF|80<4MrL97tGGXc5Sw$wAI#|2*cvQ=jB5+{AjMiDHmhUC*a zlmiZ`LAuAn_}hftXh;`Kq0zblDk8?O-`tnilIh|;3lZp@F_osJUV9`*R29M?7H{Fy z`nfVEIDIWXmU&YW;NjU8)EJpXhxe5t+scf|VXM!^bBlwNh)~7|3?fWwo_~ZFk(22% zTMesYw+LNx3J-_|DM~`v93yXe=jPD{q;li;5PD?Dyk+b? 
zo21|XpT@)$BM$%F=P9J19Vi&1#{jM3!^Y&fr&_`toi`XB1!n>sbL%U9I5<7!@?t)~ z;&H%z>bAaQ4f$wIzkjH70;<8tpUoxzKrPhn#IQfS%9l5=Iu))^XC<58D!-O z{B+o5R^Z21H0T9JQ5gNJnqh#qH^na|z92=hONIM~@_iuOi|F>jBh-?aA20}Qx~EpDGElELNn~|7WRXRFnw+Wdo`|# zBpU=Cz3z%cUJ0mx_1($X<40XEIYz(`noWeO+x#yb_pwj6)R(__%@_Cf>txOQ74wSJ z0#F3(zWWaR-jMEY$7C*3HJrohc79>MCUu26mfYN)f4M~4gD`}EX4e}A!U}QV8!S47 z6y-U-%+h`1n`*pQuKE%Av0@)+wBZr9mH}@vH@i{v(m-6QK7Ncf17x_D=)32`FOjjo zg|^VPf5c6-!FxN{25dvVh#fog=NNpXz zfB$o+0jbRkHH{!TKhE709f+jI^$3#v1Nmf80w`@7-5$1Iv_`)W^px8P-({xwb;D0y z7LKDAHgX<84?l!I*Dvi2#D@oAE^J|g$3!)x1Ua;_;<@#l1fD}lqU2_tS^6Ht$1Wl} zBESo7o^)9-Tjuz$8YQSGhfs{BQV6zW7dA?0b(Dbt=UnQs&4zHfe_sj{RJ4uS-vQpC zX;Bbsuju4%!o8?&m4UZU@~ZZjeFF6ex2ss5_60_JS_|iNc+R0GIjH1@Z z=rLT9%B|WWgOrR7IiIwr2=T;Ne?30M!@{%Qf8o`!>=s<2CBpCK_TWc(DX51>e^xh8 z&@$^b6CgOd7KXQV&Y4%}_#uN*mbanXq(2=Nj`L7H7*k(6F8s6{FOw@(DzU`4-*77{ zF+dxpv}%mFpYK?>N_2*#Y?oB*qEKB}VoQ@bzm>ptmVS_EC(#}Lxxx730trt0G)#$b zE=wVvtqOct1%*9}U{q<)2?{+0TzZzP0jgf9*)arV)*e!f`|jgT{7_9iS@e)recI#z zbzolURQ+TOzE!ymqvBY7+5NnAbWxvMLsLTwEbFqW=CPyCsmJ}P1^V30|D5E|p3BC5 z)3|qgw@ra7aXb-wsa|l^in~1_fm{7bS9jhVRkYVO#U{qMp z)Wce+|DJ}4<2gp8r0_xfZpMo#{Hl2MfjLcZdRB9(B(A(f;+4s*FxV{1F|4d`*sRNd zp4#@sEY|?^FIJ;tmH{@keZ$P(sLh5IdOk@k^0uB^BWr@pk6mHy$qf&~rI>P*a;h0C{%oA*i!VjWn&D~O#MxN&f@1Po# zKN+ zrGrkSjcr?^R#nGl<#Q722^wbYcgW@{+6CBS<1@%dPA8HC!~a`jTz<`g_l5N1M@9wn9GOAZ>nqNgq!yOCbZ@1z`U_N`Z>}+1HIZxk*5RDc&rd5{3qjRh8QmT$VyS;jK z;AF+r6XnnCp=wQYoG|rT2@8&IvKq*IB_WvS%nt%e{MCFm`&W*#LXc|HrD?nVBo=(8*=Aq?u$sDA_sC_RPDUiQ+wnIJET8vx$&fxkW~kP9qXKt zozR)@xGC!P)CTkjeWvXW5&@2?)qt)jiYWWBU?AUtzAN}{JE1I)dfz~7$;}~BmQF`k zpn11qmObXwRB8&rnEG*#4Xax3XBkKlw(;tb?Np^i+H8m(Wyz9k{~ogba@laiEk;2! zV*QV^6g6(QG%vX5Um#^sT&_e`B1pBW5yVth~xUs#0}nv?~C#l?W+9Lsb_5)!71rirGvY zTIJ$OPOY516Y|_014sNv+Z8cc5t_V=i>lWV=vNu#!58y9Zl&GsMEW#pPYPYGHQ|;vFvd*9eM==$_=vc7xnyz0~ zY}r??$<`wAO?JQk@?RGvkWVJlq2dk9vB(yV^vm{=NVI8dhsX<)O(#nr9YD?I?(VmQ z^r7VfUBn<~p3()8yOBjm$#KWx!5hRW)5Jl7wY@ky9lNM^jaT##8QGVsYeaVywmpv>X|Xj7gWE1Ezai&wVLt3p)k4w~yrskT-!PR!kiyQlaxl(( zXhF%Q9x}1TMt3~u@|#wWm-Vq?ZerK={8@~&@9r5JW}r#45#rWii};t`{5#&3$W)|@ zbAf2yDNe0q}NEUvq_Quq3cTjcw z@H_;$hu&xllCI9CFDLuScEMg|x{S7GdV8<&Mq=ezDnRZAyX-8gv97YTm0bg=d)(>N z+B2FcqvI9>jGtnK%eO%y zoBPkJTk%y`8TLf4)IXPBn`U|9>O~WL2C~C$z~9|0m*YH<-vg2CD^SX#&)B4ngOSG$ zV^wmy_iQk>dfN@Pv(ckfy&#ak@MLC7&Q6Ro#!ezM*VEh`+b3Jt%m(^T&p&WJ2Oqvj zs-4nq0TW6cv~(YI$n0UkfwN}kg3_fp?(ijSV#tR9L0}l2qjc7W?i*q01=St0eZ=4h zyGQbEw`9OEH>NMuIe)hVwYHsGERWOD;JxEiO7cQv%pFCeR+IyhwQ|y@&^24k+|8fD zLiOWFNJ2&vu2&`Jv96_z-Cd5RLgmeY3*4rDOQo?Jm`;I_(+ejsPM03!ly!*Cu}Cco zrQSrEDHNyzT(D5s1rZq!8#?f6@v6dB7a-aWs(Qk>N?UGAo{gytlh$%_IhyL7h?DLXDGx zgxGEBQoCAWo-$LRvM=F5MTle`M})t3vVv;2j0HZY&G z22^iGhV@uaJh(XyyY%} zd4iH_UfdV#T=3n}(Lj^|n;O4|$;xhu*8T3hR1mc_A}fK}jfZ7LX~*n5+`8N2q#rI$ z@<_2VANlYF$vIH$ zl<)+*tIWW78IIINA7Rr7i{<;#^yzxoLNkXL)eSs=%|P>$YQIh+ea_3k z_s7r4%j7%&*NHSl?R4k%1>Z=M9o#zxY!n8sL5>BO-ZP;T3Gut>iLS@U%IBrX6BA3k z)&@q}V8a{X<5B}K5s(c(LQ=%v1ocr`t$EqqY0EqVjr65usa=0bkf|O#ky{j3)WBR(((L^wmyHRzoWuL2~WTC=`yZ zn%VX`L=|Ok0v7?s>IHg?yArBcync5rG#^+u)>a%qjES%dRZoIyA8gQ;StH z1Ao7{<&}6U=5}4v<)1T7t!J_CL%U}CKNs-0xWoTTeqj{5{?Be$L0_tk>M9o8 zo371}S#30rKZFM{`H_(L`EM9DGp+Mifk&IP|C2Zu_)Ghr4Qtpmkm1osCf@%Z$%t+7 zYH$Cr)Ro@3-QDeQJ8m+x6%;?YYT;k6Z0E-?kr>x33`H%*ueBD7Zx~3&HtWn0?2Wt} zTG}*|v?{$ajzt}xPzV%lL1t-URi8*Zn)YljXNGDb>;!905Td|mpa@mHjIH%VIiGx- zd@MqhpYFu4_?y5N4xiHn3vX&|e6r~Xt> zZG`aGq|yTNjv;9E+Txuoa@A(9V7g?1_T5FzRI;!=NP1Kqou1z5?%X~Wwb{trRfd>i z8&y^H)8YnKyA_Fyx>}RNmQIczT?w2J4SNvI{5J&}Wto|8FR(W;Qw#b1G<1%#tmYzQ zQ2mZA-PAdi%RQOhkHy9Ea#TPSw?WxwL@H@cbkZwIq0B!@ns}niALidmn&W?!Vd4Gj zO7FiuV4*6Mr^2xlFSvM;Cp_#r8UaqIzHJQg_z^rEJw&OMm_8NGAY2)rKvki|o1bH~ 
z$2IbfVeY2L(^*rMRU1lM5Y_sgrDS`Z??nR2lX;zyR=c%UyGb*%TC-Dil?SihkjrQy~TMv6;BMs7P8il`H7DmpVm@rJ;b)hW)BL)GjS154b*xq-NXq2cwE z^;VP7ua2pxvCmxrnqUYQMH%a%nHmwmI33nJM(>4LznvY*k&C0{8f*%?zggpDgkuz&JBx{9mfb@wegEl2v!=}Sq2Gaty0<)UrOT0{MZtZ~j5y&w zXlYa_jY)I_+VA-^#mEox#+G>UgvM!Ac8zI<%JRXM_73Q!#i3O|)lOP*qBeJG#BST0 zqohi)O!|$|2SeJQo(w6w7%*92S})XfnhrH_Z8qe!G5>CglP=nI7JAOW?(Z29;pXJ9 zR9`KzQ=WEhy*)WH>$;7Cdz|>*i>=##0bB)oU0OR>>N<21e4rMCHDemNi2LD>Nc$;& zQRFthpWniC1J6@Zh~iJCoLOxN`oCKD5Q4r%ynwgUKPlIEd#?QViIqovY|czyK8>6B zSP%{2-<;%;1`#0mG^B(8KbtXF;Nf>K#Di72UWE4gQ%(_26Koiad)q$xRL~?pN71ZZ zujaaCx~jXjygw;rI!WB=xrOJO6HJ!!w}7eiivtCg5K|F6$EXa)=xUC za^JXSX98W`7g-tm@uo|BKj39Dl;sg5ta;4qjo^pCh~{-HdLl6qI9Ix6f$+qiZ$}s= zNguKrU;u+T@ko(Vr1>)Q%h$?UKXCY>3se%&;h2osl2D zE4A9bd7_|^njDd)6cI*FupHpE3){4NQ*$k*cOWZ_?CZ>Z4_fl@n(mMnYK62Q1d@+I zr&O))G4hMihgBqRIAJkLdk(p(D~X{-oBUA+If@B}j& zsHbeJ3RzTq96lB7d($h$xTeZ^gP0c{t!Y0c)aQE;$FY2!mACg!GDEMKXFOPI^)nHZ z`aSPJpvV0|bbrzhWWkuPURlDeN%VT8tndV8?d)eN*i4I@u zVKl^6{?}A?P)Fsy?3oi#clf}L18t;TjNI2>eI&(ezDK7RyqFxcv%>?oxUlonv(px) z$vnPzRH`y5A(x!yOIfL0bmgeMQB$H5wenx~!ujQK*nUBW;@Em&6Xv2%s(~H5WcU2R z;%Nw<$tI)a`Ve!>x+qegJnQsN2N7HaKzrFqM>`6R*gvh%O*-%THt zrB$Nk;lE;z{s{r^PPm5qz(&lM{sO*g+W{sK+m3M_z=4=&CC>T`{X}1Vg2PEfSj2x_ zmT*(x;ov%3F?qoEeeM>dUn$a*?SIGyO8m806J1W1o+4HRhc2`9$s6hM#qAm zChQ87b~GEw{ADfs+5}FJ8+|bIlIv(jT$Ap#hSHoXdd9#w<#cA<1Rkq^*EEkknUd4& zoIWIY)sAswy6fSERVm&!SO~#iN$OgOX*{9@_BWFyJTvC%S++ilSfCrO(?u=Dc?CXZ zzCG&0yVR{Z`|ZF0eEApWEo#s9osV>F{uK{QA@BES#&;#KsScf>y zvs?vIbI>VrT<*!;XmQS=bhq%46-aambZ(8KU-wOO2=en~D}MCToB_u;Yz{)1ySrPZ z@=$}EvjTdzTWU7c0ZI6L8=yP+YRD_eMMos}b5vY^S*~VZysrkq<`cK3>>v%uy7jgq z0ilW9KjVDHLv0b<1K_`1IkbTOINs0=m-22c%M~l=^S}%hbli-3?BnNq?b`hx^HX2J zIe6ECljRL0uBWb`%{EA=%!i^4sMcj+U_TaTZRb+~GOk z^ZW!nky0n*Wb*r+Q|9H@ml@Z5gU&W`(z4-j!OzC1wOke`TRAYGZVl$PmQ16{3196( zO*?`--I}Qf(2HIwb2&1FB^!faPA2=sLg(@6P4mN)>Dc3i(B0;@O-y2;lM4akD>@^v z=u>*|!s&9zem70g7zfw9FXl1bpJW(C#5w#uy5!V?Q(U35A~$dR%LDVnq@}kQm13{} zd53q3N(s$Eu{R}k2esbftfjfOITCL;jWa$}(mmm}d(&7JZ6d3%IABCapFFYjdEjdK z&4Edqf$G^MNAtL=uCDRs&Fu@FXRgX{*0<(@c3|PNHa>L%zvxWS={L8%qw`STm+=Rd zA}FLspESSIpE_^41~#5yI2bJ=9`oc;GIL!JuW&7YetZ?0H}$$%8rW@*J37L-~Rsx!)8($nI4 zZhcZ2^=Y+p4YPl%j!nFJA|*M^gc(0o$i3nlphe+~-_m}jVkRN{spFs(o0ajW@f3K{ zDV!#BwL322CET$}Y}^0ixYj2w>&Xh12|R8&yEw|wLDvF!lZ#dOTHM9pK6@Nm-@9Lnng4ZHBgBSrr7KI8YCC9DX5Kg|`HsiwJHg2(7#nS;A{b3tVO?Z% za{m5b3rFV6EpX;=;n#wltDv1LE*|g5pQ+OY&*6qCJZc5oDS6Z6JD#6F)bWxZSF@q% z+1WV;m!lRB!n^PC>RgQCI#D1br_o^#iPk>;K2hB~0^<~)?p}LG%kigm@moD#q3PE+ zA^Qca)(xnqw6x>XFhV6ku9r$E>bWNrVH9fum0?4s?Rn2LG{Vm_+QJHse6xa%nzQ?k zKug4PW~#Gtb;#5+9!QBgyB@q=sk9=$S{4T>wjFICStOM?__fr+Kei1 z3j~xPqW;W@YkiUM;HngG!;>@AITg}vAE`M2Pj9Irl4w1fo4w<|Bu!%rh%a(Ai^Zhi zs92>v5;@Y(Zi#RI*ua*h`d_7;byQSa*v9E{2x$<-_=5Z<7{%)}4XExANcz@rK69T0x3%H<@frW>RA8^swA+^a(FxK| zFl3LD*ImHN=XDUkrRhp6RY5$rQ{bRgSO*(vEHYV)3Mo6Jy3puiLmU&g82p{qr0F?ohmbz)f2r{X2|T2 z$4fdQ=>0BeKbiVM!e-lIIs8wVTuC_m7}y4A_%ikI;Wm5$9j(^Y z(cD%U%k)X>_>9~t8;pGzL6L-fmQO@K; zo&vQzMlgY95;1BSkngY)e{`n0!NfVgf}2mB3t}D9@*N;FQ{HZ3Pb%BK6;5#-O|WI( zb6h@qTLU~AbVW#_6?c!?Dj65Now7*pU{h!1+eCV^KCuPAGs28~3k@ueL5+u|Z-7}t z9|lskE`4B7W8wMs@xJa{#bsCGDFoRSNSnmNYB&U7 zVGKWe%+kFB6kb)e;TyHfqtU6~fRg)f|>=5(N36)0+C z`hv65J<$B}WUc!wFAb^QtY31yNleq4dzmG`1wHTj=c*=hay9iD071Hc?oYoUk|M*_ zU1GihAMBsM@5rUJ(qS?9ZYJ6@{bNqJ`2Mr+5#hKf?doa?F|+^IR!8lq9)wS3tF_9n zW_?hm)G(M+MYb?V9YoX^_mu5h-LP^TL^!Q9Z7|@sO(rg_4+@=PdI)WL(B7`!K^ND- z-uIuVDCVEdH_C@c71YGYT^_Scf_dhB8Z2Xy6vGtBSlYud9vggOqv^L~F{BraSE_t} zIkP+Hp2&nH^-MNEs}^`oMLy11`PQW$T|K(`Bu*(f@)mv1-qY(_YG&J2M2<7k;;RK~ zL{Fqj9yCz8(S{}@c)S!65aF<=&eLI{hAMErCx&>i7OeDN>okvegO87OaG{Jmi<|}D 
zaT@b|0X{d@OIJ7zvT>r+eTzgLq~|Dpu)Z&db-P4z*`M$UL51lf>FLlq6rfG)%doyp z)3kk_YIM!03eQ8Vu_2fg{+osaEJPtJ-s36R+5_AEG12`NG)IQ#TF9c@$99%0iye+ zUzZ57=m2)$D(5Nx!n)=5Au&O0BBgwxIBaeI(mro$#&UGCr<;C{UjJVAbVi%|+WP(a zL$U@TYCxJ=1{Z~}rnW;7UVb7+ZnzgmrogDxhjLGo>c~MiJAWs&&;AGg@%U?Y^0JhL ze(x6Z74JG6FlOFK(T}SXQfhr}RIFl@QXKnIcXYF)5|V~e-}suHILKT-k|<*~Ij|VF zC;t@=uj=hot~*!C68G8hTA%8SzOfETOXQ|3FSaIEjvBJp(A)7SWUi5!Eu#yWgY+;n zlm<$+UDou*V+246_o#V4kMdto8hF%%Lki#zPh}KYXmMf?hrN0;>Mv%`@{0Qn`Ujp) z=lZe+13>^Q!9zT);H<(#bIeRWz%#*}sgUX9P|9($kexOyKIOc`dLux}c$7It4u|Rl z6SSkY*V~g_B-hMPo_ak>>z@AVQ(_N)VY2kB3IZ0G(iDUYw+2d7W^~(Jq}KY=JnWS( z#rzEa&0uNhJ>QE8iiyz;n2H|SV#Og+wEZv=f2%1ELX!SX-(d3tEj$5$1}70Mp<&eI zCkfbByL7af=qQE@5vDVxx1}FSGt_a1DoE3SDI+G)mBAna)KBG4p8Epxl9QZ4BfdAN zFnF|Y(umr;gRgG6NLQ$?ZWgllEeeq~z^ZS7L?<(~O&$5|y)Al^iMKy}&W+eMm1W z7EMU)u^ke(A1#XCV>CZ71}P}0x)4wtHO8#JRG3MA-6g=`ZM!FcICCZ{IEw8Dm2&LQ z1|r)BUG^0GzI6f946RrBlfB1Vs)~8toZf~7)+G;pv&XiUO(%5bm)pl=p>nV^o*;&T z;}@oZSibzto$arQgfkp|z4Z($P>dTXE{4O=vY0!)kDO* zGF8a4wq#VaFpLfK!iELy@?-SeRrdz%F*}hjKcA*y@mj~VD3!it9lhRhX}5YOaR9$} z3mS%$2Be7{l(+MVx3 z(4?h;P!jnRmX9J9sYN#7i=iyj_5q7n#X(!cdqI2lnr8T$IfOW<_v`eB!d9xY1P=2q&WtOXY=D9QYteP)De?S4}FK6#6Ma z=E*V+#s8>L;8aVroK^6iKo=MH{4yEZ_>N-N z`(|;aOATba1^asjxlILk<4}f~`39dBFlxj>Dw(hMYKPO3EEt1@S`1lxFNM+J@uB7T zZ8WKjz7HF1-5&2=l=fqF-*@>n5J}jIxdDwpT?oKM3s8Nr`x8JnN-kCE?~aM1H!hAE z%%w(3kHfGwMnMmNj(SU(w42OrC-euI>Dsjk&jz3ts}WHqmMpzQ3vZrsXrZ|}+MHA7 z068obeXZTsO*6RS@o3x80E4ok``rV^Y3hr&C1;|ZZ0|*EKO`$lECUYG2gVFtUTw)R z4Um<0ZzlON`zTdvVdL#KFoMFQX*a5wM0Czp%wTtfK4Sjs)P**RW&?lP$(<}q%r68Z zS53Y!d@&~ne9O)A^tNrXHhXBkj~$8j%pT1%%mypa9AW5E&s9)rjF4@O3ytH{0z6riz|@< zB~UPh*wRFg2^7EbQrHf0y?E~dHlkOxof_a?M{LqQ^C!i2dawHTPYUE=X@2(3<=OOxs8qn_(y>pU>u^}3y&df{JarR0@VJn0f+U%UiF=$Wyq zQvnVHESil@d|8&R<%}uidGh7@u^(%?$#|&J$pvFC-n8&A>utA=n3#)yMkz+qnG3wd zP7xCnF|$9Dif@N~L)Vde3hW8W!UY0BgT2v(wzp;tlLmyk2%N|0jfG$%<;A&IVrOI< z!L)o>j>;dFaqA3pL}b-Je(bB@VJ4%!JeX@3x!i{yIeIso^=n?fDX`3bU=eG7sTc%g%ye8$v8P@yKE^XD=NYxTb zbf!Mk=h|otpqjFaA-vs5YOF-*GwWPc7VbaOW&stlANnCN8iftFMMrUdYNJ_Bnn5Vt zxfz@Ah|+4&P;reZxp;MmEI7C|FOv8NKUm8njF7Wb6Gi7DeODLl&G~}G4be&*Hi0Qw z5}77vL0P+7-B%UL@3n1&JPxW^d@vVwp?u#gVcJqY9#@-3X{ok#UfW3<1fb%FT`|)V~ggq z(3AUoUS-;7)^hCjdT0Kf{i}h)mBg4qhtHHBti=~h^n^OTH5U*XMgDLIR@sre`AaB$ zg)IGBET_4??m@cx&c~bA80O7B8CHR7(LX7%HThkeC*@vi{-pL%e)yXp!B2InafbDF zjPXf1mko3h59{lT6EEbxKO1Z5GF71)WwowO6kY|6tjSVSWdQ}NsK2x{>i|MKZK8%Q zfu&_0D;CO-Jg0#YmyfctyJ!mRJp)e#@O0mYdp|8x;G1%OZQ3Q847YWTyy|%^cpA;m zze0(5p{tMu^lDkpe?HynyO?a1$_LJl2L&mpeKu%8YvgRNr=%2z${%WThHG=vrWY@4 zsA`OP#O&)TetZ>s%h!=+CE15lOOls&nvC~$Qz0Ph7tHiP;O$i|eDwpT{cp>+)0-|; zY$|bB+Gbel>5aRN3>c0x)4U=|X+z+{ zn*_p*EQoquRL+=+p;=lm`d71&1NqBz&_ph)MXu(Nv6&XE7(RsS)^MGj5Q?Fwude-(sq zjJ>aOq!7!EN>@(fK7EE#;i_BGvli`5U;r!YA{JRodLBc6-`n8K+Fjgwb%sX;j=qHQ z7&Tr!)!{HXoO<2BQrV9Sw?JRaLXV8HrsNevvnf>Y-6|{T!pYLl7jp$-nEE z#X!4G4L#K0qG_4Z;Cj6=;b|Be$hi4JvMH!-voxqx^@8cXp`B??eFBz2lLD8RRaRGh zn7kUfy!YV~p(R|p7iC1Rdgt$_24i0cd-S8HpG|`@my70g^y`gu%#Tf_L21-k?sRRZHK&at(*ED0P8iw{7?R$9~OF$Ko;Iu5)ur5<->x!m93Eb zFYpIx60s=Wxxw=`$aS-O&dCO_9?b1yKiPCQmSQb>T)963`*U+Ydj5kI(B(B?HNP8r z*bfSBpSu)w(Z3j7HQoRjUG(+d=IaE~tv}y14zHHs|0UcN52fT8V_<@2ep_ee{QgZG zmgp8iv4V{k;~8@I%M3<#B;2R>Ef(Gg_cQM7%}0s*^)SK6!Ym+~P^58*wnwV1BW@eG z4sZLqsUvBbFsr#8u7S1r4teQ;t)Y@jnn_m5jS$CsW1um!p&PqAcc8!zyiXHVta9QC zY~wCwCF0U%xiQPD_INKtTb;A|Zf29(mu9NI;E zc-e>*1%(LSXB`g}kd`#}O;veb<(sk~RWL|f3ljxCnEZDdNSTDV6#Td({6l&y4IjKF z^}lIUq*ZUqgTPumD)RrCN{M^jhY>E~1pn|KOZ5((%F)G|*ZQ|r4zIbrEiV%42hJV8 z3xS)=!X1+=olbdGJ=yZil?oXLct8FM{(6ikLL3E%=q#O6(H$p~gQu6T8N!plf!96| z&Q3=`L~>U0zZh;z(pGR2^S^{#PrPxTRHD1RQOON&f)Siaf`GLj#UOk&(|@0?zm;Sx 
ztsGt8=29-MZs5CSf1l1jNFtNt5rFNZxJPvkNu~2}7*9468TWm>nN9TP&^!;J{-h)_ z7WsHH9|F%I`Pb!>KAS3jQWKfGivTVkMJLO-HUGM_a4UQ_%RgL6WZvrW+Z4ujZn;y@ zz9$=oO!7qVTaQAA^BhX&ZxS*|5dj803M=k&2%QrXda`-Q#IoZL6E(g+tN!6CA!CP* zCpWtCujIea)ENl0liwVfj)Nc<9mV%+e@=d`haoZ*`B7+PNjEbXBkv=B+Pi^~L#EO$D$ZqTiD8f<5$eyb54-(=3 zh)6i8i|jp(@OnRrY5B8t|LFXFQVQ895n*P16cEKTrT*~yLH6Z4e*bZ5otpRDri&+A zfNbK1D5@O=sm`fN=WzWyse!za5n%^+6dHPGX#8DyIK>?9qyX}2XvBWVqbP%%D)7$= z=#$WulZlZR<{m#gU7lwqK4WS1Ne$#_P{b17qe$~UOXCl>5b|6WVh;5vVnR<%d+Lnp z$uEmML38}U4vaW8>shm6CzB(Wei3s#NAWE3)a2)z@i{4jTn;;aQS)O@l{rUM`J@K& l00vQ5JBs~;vo!vr%%-k{2_Fq1Mn4QF81S)AQ99zk{{c4yR+0b! literal 0 HcmV?d00001 diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..b82aa23 --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,7 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip +networkTimeout=10000 +validateDistributionUrl=true +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew new file mode 100755 index 0000000..1aa94a4 --- /dev/null +++ b/gradlew @@ -0,0 +1,249 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. 
+# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
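+#
+# For illustration only, with the defaults defined below the eventual command
+# resembles:
+#
+#     java "-Xmx64m" "-Xms64m" "-Dorg.gradle.appname=gradlew" \
+#         -classpath "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
+#         org.gradle.wrapper.GradleWrapperMain "$@"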
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..25da30d --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,92 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/lombok.config b/lombok.config new file mode 100644 index 0000000..0586c81 --- /dev/null +++ b/lombok.config @@ -0,0 +1,5 @@ +config.stopBubbling = true +lombok.addLombokGeneratedAnnotation = true +lombok.anyConstructor.addConstructorProperties = true +lombok.equalsAndHashCode.callSuper = call +lombok.tostring.callsuper = call diff --git a/plugin-transforms-grok/build.gradle b/plugin-transforms-grok/build.gradle new file mode 100644 index 0000000..2874c84 --- /dev/null +++ b/plugin-transforms-grok/build.gradle @@ -0,0 +1,17 @@ +project.description = 'Kestra Plugin Transformation for Grok.' 
+
+jar {
+    manifest {
+        attributes(
+            "X-Kestra-Name": project.name,
+            "X-Kestra-Title": "Grok",
+            "X-Kestra-Group": project.group + ".grok",
+            "X-Kestra-Description": project.description,
+            "X-Kestra-Version": project.version
+        )
+    }
+}
+
+dependencies {
+    implementation 'org.jruby.joni:joni:2.2.1'
+}
diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/GrokTransform.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/GrokTransform.java
new file mode 100644
index 0000000..ce94d7e
--- /dev/null
+++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/GrokTransform.java
@@ -0,0 +1,220 @@
+package io.kestra.plugin.transforms.grok;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.kestra.core.models.annotations.Example;
+import io.kestra.core.models.annotations.Plugin;
+import io.kestra.core.models.annotations.PluginProperty;
+import io.kestra.core.models.tasks.Output;
+import io.kestra.core.models.tasks.RunnableTask;
+import io.kestra.core.models.tasks.Task;
+import io.kestra.core.runners.RunContext;
+import io.kestra.core.serializers.JacksonMapper;
+import io.kestra.plugin.transforms.grok.pattern.GrokMatcher;
+import io.kestra.plugin.transforms.grok.pattern.GrokPatternCompiler;
+import io.kestra.plugin.transforms.grok.pattern.GrokPatternResolver;
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotNull;
+import lombok.AccessLevel;
+import lombok.Builder;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.ToString;
+import lombok.experimental.SuperBuilder;
+
+import java.io.File;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+@SuperBuilder
+@ToString
+@EqualsAndHashCode
+@Getter
+@NoArgsConstructor
+@Schema(
+    title = "Parse arbitrary text and structure it using Grok expressions.",
+    description = """
+        The `GrokTransform` task is similar to the famous Logstash Grok filter from the ELK stack.
+        It is particularly useful for transforming unstructured data such as logs into a structured, indexable, and queryable data structure.
+
+        The `GrokTransform` ships with all the default patterns as defined by the Logstash patterns-core project. You can find them here: https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/.
+ """ +) +@Plugin( + examples = { + @Example( + title = "Consume, parse, and structure logs events from Kafka topic.", + full = true, + code = """ + id: kafka + namespace: myteam + + tasks: + - id: grok + type: io.kestra.plugin.transforms.grok.GrokTransform + pattern: "%{TIMESTAMP_ISO8601:logdate} %{LOGLEVEL:loglevel} %{GREEDYDATA:message}" + from: "{{ trigger.value }}" + + - id: log_on_warn + type: io.kestra.plugin.core.flow.If + condition: "{{ grok.value['LOGLEVEL'] == 'ERROR' }}" + then: + - id: when_true + type: io.kestra.plugin.core.log.Log + message: "{{ grok.value }}" + + triggers: + - id: realtime_trigger + type: io.kestra.plugin.kafka.RealtimeTrigger + topic: test_kestra + properties: + bootstrap.servers: localhost:9092 + serdeProperties: + schema.registry.url: http://localhost:8085 + keyDeserializer: STRING + valueDeserializer: STRING + groupId: kafkaConsumerGroupId + """ + ) + } +) +public class GrokTransform extends Task implements RunnableTask { + + private static final ObjectMapper ION_OBJECT_MAPPER = JacksonMapper.ofIon(); + + @Schema( + title = "The string object to transform.", + description = "Must be a valid JSON string or a `kestra://` internal storage URI." + ) + @PluginProperty(dynamic = true) + @NotNull + private String from; + + @PluginProperty + @Schema(title = "A Grok pattern to match.") + private String pattern; + + @PluginProperty + @Schema(title = "A list of Grok patterns to match.") + private List patterns; + + @PluginProperty + @Schema( + title = "List of user-defined pattern directories", + description = "Directories must be paths relative to the working directory." + ) + @NotNull + private List patternsDir; + + @PluginProperty + @Schema( + title = "Custom pattern definitions", + description = "A map of pattern-name and pattern pairs defining custom patterns to be used by the current tasks. Patterns matching existing names will override the pre-existing definition. " + ) + private Map patternDefinitions; + + @PluginProperty + @Schema(title = "If `true`, only store named captures from grok.") + @Builder.Default + private boolean namedCapturesOnly = true; + + @PluginProperty + @Schema( + title = "If `true`, break on first match.", + description = "The first successful match by grok will result in the task being finished. Set to `false` if you want the task to try all configured patterns." 
+ ) + @Builder.Default + private boolean breakOnFirstMatch = true; + + @Getter(AccessLevel.PRIVATE) + private RunContext runContext; + + /** + * {@inheritDoc} + **/ + @Override + public Output run(RunContext runContext) throws Exception { + + this.runContext = runContext; + + final byte[] bytes = runContext.render(this.from).getBytes(StandardCharsets.UTF_8); + + GrokPatternCompiler compiler = new GrokPatternCompiler( + new GrokPatternResolver( + runContext.logger(), + patternDefinitions(), + patternsDir() + ), + namedCapturesOnly + ); + + // compile all patterns + List matchPatterns = patterns() + .stream() + .map(compiler::compile) + .toList(); + + // match patterns + final List> allNamedCaptured = new ArrayList<>(matchPatterns.size()); + for (GrokMatcher matcher : matchPatterns) { + final Map captured = matcher.captures(bytes); + if (captured != null) { + allNamedCaptured.add(captured); + if (breakOnFirstMatch) break; + } + } + + // merge all named captured + Map mergedValues = new HashMap<>(); + for (Map namedCaptured : allNamedCaptured) { + mergedValues.putAll(namedCaptured); + } + + // return + return Output + .builder() + .values(mergedValues) + .build(); + } + + private Map patternDefinitions() { + return Optional.ofNullable(patternDefinitions).orElse(Collections.emptyMap()); + } + + private List patternsDir() { + if (this.patternsDir == null || this.patternsDir.isEmpty()) return Collections.emptyList(); + + return this.patternsDir + .stream() + .map(dir -> runContext.workingDir().resolve(Path.of(dir))) + .map(Path::toFile) + .collect(Collectors.toList()); + } + + private List patterns() { + if (pattern != null) return List.of(pattern); + + if (patterns == null || patterns.isEmpty()) { + throw new IllegalArgumentException( + "Missing required configuration, either `pattern` or `patterns` properties must not be empty."); + } + return patterns; + } + + @Builder + @Getter + public static class Output implements io.kestra.core.models.tasks.Output { + @Schema( + title = "The values captured from matching the Grok expressions." + ) + private final Map values; + + } +} diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/Type.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/Type.java new file mode 100644 index 0000000..02e4a4f --- /dev/null +++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/Type.java @@ -0,0 +1,65 @@ +package io.kestra.plugin.transforms.grok.data; + +import io.kestra.plugin.transforms.grok.data.internal.TypeConverter; +import jakarta.annotation.Nullable; +import jakarta.validation.constraints.NotNull; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Locale; + +public enum Type { + + SHORT(Short.class), + + INT(Integer.class), + + LONG(Long.class), + + FLOAT(Float.class), + + DOUBLE(Double.class), + + BOOLEAN(Boolean.class), + + STRING(String.class), + + DURATION(Duration.class); + + private final Class objectType; + + /** + * Creates a new {@link Type} instance. + * + * @param objectType the class-type. + */ + Type(final Class objectType) { + this.objectType = objectType; + } + + /** + * Converts the specified object to this type. + * + * @param o the object to be converted. + * @return the converted object. + */ + public Object convert(final Object o) { + return TypeConverter.newForType(objectType).convertValue(o); + } + + /** + * Gets the enum for specified string name. + * + * @param value The enum raw value. 
+     * @param defaultValue The fallback type for an unknown string.
+     * @return The Enum.
+     * @throws IllegalArgumentException if the specified value is {@code null}.
+     */
+    public static Type getForNameIgnoreCase(final @Nullable String value, final @NotNull Type defaultValue) {
+        if (value == null) throw new IllegalArgumentException("Unsupported value 'null'");
+        return Arrays.stream(Type.values())
+            .filter(e -> e.name().equals(value.toUpperCase(Locale.ROOT)))
+            .findFirst()
+            .orElse(defaultValue);
+    }
+}
diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/internal/TypeConverter.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/internal/TypeConverter.java
new file mode 100644
index 0000000..6a955a5
--- /dev/null
+++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/data/internal/TypeConverter.java
@@ -0,0 +1,97 @@
+package io.kestra.plugin.transforms.grok.data.internal;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.JavaType;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.fasterxml.jackson.databind.type.CollectionType;
+import com.fasterxml.jackson.databind.type.TypeFactory;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * Service class for converting an Object of one type to another.
+ *
+ * @param <T> the target type.
+ */
+public final class TypeConverter<T> {
+
+    private static final ObjectMapper DEFAULT_OBJECT_MAPPER = JsonMapper.builder()
+        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
+        .configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
+        .configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS, true)
+        .build();
+
+    private final ObjectMapper objectMapper;
+    private final JavaType type;
+
+    /**
+     * Creates a new converter for converting an object into the given type.
+     *
+     * @param objectType Type of object.
+     * @param <T>        Type of object.
+     * @return The converter.
+     */
+    public static <T> TypeConverter<T> newForType(final TypeReference<T> objectType) {
+        TypeFactory typeFactory = DEFAULT_OBJECT_MAPPER.getTypeFactory();
+        JavaType type = typeFactory.constructType(objectType);
+        return new TypeConverter<>(DEFAULT_OBJECT_MAPPER, type);
+    }
+
+    /**
+     * Creates a new converter for converting an object into the given type.
+     *
+     * @param objectType Type of object.
+     * @param <T>        Type of object.
+     * @return The converter.
+     */
+    public static <T> TypeConverter<T> newForType(final Class<T> objectType) {
+        TypeFactory typeFactory = DEFAULT_OBJECT_MAPPER.getTypeFactory();
+        JavaType type = typeFactory.constructType(objectType);
+        return new TypeConverter<>(DEFAULT_OBJECT_MAPPER, type);
+    }
+
+    /**
+     * Creates a new converter for converting an object into a list of elements of the given type.
+     *
+     * @param elementClass Type of elements.
+     * @param <T>          Type of object.
+     * @return The converter.
+     */
+    public static <T> TypeConverter<List<T>> newForList(Class<T> elementClass) {
+        TypeFactory typeFactory = DEFAULT_OBJECT_MAPPER.getTypeFactory();
+        CollectionType type = typeFactory.constructCollectionType(List.class, elementClass);
+        return new TypeConverter<>(DEFAULT_OBJECT_MAPPER, type);
+    }
+
+    /**
+     * Creates a new converter for converting an object into a set of elements of the given type.
+     *
+     * @param elementClass Type of elements.
+     * @param <T>          Type of object.
+     * @return The converter.
+     */
+    public static <T> TypeConverter<Set<T>> newForSet(Class<T> elementClass) {
+        TypeFactory typeFactory = DEFAULT_OBJECT_MAPPER.getTypeFactory();
+        CollectionType type = typeFactory.constructCollectionType(Set.class, elementClass);
+        return new TypeConverter<>(DEFAULT_OBJECT_MAPPER, type);
+    }
+
+    /**
+     * Creates a new {@link TypeConverter} instance.
+     *
+     * @param objectMapper The {@link ObjectMapper}.
+     * @param type         The target {@link JavaType}.
+     */
+    public TypeConverter(ObjectMapper objectMapper, JavaType type) {
+        this.objectMapper = Objects.requireNonNull(objectMapper, "objectMapper cannot be null");
+        this.type = Objects.requireNonNull(type, "type cannot be null");
+    }
+
+    public T convertValue(Object value) {
+        return value == null ? null : objectMapper.convertValue(value, type);
+    }
+}
\ No newline at end of file
diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureExtractor.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureExtractor.java
new file mode 100644
index 0000000..6a49a3b
--- /dev/null
+++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureExtractor.java
@@ -0,0 +1,38 @@
+package io.kestra.plugin.transforms.grok.pattern;
+
+import org.joni.Region;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public interface GrokCaptureExtractor {
+
+    void extract(final byte[] bytes, final Region region);
+
+    class MapGrokCaptureExtractor implements GrokCaptureExtractor {
+
+        private final List<GrokCaptureExtractor> extractors;
+
+        private final Map<String, Object> captured = new HashMap<>();
+
+        public MapGrokCaptureExtractor(final List<GrokCaptureGroup> grokCaptureGroups) {
+            this.extractors = grokCaptureGroups
+                .stream()
+                .map(group -> group.getExtractor(o -> captured.put(group.name(), o)))
+                .collect(Collectors.toList());
+        }
+
+        @Override
+        public void extract(final byte[] bytes, final Region region) {
+            for (GrokCaptureExtractor extractor : extractors) {
+                extractor.extract(bytes, region);
+            }
+        }
+
+        public Map<String, Object> captured() {
+            return captured;
+        }
+    }
+}
diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureGroup.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureGroup.java
new file mode 100644
index 0000000..b7bfc32
--- /dev/null
+++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokCaptureGroup.java
@@ -0,0 +1,76 @@
+package io.kestra.plugin.transforms.grok.pattern;
+
+import io.kestra.plugin.transforms.grok.data.Type;
+import org.joni.Region;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Objects;
+import java.util.function.Consumer;
+
+public final class GrokCaptureGroup {
+
+    private final Type type;
+    private final String name;
+    private final int[] backRefs;
+
+    public GrokCaptureGroup(final String name, final int[] backRefs, final Type type) {
+        this.type = Objects.requireNonNull(type, "type cannot be null");
+        this.name = Objects.requireNonNull(name, "name cannot be null");
+        this.backRefs = backRefs;
+    }
+
+    /**
+     * Gets the type defined for the data field to capture.
+     */
+    public Type type() {
+        return type;
+    }
+
+    /**
+     * Gets the name defined for the data field to capture.
+     */
+    public String name() {
+        return name;
+    }
+
+    /**
+     * Gets the {@link GrokCaptureExtractor} to be used for capturing that group.
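+     * A minimal usage sketch, mirroring what {@code MapGrokCaptureExtractor} above
+     * does (this assumes {@code group}, {@code bytes}, and {@code region} are in scope):
+     * <pre>{@code
+     * Map<String, Object> captured = new HashMap<>();
+     * GrokCaptureExtractor extractor = group.getExtractor(v -> captured.put(group.name(), v));
+     * extractor.extract(bytes, region);
+     * }</pre>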
+ * + * @param consumer the {@link Consumer} to call when a data field is captured. + */ + public GrokCaptureExtractor getExtractor(final Consumer consumer) { + return new RawValueExtractor(backRefs, (s -> consumer.accept(type.convert(s)))); + } + + /** + * {@inheritDoc} + */ + @Override + public String toString() { + return "GrokCaptureGroup{" + + "type=" + type + + ", name='" + name + '\'' + + ", backRefs=" + Arrays.toString(backRefs) + + '}'; + } + + private record RawValueExtractor(int[] backRefs, Consumer consumer) implements GrokCaptureExtractor { + + /** + * {@inheritDoc} + */ + @Override + public void extract(byte[] bytes, Region region) { + for (int capture : backRefs) { + int offset = region.getBeg(capture); + int length = region.getEnd(capture) - offset; + if (offset >= 0) { + String value = new String(bytes, offset, length, StandardCharsets.UTF_8); + consumer.accept(value); + break; // we only need to capture the first value. + } + } + } + } +} diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokException.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokException.java new file mode 100644 index 0000000..93b9248 --- /dev/null +++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokException.java @@ -0,0 +1,14 @@ +package io.kestra.plugin.transforms.grok.pattern; + +import io.kestra.core.exceptions.KestraRuntimeException; + +public class GrokException extends KestraRuntimeException { + + public GrokException(final String message) { + super(message); + } + + public GrokException(final String message, final Throwable cause) { + super(message, cause); + } +} \ No newline at end of file diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcher.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcher.java new file mode 100644 index 0000000..529c19d --- /dev/null +++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcher.java @@ -0,0 +1,116 @@ +package io.kestra.plugin.transforms.grok.pattern; + +import io.kestra.plugin.transforms.grok.data.Type; +import org.jcodings.specific.UTF8Encoding; +import org.joni.Matcher; +import org.joni.NameEntry; +import org.joni.Option; +import org.joni.Regex; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +public final class GrokMatcher { + + private final Map patternsByName; + + private final List patterns; + + private final String expression; + + private final Regex regex; + + private final List grokCaptureGroups; + + /** + * Creates a new {@link GrokMatcher} instance. + * + * @param patterns the list of patterns. + * @param expression the original expression. + */ + GrokMatcher(final List patterns, + final String expression) { + Objects.requireNonNull(patterns, "pattern can't be null"); + Objects.requireNonNull(expression, "expression can't be null"); + this.patterns = patterns; + this.expression = expression; + this.patternsByName = patterns + .stream() + .collect(Collectors.toMap(GrokPattern::syntax, p -> p, (p1, p2) -> p1.semantic() != null ? 
+        byte[] bytes = expression.getBytes(StandardCharsets.UTF_8);
+        regex = new Regex(bytes, 0, bytes.length, Option.NONE, UTF8Encoding.INSTANCE);
+
+        grokCaptureGroups = new ArrayList<>();
+        for (Iterator<NameEntry> entry = regex.namedBackrefIterator(); entry.hasNext();) {
+            NameEntry nameEntry = entry.next();
+            final String field = new String(
+                nameEntry.name,
+                nameEntry.nameP,
+                nameEntry.nameEnd - nameEntry.nameP,
+                StandardCharsets.UTF_8);
+            final GrokPattern pattern = getGrokPattern(field);
+            final Type type = pattern != null ? pattern.type() : Type.STRING;
+            grokCaptureGroups.add(new GrokCaptureGroup(field, nameEntry.getBackRefs(), type));
+        }
+    }
+
+    public GrokPattern getGrokPattern(final int i) {
+        return patterns.get(i);
+    }
+
+    public GrokPattern getGrokPattern(final String name) {
+        return patternsByName.get(name);
+    }
+
+    /**
+     * Returns the compiled regex expression.
+     */
+    public Regex regex() {
+        return regex;
+    }
+
+    /**
+     * Returns the raw regex expression.
+     */
+    public String expression() {
+        return expression;
+    }
+
+    /**
+     * Matches the given input against the compiled Grok expression.
+     *
+     * @param bytes the text bytes to match.
+     * @return a {@code Map<String, Object>} that contains all named captures, or {@code null} if the input does not match.
+     */
+    public Map<String, Object> captures(final byte[] bytes) {
+
+        long now = System.currentTimeMillis();
+        final var extractor = new GrokCaptureExtractor.MapGrokCaptureExtractor(grokCaptureGroups);
+
+        final Matcher matcher = regex.matcher(bytes);
+        int result = matcher.search(0, bytes.length, Option.DEFAULT);
+
+        if (result == Matcher.FAILED) {
+            return null;
+        }
+        if (result == Matcher.INTERRUPTED) {
+            long interruptedAfterMs = System.currentTimeMillis() - now;
+            throw new RuntimeException("Grok pattern matching was interrupted before completion (" + interruptedAfterMs + " ms)");
+        }
+        extractor.extract(bytes, matcher.getEagerRegion());
+
+        return extractor.captured();
+    }
+
+    @Override
+    public String toString() {
+        return "GrokMatcher{" +
+            "patterns=" + patterns +
+            ", expression='" + expression + '\'' +
+            '}';
+    }
+}
diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPattern.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPattern.java
new file mode 100644
index 0000000..581a00f
--- /dev/null
+++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPattern.java
@@ -0,0 +1,32 @@
+package io.kestra.plugin.transforms.grok.pattern;
+
+import io.kestra.plugin.transforms.grok.data.Type;
+
+/**
+ * GrokPattern.
+ *
+ * @param syntax   the grok pattern syntax.
+ * @param semantic the grok pattern semantic.
+ * @param type     the grok pattern type.
+ */
+public record GrokPattern(
+    String syntax,
+    String semantic,
+    Type type
+) {
+
+    /**
+     * Creates a new {@link GrokPattern} instance.
+     *
+     * @param syntax   the grok pattern syntax.
+     * @param semantic the grok pattern semantic.
+     * @param type     the grok pattern type.
+     */
+    public static GrokPattern of(final String syntax, final String semantic, final String type) {
+        return new GrokPattern(
+            syntax,
+            semantic,
+            type != null ? Type.getForNameIgnoreCase(type.toUpperCase(), Type.STRING) : Type.STRING
+        );
+    }
+}
diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompiler.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompiler.java
new file mode 100644
index 0000000..53f7d6d
--- /dev/null
+++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompiler.java
@@ -0,0 +1,91 @@
+package io.kestra.plugin.transforms.grok.pattern;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public final class GrokPatternCompiler {
+
+    private static final Logger LOG = LoggerFactory.getLogger(GrokPatternCompiler.class);
+
+    private static final String SYNTAX_FIELD = "syntax";
+    private static final String SEMANTIC_FIELD = "semantic";
+    private static final String TYPE_FIELD = "type";
+
+    private static final String REGEX = "(?:%\\{(?<syntax>[A-Z0-9_]+)(?:\\:(?<semantic>[a-zA-Z0-9_\\\\-]+))?(?:\\:(?<type>[a-zA-Z0-9_\\\\-]+))?\\})";
+    // For illustration (assuming the default pattern definitions), an expression such as
+    // "%{INT:port}" is resolved and compiled into the regex "(?<port>(?:[+-]?(?:[0-9]+)))".
+    private static final Pattern PATTERN = Pattern.compile(REGEX);
+
+    private final GrokPatternResolver resolver;
+
+    private final boolean namedCapturesOnly;
+
+    /**
+     * Creates a new {@link GrokPatternCompiler} instance.
+     *
+     * @param resolver          the grok pattern resolver.
+     * @param namedCapturesOnly whether only named patterns should be captured.
+     */
+    public GrokPatternCompiler(final GrokPatternResolver resolver,
+                               final boolean namedCapturesOnly) {
+        Objects.requireNonNull(resolver, "resolver can't be null");
+        this.resolver = resolver;
+        this.namedCapturesOnly = namedCapturesOnly;
+    }
+
+    public GrokMatcher compile(final String expression) {
+        Objects.requireNonNull(expression, "expression can't be null");
+        LOG.trace("Starting to compile grok matcher expression : {}", expression);
+        ArrayList<GrokPattern> patterns = new ArrayList<>();
+        final String regex = compileRegex(expression, patterns);
+        LOG.trace("Grok expression compiled to regex : {}", regex);
+        return new GrokMatcher(patterns, regex);
+    }
+
+    private String compileRegex(final String expression, final List<GrokPattern> patterns) {
+        Matcher matcher = PATTERN.matcher(expression);
+        final StringBuilder sb = new StringBuilder();
+        while (matcher.find()) {
+            GrokPattern grok = GrokPattern.of(
+                matcher.group(SYNTAX_FIELD),
+                matcher.group(SEMANTIC_FIELD),
+                matcher.group(TYPE_FIELD)
+            );
+
+            patterns.add(grok);
+
+            final String resolved = resolver.resolve(grok.syntax());
+            String replacement = compileRegex(resolved, patterns);
+            if (grok.semantic() != null) {
+                replacement = capture(replacement, grok.semantic());
+            } else if (!namedCapturesOnly) {
+                replacement = capture(replacement, grok.syntax());
+            }
+
+            // Properly escape $ characters.
+            // An "Illegal group reference" exception can arise when the replacement string for
+            // Matcher.appendReplacement contains illegal references to capture groups,
+            // which could occur when there are unresolved or incorrectly escaped group references.
+            // This often happens with the $ character in regex, which signifies a group reference.
+            replacement = replacement.replace("\\", "\\\\").replace("$", "\\$");
+
+            try {
+                matcher.appendReplacement(sb, replacement);
+            } catch (IllegalArgumentException e) {
+                throw new IllegalArgumentException("Failed to append replacement: " + replacement, e);
+            }
+        }
+        // Copy the remainder of the input sequence.
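+        // (e.g. for "foo %{INT:num} bar", appendTail copies the trailing literal " bar".)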
+        matcher.appendTail(sb);
+        return sb.toString();
+    }
+
+    private String capture(final String expression, final String name) {
+        return "(?<" + name + ">" + expression + ")";
+    }
+}
diff --git a/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolver.java b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolver.java
new file mode 100644
index 0000000..c99303d
--- /dev/null
+++ b/plugin-transforms-grok/src/main/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolver.java
@@ -0,0 +1,214 @@
+package io.kestra.plugin.transforms.grok.pattern;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystemNotFoundException;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+public final class GrokPatternResolver {
+
+    private static final Logger LOG = LoggerFactory.getLogger(GrokPatternResolver.class);
+
+    private static final Pattern DEFINITION = Pattern.compile("^(?<NAME>[A-Z0-9_]+)(\\s)*(?<PATTERN>.*)");
+
+    private static final String PATTERNS_PATH = "patterns";
+    private static final String TAB = "\t ";
+    private static final String NEWLINE = "\n";
+    private static final String SPACE = " ";
+
+    private final Logger logger;
+    private final Map<String, String> definitions;
+
+    /**
+     * Creates a new {@link GrokPatternResolver} instance.
+     */
+    public GrokPatternResolver() {
+        this(LOG, Collections.emptyMap(), Collections.emptyList());
+    }
+
+    /**
+     * Creates a new {@link GrokPatternResolver} instance.
+     *
+     * @param logger             the logger to use.
+     * @param patternDefinitions a map of pattern definitions.
+     * @param patternsDir        a list of pattern directories to load.
+ */ + public GrokPatternResolver(final Logger logger, + final Map patternDefinitions, + final List patternsDir) { + this.logger = logger; + this.definitions = new LinkedHashMap<>(); + loadPredefinedPatterns(); + loadUserDefinedPatterns(patternsDir); + this.definitions.putAll(patternDefinitions); + } + + private void loadUserDefinedPatterns(final Collection patternsDir) { + if (patternsDir != null) { + for (File dir : patternsDir) { + if (!dir.exists() || !dir.canRead()) { + logger.error( + "Can't read pattern from user directory {} - directory doesn't exist or is readable", + patternsDir); + return; + } + + if (!dir.isDirectory()) { + logger.error("Can't read pattern from {} - not a directory", patternsDir); + return; + } + + try { + List paths = Files.list(dir.toPath()).toList(); + loadPatternDefinitions(paths); + } catch (IOException e) { + logger.error("Unexpected error occurred while reading user defined patterns", e); + } + } + } + } + + private void loadPredefinedPatterns() { + logger.debug("Looking for pre-defined patterns definitions from : {}", PATTERNS_PATH); + try { + ClassLoader cl = getClassLoader(); + URL url = cl.getResource(PATTERNS_PATH); + if (url != null) { + final String protocol = url.getProtocol(); + if (protocol != null && protocol.equals("jar")) { + try (FileSystem fs = getFileSystemFor(url)) { + final List paths = Files.walk(fs.getPath(PATTERNS_PATH)).filter(Files::isRegularFile).toList(); + loadPatternDefinitions(paths); + } + } else { + final List paths = Files.list(Paths.get(url.toURI())).toList(); + loadPatternDefinitions(paths); + } + } else { + logger.error("Failed to load pre-defined patterns definitions : {}", PATTERNS_PATH); + } + } catch (IOException | URISyntaxException e) { + logger.error("Unexpected error occurred while reading pre-defined patterns", e); + } + } + + private FileSystem getFileSystemFor(final URL url) throws URISyntaxException, IOException { + FileSystem fs; + try { + fs = FileSystems.getFileSystem(url.toURI()); + } catch (FileSystemNotFoundException e) { + fs = FileSystems.newFileSystem(url.toURI(), Collections.emptyMap()); + } + return fs; + } + + private void loadPatternDefinitions(final List paths) throws IOException { + for (final Path path : paths) { + Map patternDefinitions = readPatternDefinitionsFrom(path); + definitions.putAll(patternDefinitions); + logger.debug("Loaded patterns definitions from : {}", path.toUri()); + } + } + + private static ClassLoader getClassLoader() { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + if (cl == null) { + return GrokPatternResolver.class.getClassLoader(); + } + return cl; + } + + public String resolve(final String syntax) { + if (!definitions.containsKey(syntax)) { + throw new GrokException("No pattern definition found for syntax : " + syntax); + } + + return definitions.get(syntax); + } + + private Map readPatternDefinitionsFrom(final Path path) throws GrokException, IOException { + final InputStream is = Files.newInputStream(path, StandardOpenOption.READ); + try (BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + List definitions = br.lines().collect(Collectors.toList()); + return readPatterns(definitions); + } catch (IOException e) { + throw new GrokException("Unexpected error while reading pattern definition : " + path); + } + } + + private Map readPatterns(final Collection definitions) { + return definitions.stream() + .map(s -> { + KeyValue result = null; + Matcher matcher = DEFINITION.matcher(s); + if (matcher.matches()) 
+                    String name = matcher.group("NAME");
+                    String pattern = matcher.group("PATTERN");
+                    result = new KeyValue<>(name, pattern);
+                }
+                return result;
+            })
+            .filter(Objects::nonNull)
+            .collect(Collectors.toMap(KeyValue::key, KeyValue::value));
+    }
+
+    @VisibleForTesting
+    void print() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("Defined pattern definitions list : \n");
+        definitions.forEach((k, v) -> {
+            sb.append(TAB)
+                .append(k)
+                .append(SPACE)
+                .append(v)
+                .append(NEWLINE);
+        });
+        LOG.info("{}", sb);
+    }
+
+    @VisibleForTesting
+    boolean isEmpty() {
+        return definitions.isEmpty();
+    }
+
+    @VisibleForTesting
+    public Map<String, String> definitions() {
+        return definitions;
+    }
+
+    private record KeyValue<K, V>(
+        K key,
+        V value
+    ) {
+
+        KeyValue {
+            Objects.requireNonNull(key, "key can't be null");
+            Objects.requireNonNull(value, "value can't be null");
+        }
+    }
+}
\ No newline at end of file
diff --git a/plugin-transforms-grok/src/main/resources/patterns/aws b/plugin-transforms-grok/src/main/resources/patterns/aws
new file mode 100644
index 0000000..72f8d8e
--- /dev/null
+++ b/plugin-transforms-grok/src/main/resources/patterns/aws
@@ -0,0 +1,40 @@
+# This file is a copy from project : Logstash plugins
+# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/aws
+#
+# SPDX-License-Identifier: Apache-2.0
+# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE
+#
+
+S3_REQUEST_LINE (?:%{WORD:[http][request][method]} %{NOTSPACE:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?)
+
+S3_ACCESS_LOG %{WORD:[aws][s3access][bucket_owner]} %{NOTSPACE:[aws][s3access][bucket]} \[%{HTTPDATE:timestamp}\] (?:-|%{IP:[client][ip]}) (?:-|%{NOTSPACE:[client][user][id]}) %{NOTSPACE:[aws][s3access][request_id]} %{NOTSPACE:[aws][s3access][operation]} (?:-|%{NOTSPACE:[aws][s3access][key]}) (?:-|"%{S3_REQUEST_LINE:[aws][s3access][request_uri]}") (?:-|%{INT:[http][response][status_code]:int}) (?:-|%{NOTSPACE:[aws][s3access][error_code]}) (?:-|%{INT:[aws][s3access][bytes_sent]:int}) (?:-|%{INT:[aws][s3access][object_size]:int}) (?:-|%{INT:[aws][s3access][total_time]:int}) (?:-|%{INT:[aws][s3access][turn_around_time]:int}) "(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[aws][s3access][version_id]})(?: (?:-|%{NOTSPACE:[aws][s3access][host_id]}) (?:-|%{NOTSPACE:[aws][s3access][signature_version]}) (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][s3access][authentication_type]}) (?:-|%{NOTSPACE:[aws][s3access][host_header]}) (?:-|%{NOTSPACE:[aws][s3access][tls_version]}))?
+# :long - %{INT:[aws][s3access][bytes_sent]:int}
+# :long - %{INT:[aws][s3access][object_size]:int}
+
+ELB_URIHOST %{IPORHOST:[url][domain]}(?::%{POSINT:[url][port]:int})?
+ELB_URIPATHQUERY %{URIPATH:[url][path]}(?:\?%{URIQUERY:[url][query]})?
+# deprecated - old name:
+ELB_URIPATHPARAM %{ELB_URIPATHQUERY}
+ELB_URI %{URIPROTO:[url][scheme]}://(?:%{USER:[url][username]}(?::[^@]*)?@)?(?:%{ELB_URIHOST})?(?:%{ELB_URIPATHQUERY})?
+
+ELB_REQUEST_LINE (?:%{WORD:[http][request][method]} %{ELB_URI:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?)
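+#
+# Illustration (hypothetical request line, not taken from real logs): a value such as
+#   GET http://www.example.com:80/index.html?foo=bar HTTP/1.1
+# matches ELB_REQUEST_LINE, capturing the method, the URL components, and the HTTP version.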
+
+# pattern supports 'regular' HTTP ELB format
+ELB_V1_HTTP_LOG %{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:[aws][elb][name]} %{IP:[source][ip]}:%{INT:[source][port]:int} (?:-|(?:%{IP:[aws][elb][backend][ip]}:%{INT:[aws][elb][backend][port]:int})) (?:-1|%{NUMBER:[aws][elb][request_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][backend_processing_time][sec]:float}) (?:-1|%{NUMBER:[aws][elb][response_processing_time][sec]:float}) %{INT:[http][response][status_code]:int} (?:-|%{INT:[aws][elb][backend][http][response][status_code]:int}) %{INT:[http][request][body][bytes]:int} %{INT:[http][response][body][bytes]:int} "%{ELB_REQUEST_LINE}"(?: "(?:-|%{DATA:[user_agent][original]})" (?:-|%{NOTSPACE:[tls][cipher]}) (?:-|%{NOTSPACE:[aws][elb][ssl_protocol]}))?
+# :long - %{INT:[http][request][body][bytes]:int}
+# :long - %{INT:[http][response][body][bytes]:int}
+
+ELB_ACCESS_LOG %{ELB_V1_HTTP_LOG}
+
+# Each edge location is identified by a three-letter code and an arbitrarily assigned number.
+# The three-letter IATA code typically represents an airport near the edge location.
+# examples: "LHR62-C2", "SFO5-P1", ""IND6", "CPT50"
+CLOUDFRONT_EDGE_LOCATION [A-Z]{3}[0-9]{1,2}(?:-[A-Z0-9]{2})?
+
+# pattern used to match a shorted format, that's why we have the optional part (starting with *http.version*) at the end
+CLOUDFRONT_ACCESS_LOG (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY}\t%{TIME})\t%{CLOUDFRONT_EDGE_LOCATION:[aws][cloudfront][x_edge_location]}\t(?:-|%{INT:[destination][bytes]:int})\t%{IPORHOST:[source][ip]}\t%{WORD:[http][request][method]}\t%{HOSTNAME:[url][domain]}\t%{NOTSPACE:[url][path]}\t(?:(?:000)|%{INT:[http][response][status_code]:int})\t(?:-|%{DATA:[http][request][referrer]})\t%{DATA:[user_agent][original]}\t(?:-|%{DATA:[url][query]})\t(?:-|%{DATA:[aws][cloudfront][http][request][cookie]})\t%{WORD:[aws][cloudfront][x_edge_result_type]}\t%{NOTSPACE:[aws][cloudfront][x_edge_request_id]}\t%{HOSTNAME:[aws][cloudfront][http][request][host]}\t%{URIPROTO:[network][protocol]}\t(?:-|%{INT:[source][bytes]:int})\t%{NUMBER:[aws][cloudfront][time_taken]:float}\t(?:-|%{IP:[network][forwarded_ip]})\t(?:-|%{DATA:[aws][cloudfront][ssl_protocol]})\t(?:-|%{NOTSPACE:[tls][cipher]})\t%{WORD:[aws][cloudfront][x_edge_response_result_type]}(?:\t(?:-|HTTP/%{NUMBER:[http][version]})\t(?:-|%{DATA:[aws][cloudfront][fle_status]})\t(?:-|%{DATA:[aws][cloudfront][fle_encrypted_fields]})\t%{INT:[source][port]:int}\t%{NUMBER:[aws][cloudfront][time_to_first_byte]:float}\t(?:-|%{DATA:[aws][cloudfront][x_edge_detailed_result_type]})\t(?:-|%{NOTSPACE:[http][request][mime_type]})\t(?:-|%{INT:[aws][cloudfront][http][request][size]:int})\t(?:-|%{INT:[aws][cloudfront][http][request][range][start]:int})\t(?:-|%{INT:[aws][cloudfront][http][request][range][end]:int}))?
+# :long - %{INT:[destination][bytes]:int} +# :long - %{INT:[source][bytes]:int} +# :long - %{INT:[aws][cloudfront][http][request][size]:int} +# :long - %{INT:[aws][cloudfront][http][request][range][start]:int} +# :long - %{INT:[aws][cloudfront][http][request][range][end]:int} \ No newline at end of file diff --git a/plugin-transforms-grok/src/main/resources/patterns/bacula b/plugin-transforms-grok/src/main/resources/patterns/bacula new file mode 100644 index 0000000..8cc4643 --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/bacula @@ -0,0 +1,60 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/bacula +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +BACULA_TIMESTAMP %{MONTHDAY}-%{MONTH}(?:-%{YEAR})? %{HOUR}:%{MINUTE} +BACULA_HOST %{HOSTNAME} +BACULA_VOLUME %{USER} +BACULA_DEVICE %{USER} +BACULA_DEVICEPATH %{UNIXPATH} +BACULA_CAPACITY %{INT}{1,3}(,%{INT}{3})* +BACULA_VERSION %{USER} +BACULA_JOB %{USER} + +BACULA_LOG_MAX_CAPACITY User defined maximum volume capacity %{BACULA_CAPACITY:[bacula][volume][max_capacity]} exceeded on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\).? +BACULA_LOG_END_VOLUME End of medium on Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" Bytes=%{BACULA_CAPACITY:[bacula][volume][bytes]} Blocks=%{BACULA_CAPACITY:[bacula][volume][blocks]} at %{BACULA_TIMESTAMP:[bacula][timestamp]}. +BACULA_LOG_NEW_VOLUME Created new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" in catalog. +BACULA_LOG_NEW_LABEL Labeled new Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on (?:file )?device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\). +BACULA_LOG_WROTE_LABEL Wrote label to prelabeled Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\) +BACULA_LOG_NEW_MOUNT New volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" mounted on device \"%{BACULA_DEVICE:[bacula][volume][device]}\" \(%{BACULA_DEVICEPATH:[bacula][volume][path]}\) at %{BACULA_TIMESTAMP:[bacula][timestamp]}. +BACULA_LOG_NOOPEN \s*Cannot open %{DATA}: ERR=%{GREEDYDATA:[error][message]} +BACULA_LOG_NOOPENDIR \s*Could not open directory \"?%{DATA:[file][path]}\"?: ERR=%{GREEDYDATA:[error][message]} +BACULA_LOG_NOSTAT \s*Could not stat %{DATA:[file][path]}: ERR=%{GREEDYDATA:[error][message]} +BACULA_LOG_NOJOBS There are no more Jobs associated with Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\". Marking it purged. +BACULA_LOG_ALL_RECORDS_PRUNED .*?All records pruned from Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\"; marking it \"Purged\" +BACULA_LOG_BEGIN_PRUNE_JOBS Begin pruning Jobs older than %{INT} month %{INT} days . +BACULA_LOG_BEGIN_PRUNE_FILES Begin pruning Files. +BACULA_LOG_PRUNED_JOBS Pruned %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog. +BACULA_LOG_PRUNED_FILES Pruned Files from %{INT} Jobs* for client %{BACULA_HOST:[bacula][client][name]} from catalog. +BACULA_LOG_ENDPRUNE End auto prune. 
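+
+# Illustrative note (not part of the upstream file): the BACULA_LOG_* patterns
+# below are alternatives; the composite %{BACULA_LOG} defined at the end of
+# this file matches the common timestamp/hostname/JobId prefix and then tries
+# each of them in turn.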
+BACULA_LOG_STARTJOB Start Backup JobId %{INT}, Job=%{BACULA_JOB:[bacula][job][name]} +BACULA_LOG_STARTRESTORE Start Restore Job %{BACULA_JOB:[bacula][job][name]} +BACULA_LOG_USEDEVICE Using Device \"%{BACULA_DEVICE:[bacula][volume][device]}\" +BACULA_LOG_DIFF_FS \s*%{UNIXPATH} is a different filesystem. Will not descend from %{UNIXPATH} into it. +BACULA_LOG_JOBEND Job write elapsed time = %{DATA:[bacula][job][elapsed_time]}, Transfer rate = %{NUMBER} (K|M|G)? Bytes/second +BACULA_LOG_NOPRUNE_JOBS No Jobs found to prune. +BACULA_LOG_NOPRUNE_FILES No Files found to prune. +BACULA_LOG_VOLUME_PREVWRITTEN Volume \"?%{BACULA_VOLUME:[bacula][volume][name]}\"? previously written, moving to end of data. +BACULA_LOG_READYAPPEND Ready to append to end of Volume \"%{BACULA_VOLUME:[bacula][volume][name]}\" size=%{INT:[bacula][volume][size]:int} +# :long - %{INT:[bacula][volume][size]:int} +BACULA_LOG_CANCELLING Cancelling duplicate JobId=%{INT:[bacula][job][other_id]}. +BACULA_LOG_MARKCANCEL JobId %{INT:[bacula][job][id]}, Job %{BACULA_JOB:[bacula][job][name]} marked to be canceled. +BACULA_LOG_CLIENT_RBJ shell command: run ClientRunBeforeJob \"%{GREEDYDATA:[bacula][job][client_run_before_command]}\" +BACULA_LOG_VSS (Generate )?VSS (Writer)? +BACULA_LOG_MAXSTART Fatal [eE]rror: Job canceled because max start delay time exceeded. +BACULA_LOG_DUPLICATE Fatal [eE]rror: JobId %{INT:[bacula][job][other_id]} already running. Duplicate job not allowed. +BACULA_LOG_NOJOBSTAT Fatal [eE]rror: No Job status returned from FD. +BACULA_LOG_FATAL_CONN Fatal [eE]rror: bsock.c:133 Unable to connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:int}. ERR=%{GREEDYDATA:[error][message]} +BACULA_LOG_NO_CONNECT Warning: bsock.c:127 Could not connect to (Client: %{BACULA_HOST:[bacula][client][name]}|Storage daemon) on %{IPORHOST:[client][address]}:%{POSINT:[client][port]:int}. ERR=%{GREEDYDATA:[error][message]} +BACULA_LOG_NO_AUTH Fatal error: Unable to authenticate with File daemon at \"?%{IPORHOST:[client][address]}(?::%{POSINT:[client][port]:int})?\"?. Possible causes: +BACULA_LOG_NOSUIT No prior or suitable Full backup found in catalog. Doing FULL backup. +BACULA_LOG_NOPRIOR No prior Full backup Job record found. + +BACULA_LOG_JOB (Error: )?Bacula %{BACULA_HOST} %{BACULA_VERSION} \(%{BACULA_VERSION}\): + +BACULA_LOG %{BACULA_TIMESTAMP:timestamp} %{BACULA_HOST:[host][hostname]}(?: JobId %{INT:[bacula][job][id]})?:? 
(%{BACULA_LOG_MAX_CAPACITY}|%{BACULA_LOG_END_VOLUME}|%{BACULA_LOG_NEW_VOLUME}|%{BACULA_LOG_NEW_LABEL}|%{BACULA_LOG_WROTE_LABEL}|%{BACULA_LOG_NEW_MOUNT}|%{BACULA_LOG_NOOPEN}|%{BACULA_LOG_NOOPENDIR}|%{BACULA_LOG_NOSTAT}|%{BACULA_LOG_NOJOBS}|%{BACULA_LOG_ALL_RECORDS_PRUNED}|%{BACULA_LOG_BEGIN_PRUNE_JOBS}|%{BACULA_LOG_BEGIN_PRUNE_FILES}|%{BACULA_LOG_PRUNED_JOBS}|%{BACULA_LOG_PRUNED_FILES}|%{BACULA_LOG_ENDPRUNE}|%{BACULA_LOG_STARTJOB}|%{BACULA_LOG_STARTRESTORE}|%{BACULA_LOG_USEDEVICE}|%{BACULA_LOG_DIFF_FS}|%{BACULA_LOG_JOBEND}|%{BACULA_LOG_NOPRUNE_JOBS}|%{BACULA_LOG_NOPRUNE_FILES}|%{BACULA_LOG_VOLUME_PREVWRITTEN}|%{BACULA_LOG_READYAPPEND}|%{BACULA_LOG_CANCELLING}|%{BACULA_LOG_MARKCANCEL}|%{BACULA_LOG_CLIENT_RBJ}|%{BACULA_LOG_VSS}|%{BACULA_LOG_MAXSTART}|%{BACULA_LOG_DUPLICATE}|%{BACULA_LOG_NOJOBSTAT}|%{BACULA_LOG_FATAL_CONN}|%{BACULA_LOG_NO_CONNECT}|%{BACULA_LOG_NO_AUTH}|%{BACULA_LOG_NOSUIT}|%{BACULA_LOG_JOB}|%{BACULA_LOG_NOPRIOR}) +# old (deprecated) name : +BACULA_LOGLINE %{BACULA_LOG} diff --git a/plugin-transforms-grok/src/main/resources/patterns/bind b/plugin-transforms-grok/src/main/resources/patterns/bind new file mode 100644 index 0000000..ddb7505 --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/bind @@ -0,0 +1,20 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/bind +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +BIND9_TIMESTAMP %{MONTHDAY}[-]%{MONTH}[-]%{YEAR} %{TIME} + +BIND9_DNSTYPE (?:A|AAAA|CAA|CDNSKEY|CDS|CERT|CNAME|CSYNC|DLV|DNAME|DNSKEY|DS|HINFO|LOC|MX|NAPTR|NS|NSEC|NSEC3|OPENPGPKEY|PTR|RRSIG|RP|SIG|SMIMEA|SOA|SRV|TSIG|TXT|URI) +BIND9_CATEGORY (?:queries) + +# dns.question.class is static - only 'IN' is supported by Bind9 +# bind.log.question.name is expected to be a 'duplicate' (same as the dns.question.name capture) +BIND9_QUERYLOGBASE client(:? @0x(?:[0-9A-Fa-f]+))? %{IP:[client][ip]}#%{POSINT:[client][port]:int} \(%{GREEDYDATA:[bind][log][question][name]}\): query: %{GREEDYDATA:[dns][question][name]} (?<[dns][question][class]>IN) %{BIND9_DNSTYPE:[dns][question][type]}(:? %{DATA:[bind][log][question][flags]})? 
\(%{IP:[server][ip]}\) + +# for query-logging category and severity are always fixed as "queries: info: " +BIND9_QUERYLOG %{BIND9_TIMESTAMP:timestamp} %{BIND9_CATEGORY:[bind][log][category]}: %{LOGLEVEL:[log][level]}: %{BIND9_QUERYLOGBASE} + +BIND9 %{BIND9_QUERYLOG} diff --git a/plugin-transforms-grok/src/main/resources/patterns/bro b/plugin-transforms-grok/src/main/resources/patterns/bro new file mode 100644 index 0000000..5abf3ef --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/bro @@ -0,0 +1,37 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/bro +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +# supports the 'old' BRO log files, for updated Zeek log format see the patters/ecs-v1/zeek +# https://www.bro.org/sphinx/script-reference/log-files.html + +BRO_BOOL [TF] +BRO_DATA [^\t]+ + +# http.log - old format (before the Zeek rename) : +BRO_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{INT:[zeek][http][trans_depth]:int}\t(?:-|%{WORD:[http][request][method]})\t(?:-|%{BRO_DATA:[url][domain]})\t(?:-|%{BRO_DATA:[url][original]})\t(?:-|%{BRO_DATA:[http][request][referrer]})\t(?:-|%{BRO_DATA:[user_agent][original]})\t(?:-|%{NUMBER:[http][request][body][bytes]:int})\t(?:-|%{NUMBER:[http][response][body][bytes]:int})\t(?:-|%{POSINT:[http][response][status_code]:int})\t(?:-|%{DATA:[zeek][http][status_msg]})\t(?:-|%{POSINT:[zeek][http][info_code]:int})\t(?:-|%{DATA:[zeek][http][info_msg]})\t(?:-|%{BRO_DATA:[zeek][http][filename]})\t(?:\(empty\)|%{BRO_DATA:[zeek][http][tags]})\t(?:-|%{BRO_DATA:[url][username]})\t(?:-|%{BRO_DATA:[url][password]})\t(?:-|%{BRO_DATA:[zeek][http][proxied]})\t(?:-|%{BRO_DATA:[zeek][http][orig_fuids]})\t(?:-|%{BRO_DATA:[http][request][mime_type]})\t(?:-|%{BRO_DATA:[zeek][http][resp_fuids]})\t(?:-|%{BRO_DATA:[http][response][mime_type]}) +# :long - %{NUMBER:[http][request][body][bytes]:int} +# :long - %{NUMBER:[http][response][body][bytes]:int} + +# dns.log - old format +BRO_DNS %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{INT:[dns][id]:int})\t(?:-|%{BRO_DATA:[dns][question][name]})\t(?:-|%{INT:[zeek][dns][qclass]:int})\t(?:-|%{BRO_DATA:[zeek][dns][qclass_name]})\t(?:-|%{INT:[zeek][dns][qtype]:int})\t(?:-|%{BRO_DATA:[dns][question][type]})\t(?:-|%{INT:[zeek][dns][rcode]:int})\t(?:-|%{BRO_DATA:[dns][response_code]})\t(?:-|%{BRO_BOOL:[zeek][dns][AA]})\t(?:-|%{BRO_BOOL:[zeek][dns][TC]})\t(?:-|%{BRO_BOOL:[zeek][dns][RD]})\t(?:-|%{BRO_BOOL:[zeek][dns][RA]})\t(?:-|%{NONNEGINT:[zeek][dns][Z]:int})\t(?:-|%{BRO_DATA:[zeek][dns][answers]})\t(?:-|%{DATA:[zeek][dns][TTLs]})\t(?:-|%{BRO_BOOL:[zeek][dns][rejected]}) + +# conn.log - old bro, also supports 'newer' format (optional *zeek.connection.local_resp* flag) compared to non-ecs mode +BRO_CONN 
%{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{BRO_DATA:[network][protocol]})\t(?:-|%{NUMBER:[zeek][connection][duration]:float})\t(?:-|%{INT:[zeek][connection][orig_bytes]:int})\t(?:-|%{INT:[zeek][connection][resp_bytes]:int})\t(?:-|%{BRO_DATA:[zeek][connection][state]})\t(?:-|%{BRO_BOOL:[zeek][connection][local_orig]})\t(?:(?:-|%{BRO_BOOL:[zeek][connection][local_resp]})\t)?(?:-|%{INT:[zeek][connection][missed_bytes]:int})\t(?:-|%{BRO_DATA:[zeek][connection][history]})\t(?:-|%{INT:[source][packets]:int})\t(?:-|%{INT:[source][bytes]:int})\t(?:-|%{INT:[destination][packets]:int})\t(?:-|%{INT:[destination][bytes]:int})\t(?:\(empty\)|%{BRO_DATA:[zeek][connection][tunnel_parents]}) +# :long - %{INT:[zeek][connection][orig_bytes]:int} +# :long - %{INT:[zeek][connection][resp_bytes]:int} +# :long - %{INT:[zeek][connection][missed_bytes]:int} +# :long - %{INT:[source][packets]:int} +# :long - %{INT:[source][bytes]:int} +# :long - %{INT:[destination][packets]:int} +# :long - %{INT:[destination][bytes]:int} + +# files.log - old format +BRO_FILES %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][files][fuid]}\t(?:-|%{IP:[server][ip]})\t(?:-|%{IP:[client][ip]})\t(?:-|%{BRO_DATA:[zeek][files][session_ids]})\t(?:-|%{BRO_DATA:[zeek][files][source]})\t(?:-|%{INT:[zeek][files][depth]:int})\t(?:-|%{BRO_DATA:[zeek][files][analyzers]})\t(?:-|%{BRO_DATA:[file][mime_type]})\t(?:-|%{BRO_DATA:[file][name]})\t(?:-|%{NUMBER:[zeek][files][duration]:float})\t(?:-|%{BRO_DATA:[zeek][files][local_orig]})\t(?:-|%{BRO_BOOL:[zeek][files][is_orig]})\t(?:-|%{INT:[zeek][files][seen_bytes]:int})\t(?:-|%{INT:[file][size]:int})\t(?:-|%{INT:[zeek][files][missing_bytes]:int})\t(?:-|%{INT:[zeek][files][overflow_bytes]:int})\t(?:-|%{BRO_BOOL:[zeek][files][timedout]})\t(?:-|%{BRO_DATA:[zeek][files][parent_fuid]})\t(?:-|%{BRO_DATA:[file][hash][md5]})\t(?:-|%{BRO_DATA:[file][hash][sha1]})\t(?:-|%{BRO_DATA:[file][hash][sha256]})\t(?:-|%{BRO_DATA:[zeek][files][extracted]}) +# :long - %{INT:[zeek][files][seen_bytes]:int} +# :long - %{INT:[file][size]:int} +# :long - %{INT:[zeek][files][missing_bytes]:int} +# :long - %{INT:[zeek][files][overflow_bytes]:int} diff --git a/plugin-transforms-grok/src/main/resources/patterns/exim b/plugin-transforms-grok/src/main/resources/patterns/exim new file mode 100644 index 0000000..ea58eba --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/exim @@ -0,0 +1,33 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/exim +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +EXIM_MSGID [0-9A-Za-z]{6}-[0-9A-Za-z]{6}-[0-9A-Za-z]{2} +# <= message arrival +# => normal message delivery +# -> additional address in same delivery +# *> delivery suppressed by -N +# ** delivery failed; address bounced +# == delivery deferred; temporary problem +EXIM_FLAGS (?:<=|=>|->|\*>|\*\*|==|<>|>>) +EXIM_DATE (:?%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}) +EXIM_PID \[%{POSINT:[process][pid]:int}\] +EXIM_QT ((\d+y)?(\d+w)?(\d+d)?(\d+h)?(\d+m)?(\d+s)?) 
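+
+# Illustrative note (not part of the upstream file): %{EXIM_QT} matches Exim
+# queue-time durations such as '3d2h15m' or '45s'; every unit is optional.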
+EXIM_EXCLUDE_TERMS (Message is frozen|(Start|End) queue run| Warning: | retry time not reached | no (IP address|host name) found for (IP address|host) | unexpected disconnection while reading SMTP command | no immediate delivery: |another process is handling this message) +EXIM_REMOTE_HOST (H=(%{NOTSPACE:[source][address]} )?(\(%{NOTSPACE:[exim][log][remote_address]}\) )?\[%{IP:[source][ip]}\](?::%{POSINT:[source][port]:int})?) +EXIM_INTERFACE (I=\[%{IP:[destination][ip]}\](?::%{NUMBER:[destination][port]:int})) +EXIM_PROTOCOL (P=%{NOTSPACE:[network][protocol]}) +EXIM_MSG_SIZE (S=%{NUMBER:[exim][log][message][size]:int}) +EXIM_HEADER_ID (id=%{NOTSPACE:[exim][log][header_id]}) +EXIM_QUOTED_CONTENT (?:\\.|[^\\"])* +EXIM_SUBJECT (T="%{EXIM_QUOTED_CONTENT:[exim][log][message][subject]}") + +EXIM_UNKNOWN_FIELD (?:[A-Za-z0-9]{1,4}=(?:%{QUOTEDSTRING}|%{NOTSPACE})) +EXIM_NAMED_FIELDS (?: (?:%{EXIM_REMOTE_HOST}|%{EXIM_INTERFACE}|%{EXIM_PROTOCOL}|%{EXIM_MSG_SIZE}|%{EXIM_HEADER_ID}|%{EXIM_SUBJECT}|%{EXIM_UNKNOWN_FIELD}))* + +EXIM_MESSAGE_ARRIVAL %{EXIM_DATE:timestamp} (?:%{EXIM_PID} )?%{EXIM_MSGID:[exim][log][message][id]} (?<[exim][log][flags]><=) (?<[exim][log][status]>[a-z:] )?%{EMAILADDRESS:[exim][log][sender][email]}%{EXIM_NAMED_FIELDS}(?:(?: from ?)? for %{EMAILADDRESS:[exim][log][recipient][email]})? + +EXIM %{EXIM_MESSAGE_ARRIVAL} diff --git a/plugin-transforms-grok/src/main/resources/patterns/firewalls b/plugin-transforms-grok/src/main/resources/patterns/firewalls new file mode 100644 index 0000000..cd6c351 --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/firewalls @@ -0,0 +1,118 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/firewall +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +# NetScreen firewall logs +NETSCREENSESSIONLOG %{SYSLOGTIMESTAMP:timestamp} %{IPORHOST:[observer][hostname]} %{NOTSPACE:[observer][name]}: (?<[observer][product]>NetScreen) device_id=%{WORD:[netscreen][device_id]} .*?(system-\w+-%{NONNEGINT:[event][code]}\(%{WORD:[netscreen][session][type]}\))?: start_time="%{DATA:[netscreen][session][start_time]}" duration=%{INT:[netscreen][session][duration]:int} policy_id=%{INT:[netscreen][policy_id]} service=%{DATA:[netscreen][service]} proto=%{INT:[netscreen][protocol_number]:int} src zone=%{WORD:[observer][ingress][zone]} dst zone=%{WORD:[observer][egress][zone]} action=%{WORD:[event][action]} sent=%{INT:[source][bytes]:int} rcvd=%{INT:[destination][bytes]:int} src=%{IPORHOST:[source][address]} dst=%{IPORHOST:[destination][address]}(?: src_port=%{INT:[source][port]:int} dst_port=%{INT:[destination][port]:int})?(?: src-xlated ip=%{IP:[source][nat][ip]} port=%{INT:[source][nat][port]:int} dst-xlated ip=%{IP:[destination][nat][ip]} port=%{INT:[destination][nat][port]:int})?(?: session_id=%{INT:[netscreen][session][id]} reason=%{GREEDYDATA:[netscreen][session][reason]})? +# :long - %{INT:[source][bytes]:int} +# :long - %{INT:[destination][bytes]:int} + +#== Cisco ASA == +CISCO_TAGGED_SYSLOG ^<%{POSINT:[log][syslog][priority]:int}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:[host][hostname]})? ?: %%{CISCOTAG:[cisco][asa][tag]}: +CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? 
%{TIME} +CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+) +# Common Particles +CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted +CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)* +CISCO_DIRECTION Inbound|inbound|Outbound|outbound +CISCO_INTERVAL first hit|%{INT}-second interval +CISCO_XLATE_TYPE static|dynamic +# helpers +CISCO_HITCOUNT_INTERVAL hit-cnt %{INT:[cisco][asa][hit_count]:int} (?:first hit|%{INT:[cisco][asa][interval]:int}-second interval) +CISCO_SRC_IP_USER %{NOTSPACE:[observer][ingress][interface][name]}:%{IP:[source][ip]}(?:\(%{DATA:[source][user][name]}\))? +CISCO_DST_IP_USER %{NOTSPACE:[observer][egress][interface][name]}:%{IP:[destination][ip]}(?:\(%{DATA:[destination][user][name]}\))? +CISCO_SRC_HOST_PORT_USER %{NOTSPACE:[observer][ingress][interface][name]}:(?:(?:%{IP:[source][ip]})|(?:%{HOSTNAME:[source][address]}))(?:/%{INT:[source][port]:int})?(?:\(%{DATA:[source][user][name]}\))? +CISCO_DST_HOST_PORT_USER %{NOTSPACE:[observer][egress][interface][name]}:(?:(?:%{IP:[destination][ip]})|(?:%{HOSTNAME:[destination][address]}))(?:/%{INT:[destination][port]:int})?(?:\(%{DATA:[destination][user][name]}\))? +# ASA-1-104001 +CISCOFW104001 \((?:Primary|Secondary)\) Switching to ACTIVE - %{GREEDYDATA:[event][reason]} +# ASA-1-104002 +CISCOFW104002 \((?:Primary|Secondary)\) Switching to STANDBY - %{GREEDYDATA:[event][reason]} +# ASA-1-104003 +CISCOFW104003 \((?:Primary|Secondary)\) Switching to FAILED\. +# ASA-1-104004 +CISCOFW104004 \((?:Primary|Secondary)\) Switching to OK\. +# ASA-1-105003 +CISCOFW105003 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{NOTSPACE:[network][interface][name]} waiting +# ASA-1-105004 +CISCOFW105004 \((?:Primary|Secondary)\) Monitoring on [Ii]nterface %{NOTSPACE:[network][interface][name]} normal +# ASA-1-105005 +CISCOFW105005 \((?:Primary|Secondary)\) Lost Failover communications with mate on [Ii]nterface %{NOTSPACE:[network][interface][name]} +# ASA-1-105008 +CISCOFW105008 \((?:Primary|Secondary)\) Testing [Ii]nterface %{NOTSPACE:[network][interface][name]} +# ASA-1-105009 +CISCOFW105009 \((?:Primary|Secondary)\) Testing on [Ii]nterface %{NOTSPACE:[network][interface][name]} (?:Passed|Failed) +# ASA-2-106001 +CISCOFW106001 %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{WORD:[cisco][asa][network][transport]} connection %{CISCO_ACTION:[cisco][asa][outcome]} from %{IP:[source][ip]}/%{INT:[source][port]:int} to %{IP:[destination][ip]}/%{INT:[destination][port]:int} flags %{DATA:[cisco][asa][tcp_flags]} on interface %{NOTSPACE:[observer][egress][interface][name]} +# ASA-2-106006, ASA-2-106007, ASA-2-106010 +CISCOFW106006_106007_106010 %{CISCO_ACTION:[cisco][asa][outcome]} %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{WORD:[cisco][asa][network][transport]} (?:from|src) %{IP:[source][ip]}/%{INT:[source][port]:int}(?:\(%{DATA:[source][user][name]}\))? (?:to|dst) %{IP:[destination][ip]}/%{INT:[destination][port]:int}(?:\(%{DATA:[destination][user][name]}\))? 
(?:(?:on interface %{NOTSPACE:[observer][egress][interface][name]})|(?:due to %{CISCO_REASON:[event][reason]})) +# ASA-3-106014 +CISCOFW106014 %{CISCO_ACTION:[cisco][asa][outcome]} %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{WORD:[cisco][asa][network][transport]} src %{CISCO_SRC_IP_USER} dst %{CISCO_DST_IP_USER}\s?\(type %{INT:[cisco][asa][icmp_type]:int}, code %{INT:[cisco][asa][icmp_code]:int}\) +# ASA-6-106015 +CISCOFW106015 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} \(%{DATA:[cisco][asa][rule_name]}\) from %{IP:[source][ip]}/%{INT:[source][port]:int} to %{IP:[destination][ip]}/%{INT:[destination][port]:int} flags %{DATA:[cisco][asa][tcp_flags]} on interface %{NOTSPACE:[observer][egress][interface][name]} +# ASA-1-106021 +CISCOFW106021 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} reverse path check from %{IP:[source][ip]} to %{IP:[destination][ip]} on interface %{NOTSPACE:[observer][egress][interface][name]} +# ASA-4-106023 +CISCOFW106023 %{CISCO_ACTION:[cisco][asa][outcome]}(?: protocol)? %{WORD:[cisco][asa][network][transport]} src %{CISCO_SRC_HOST_PORT_USER} dst %{CISCO_DST_HOST_PORT_USER}( \(type %{INT:[cisco][asa][icmp_type]:int}, code %{INT:[cisco][asa][icmp_code]:int}\))? by access-group "?%{DATA:[cisco][asa][rule_name]}"? \[%{DATA:[@metadata][cisco][asa][hashcode1]}, %{DATA:[@metadata][cisco][asa][hashcode2]}\] +# ASA-4-106100, ASA-4-106102, ASA-4-106103 +CISCOFW106100_2_3 access-list %{NOTSPACE:[cisco][asa][rule_name]} %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} for user '%{DATA:[user][name]}' %{DATA:[observer][ingress][interface][name]}/%{IP:[source][ip]}\(%{INT:[source][port]:int}\) -> %{DATA:[observer][egress][interface][name]}/%{IP:[destination][ip]}\(%{INT:[destination][port]:int}\) %{CISCO_HITCOUNT_INTERVAL} \[%{DATA:[@metadata][cisco][asa][hashcode1]}, %{DATA:[@metadata][cisco][asa][hashcode2]}\] +# ASA-5-106100 +CISCOFW106100 access-list %{NOTSPACE:[cisco][asa][rule_name]} %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} %{DATA:[observer][ingress][interface][name]}/%{IP:[source][ip]}\(%{INT:[source][port]:int}\)(?:\(%{DATA:[source][user][name]}\))? -> %{DATA:[observer][egress][interface][name]}/%{IP:[destination][ip]}\(%{INT:[destination][port]:int}\)(?:\(%{DATA:[source][user][name]}\))? hit-cnt %{INT:[cisco][asa][hit_count]:int} %{CISCO_INTERVAL} \[%{DATA:[@metadata][cisco][asa][hashcode1]}, %{DATA:[@metadata][cisco][asa][hashcode2]}\] +# ASA-5-304001 +CISCOFW304001 %{IP:[source][ip]}(?:\(%{DATA:[source][user][name]}\))? Accessed URL %{IP:[destination][ip]}:%{GREEDYDATA:[url][original]} +# ASA-6-110002 +CISCOFW110002 %{CISCO_REASON:[event][reason]} for %{WORD:[cisco][asa][network][transport]} from %{DATA:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:int} to %{IP:[destination][ip]}/%{INT:[destination][port]:int} +# ASA-6-302010 +CISCOFW302010 %{INT:[cisco][asa][connections][in_use]:int} in use, %{INT:[cisco][asa][connections][most_used]:int} most used +# ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016 +CISCOFW302013_302014_302015_302016 %{CISCO_ACTION:[cisco][asa][outcome]}(?: %{CISCO_DIRECTION:[cisco][asa][network][direction]})? 
%{WORD:[cisco][asa][network][transport]} connection %{INT:[cisco][asa][connection_id]} for %{NOTSPACE:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:int}(?: \(%{IP:[source][nat][ip]}/%{INT:[source][nat][port]:int}\))?(?:\(%{DATA:[source][user][name]}\))? to %{NOTSPACE:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:int}( \(%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:int}\))?(?:\(%{DATA:[destination][user][name]}\))?( duration %{TIME:[cisco][asa][duration]} bytes %{INT:[network][bytes]:int})?(?: %{CISCO_REASON:[event][reason]})?(?: \(%{DATA:[user][name]}\))? +# :long - %{INT:[network][bytes]:int} +# ASA-6-302020, ASA-6-302021 +CISCOFW302020_302021 %{CISCO_ACTION:[cisco][asa][outcome]}(?: %{CISCO_DIRECTION:[cisco][asa][network][direction]})? %{WORD:[cisco][asa][network][transport]} connection for faddr %{IP:[destination][ip]}/%{INT:[cisco][asa][icmp_seq]:int}(?:\(%{DATA:[destination][user][name]}\))? gaddr %{IP:[source][nat][ip]}/%{INT:[cisco][asa][icmp_type]:int} laddr %{IP:[source][ip]}/%{INT}(?: \(%{DATA:[source][user][name]}\))? +# ASA-6-305011 +CISCOFW305011 %{CISCO_ACTION:[cisco][asa][outcome]} %{CISCO_XLATE_TYPE} %{WORD:[cisco][asa][network][transport]} translation from %{DATA:[observer][ingress][interface][name]}:%{IP:[source][ip]}(/%{INT:[source][port]:int})?(?:\(%{DATA:[source][user][name]}\))? to %{DATA:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:int} +# ASA-3-313001, ASA-3-313004, ASA-3-313008 +CISCOFW313001_313004_313008 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} type=%{INT:[cisco][asa][icmp_type]:int}, code=%{INT:[cisco][asa][icmp_code]:int} from %{IP:[source][ip]} on interface %{NOTSPACE:[observer][egress][interface][name]}(?: to %{IP:[destination][ip]})? +# ASA-4-313005 +CISCOFW313005 %{CISCO_REASON:[event][reason]} for %{WORD:[cisco][asa][network][transport]} error message: %{WORD} src %{CISCO_SRC_IP_USER} dst %{CISCO_DST_IP_USER} \(type %{INT:[cisco][asa][icmp_type]:int}, code %{INT:[cisco][asa][icmp_code]:int}\) on %{NOTSPACE} interface\.\s+Original IP payload: %{WORD:[cisco][asa][original_ip_payload][network][transport]} src %{IP:[cisco][asa][original_ip_payload][source][ip]}/%{INT:[cisco][asa][original_ip_payload][source][port]:int}(?:\(%{DATA:[cisco][asa][original_ip_payload][source][user][name]}\))? dst %{IP:[cisco][asa][original_ip_payload][destination][ip]}/%{INT:[cisco][asa][original_ip_payload][destination][port]:int}(?:\(%{DATA:[cisco][asa][original_ip_payload][destination][user][name]}\))? +# ASA-5-321001 +CISCOFW321001 Resource '%{DATA:[cisco][asa][resource][name]}' limit of %{POSINT:[cisco][asa][resource][limit]:int} reached for system +# ASA-4-402117 +CISCOFW402117 %{WORD:[cisco][asa][network][type]}: Received a non-IPSec packet \(protocol=\s?%{WORD:[cisco][asa][network][transport]}\) from %{IP:[source][ip]} to %{IP:[destination][ip]}\.? +# ASA-4-402119 +CISCOFW402119 %{WORD:[cisco][asa][network][type]}: Received an %{WORD:[cisco][asa][ipsec][protocol]} packet \(SPI=\s?%{DATA:[cisco][asa][ipsec][spi]}, sequence number=\s?%{DATA:[cisco][asa][ipsec][seq_num]}\) from %{IP:[source][ip]} \(user=\s?%{DATA:[source][user][name]}\) to %{IP:[destination][ip]} that failed anti-replay checking\.? 
+# ASA-4-419001 +CISCOFW419001 %{CISCO_ACTION:[cisco][asa][outcome]} %{WORD:[cisco][asa][network][transport]} packet from %{NOTSPACE:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:int} to %{NOTSPACE:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:int}, reason: %{GREEDYDATA:[event][reason]} +# ASA-4-419002 +CISCOFW419002 %{CISCO_REASON:[event][reason]} from %{DATA:[observer][ingress][interface][name]}:%{IP:[source][ip]}/%{INT:[source][port]:int} to %{DATA:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:int} with different initial sequence number +# ASA-4-500004 +CISCOFW500004 %{CISCO_REASON:[event][reason]} for protocol=%{WORD:[cisco][asa][network][transport]}, from %{IP:[source][ip]}/%{INT:[source][port]:int} to %{IP:[destination][ip]}/%{INT:[destination][port]:int} +# ASA-6-602303, ASA-6-602304 +CISCOFW602303_602304 %{WORD:[cisco][asa][network][type]}: An %{CISCO_DIRECTION:[cisco][asa][network][direction]} %{DATA:[cisco][asa][ipsec][tunnel_type]} SA \(SPI=\s?%{DATA:[cisco][asa][ipsec][spi]}\) between %{IP:[source][ip]} and %{IP:[destination][ip]} \(user=\s?%{DATA:[source][user][name]}\) has been %{CISCO_ACTION:[cisco][asa][outcome]} +# ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006 +CISCOFW710001_710002_710003_710005_710006 %{WORD:[cisco][asa][network][transport]} (?:request|access) %{CISCO_ACTION:[cisco][asa][outcome]} from %{IP:[source][ip]}/%{INT:[source][port]:int} to %{DATA:[observer][egress][interface][name]}:%{IP:[destination][ip]}/%{INT:[destination][port]:int} +# ASA-6-713172 +CISCOFW713172 Group = %{DATA:[cisco][asa][source][group]}, IP = %{IP:[source][ip]}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:[@metadata][cisco][asa][remote_nat]}\s*behind a NAT device\s+This\s+end\s*%{DATA:[@metadata][cisco][asa][local_nat]}\s*behind a NAT device +# ASA-4-733100 +CISCOFW733100 \[\s*%{DATA:[cisco][asa][burst][object]}\s*\] drop %{DATA:[cisco][asa][burst][id]} exceeded. Current burst rate is %{INT:[cisco][asa][burst][current_rate]:int} per second, max configured rate is %{INT:[cisco][asa][burst][configured_rate]:int}; Current average rate is %{INT:[cisco][asa][burst][avg_rate]:int} per second, max configured rate is %{INT:[cisco][asa][burst][configured_avg_rate]:int}; Cumulative total count is %{INT:[cisco][asa][burst][cumulative_count]:int} +#== End Cisco ASA == + + +IPTABLES_TCP_FLAGS (CWR |ECE |URG |ACK |PSH |RST |SYN |FIN )* +IPTABLES_TCP_PART (?:SEQ=%{INT:[iptables][tcp][seq]:int}\s+)?(?:ACK=%{INT:[iptables][tcp][ack]:int}\s+)?WINDOW=%{INT:[iptables][tcp][window]:int}\s+RES=0x%{BASE16NUM:[iptables][tcp_reserved_bits]}\s+%{IPTABLES_TCP_FLAGS:[iptables][tcp][flags]} + +IPTABLES4_FRAG (?:(?<= )(?:CE|DF|MF))* +IPTABLES4_PART SRC=%{IPV4:[source][ip]}\s+DST=%{IPV4:[destination][ip]}\s+LEN=(?:%{INT:[iptables][length]:int})?\s+TOS=(?:0|0x%{BASE16NUM:[iptables][tos]})?\s+PREC=(?:0x%{BASE16NUM:[iptables][precedence_bits]})?\s+TTL=(?:%{INT:[iptables][ttl]:int})?\s+ID=(?:%{INT:[iptables][id]})?\s+(?:%{IPTABLES4_FRAG:[iptables][fragment_flags]})?(?:\s+FRAG: %{INT:[iptables][fragment_offset]:int})? +IPTABLES6_PART SRC=%{IPV6:[source][ip]}\s+DST=%{IPV6:[destination][ip]}\s+LEN=(?:%{INT:[iptables][length]:int})?\s+TC=(?:0|0x%{BASE16NUM:[iptables][tos]})?\s+HOPLIMIT=(?:%{INT:[iptables][ttl]:int})?\s+FLOWLBL=(?:%{INT:[iptables][flow_label]})? 
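+
+# Illustrative note (not part of the upstream file): IPTABLES below composes
+# the fragments above - the ingress/egress interface names, an optional MAC
+# block, the IPv4 or IPv6 header fields and, when PROTO is TCP, the TCP part.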
+
+IPTABLES IN=(?:%{NOTSPACE:[observer][ingress][interface][name]})?\s+OUT=(?:%{NOTSPACE:[observer][egress][interface][name]})?\s+(?:MAC=(?:%{COMMONMAC:[destination][mac]})?(?::%{COMMONMAC:[source][mac]})?(?::[A-Fa-f0-9]{2}:[A-Fa-f0-9]{2})?\s+)?(:?%{IPTABLES4_PART}|%{IPTABLES6_PART}).*?PROTO=(?:%{WORD:[network][transport]})?\s+SPT=(?:%{INT:[source][port]:int})?\s+DPT=(?:%{INT:[destination][port]:int})?\s+(?:%{IPTABLES_TCP_PART})?
+
+# Shorewall firewall logs
+SHOREWALL (?:%{SYSLOGTIMESTAMP:timestamp}) (?:%{WORD:[observer][hostname]}) .*Shorewall:(?:%{WORD:[shorewall][firewall][type]})?:(?:%{WORD:[shorewall][firewall][action]})?.*%{IPTABLES}
+#== End Shorewall
+#== SuSE Firewall 2 ==
+SFW2_LOG_PREFIX SFW2\-INext\-%{NOTSPACE:[suse][firewall][action]}
+SFW2 ((?:%{SYSLOGTIMESTAMP:timestamp})|(?:%{TIMESTAMP_ISO8601:timestamp}))\s*%{HOSTNAME:[observer][hostname]}.*?%{SFW2_LOG_PREFIX:[suse][firewall][log_prefix]}\s*%{IPTABLES}
+#== End SuSE ==
diff --git a/plugin-transforms-grok/src/main/resources/patterns/grok-patterns b/plugin-transforms-grok/src/main/resources/patterns/grok-patterns
new file mode 100644
index 0000000..9157aa9
--- /dev/null
+++ b/plugin-transforms-grok/src/main/resources/patterns/grok-patterns
@@ -0,0 +1,102 @@
+# This file is a copy from project : Logstash plugins
+# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/grok-patterns
+#
+# SPDX-License-Identifier: Apache-2.0
+# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE
+#
+
+USERNAME [a-zA-Z0-9._-]+
+USER %{USERNAME}
+EMAILLOCALPART [a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,64}(?:\.[a-zA-Z0-9!#$%&'*+\-/=?^_`{|}~]{1,62}){0,63}
+EMAILADDRESS %{EMAILLOCALPART}@%{HOSTNAME}
+INT (?:[+-]?(?:[0-9]+))
+BASE10NUM (?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+)))
+NUMBER (?:%{BASE10NUM})
+BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
+BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b
+
+POSINT \b(?:[1-9][0-9]*)\b
+NONNEGINT \b(?:[0-9]+)\b
+WORD \b\w+\b
+NOTSPACE \S+
+SPACE \s*
+DATA .*?
+GREEDYDATA .*
+QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
+UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
+# URN, allowing use of RFC 2141 section 2.3 reserved characters
+URN urn:[0-9A-Za-z][0-9A-Za-z-]{0,31}:(?:%[0-9a-fA-F]{2}|[0-9A-Za-z()+,.:=@;$_!*'/?#-])+
+
+# Networking
+MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})
+CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
+WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
+COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
+IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
+IPV4 (?<![0-9])(?:(?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5])[.](?:[0-1]?[0-9]{1,2}|2[0-4][0-9]|25[0-5]))(?![0-9])
+IP (?:%{IPV6}|%{IPV4})
+IPORHOST (?:%{IP}|%{HOSTNAME})
+HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
+HOSTPORT %{IPORHOST}:%{POSINT}
+
+# paths (initially only absolute paths & unix style)
+PATH (?:%{UNIXPATH}|%{WINPATH})
+UNIXPATH (/([\w_%!$@:.,+~-]+|\\.)*)+
+TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+))
+WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
+URIPROTO [A-Za-z]([A-Za-z0-9+\-.]+)+
+URIHOST %{IPORHOST}(?::%{POSINT})?
+# uripath comes loosely from RFC1738, but mostly from what Firefox doesn't turn into %XX
+URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%&_\-]*)+
+URIQUERY [A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]<>]*
+# deprecated (kept due compatibility):
+URIPARAM \?%{URIQUERY}
+URIPATHPARAM %{URIPATH}(?:\?%{URIQUERY})?
+URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATH}(?:\?%{URIQUERY})?)?
+
+# Months: January, Feb, 3, 03, 12, December
+MONTH \b(?:[Jj]an(?:uary|uar)?|[Ff]eb(?:ruary|ruar)?|[Mm](?:a|ä)?r(?:ch|z)?|[Aa]pr(?:il)?|[Mm]a(?:y|i)?|[Jj]un(?:e|i)?|[Jj]ul(?:y|i)?|[Aa]ug(?:ust)?|[Ss]ep(?:tember)?|[Oo](?:c|k)?t(?:ober)?|[Nn]ov(?:ember)?|[Dd]e(?:c|z)(?:ember)?)\b
+MONTHNUM (?:0?[1-9]|1[0-2])
+MONTHNUM2 (?:0[1-9]|1[0-2])
+MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
+
+# Days: Monday, Tue, Thu, etc...
+DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?)
+
+# Years?
+YEAR (?>\d\d){1,2}
+HOUR (?:2[0123]|[01]?[0-9])
+MINUTE (?:[0-5][0-9])
+# '60' is a leap second in most time standards and thus is valid.
+SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
+TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
+# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it)
+DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR}
+DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR}
+ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE}))
+ISO8601_SECOND %{SECOND}
+TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?
+DATE %{DATE_US}|%{DATE_EU}
+DATESTAMP %{DATE}[- ]%{TIME}
+TZ (?:[APMCE][SD]T|UTC)
+DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
+DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE}
+DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}
+DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
+
+# Syslog Dates: Month Day HH:MM:SS
+SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
+PROG [\x21-\x5a\x5c\x5e-\x7e]+
+SYSLOGPROG %{PROG:[process][name]}(?:\[%{POSINT:[process][pid]:int}\])?
+SYSLOGHOST %{IPORHOST}
+SYSLOGFACILITY <%{NONNEGINT:[log][syslog][facility][code]:int}.%{NONNEGINT:[log][syslog][priority]:int}>
+HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}
+
+# Shortcuts
+QS %{QUOTEDSTRING}
+
+# Log formats
+SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:[host][hostname]} %{SYSLOGPROG}:
+
+# Log Levels
+LOGLEVEL ([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo?(?:rmation)?|INFO?(?:RMATION)?|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)
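+
+# Illustrative note (not part of the upstream file): composite patterns expand
+# recursively, e.g. %{SYSLOGBASE} matches 'Mar  1 12:34:56 myhost sshd[1234]:'
+# capturing timestamp, [host][hostname], [process][name] and [process][pid].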
diff --git a/plugin-transforms-grok/src/main/resources/patterns/haproxy b/plugin-transforms-grok/src/main/resources/patterns/haproxy new file mode 100644 index 0000000..a3ab8fd --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/haproxy @@ -0,0 +1,46 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/haproxy +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +HAPROXYTIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) +HAPROXYDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{HAPROXYTIME}.%{INT} + +# Override these default patterns to parse out what is captured in your haproxy.cfg +HAPROXYCAPTUREDREQUESTHEADERS %{DATA:[haproxy][http][request][captured_headers]} +HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:[haproxy][http][response][captured_headers]} + +# Example: +# These haproxy config lines will add data to the logs that are captured +# by the patterns below. Place them in your custom patterns directory to +# override the defaults. +# +# capture request header Host len 40 +# capture request header X-Forwarded-For len 50 +# capture request header Accept-Language len 50 +# capture request header Referer len 200 +# capture request header User-Agent len 200 +# +# capture response header Content-Type len 30 +# capture response header Content-Encoding len 10 +# capture response header Cache-Control len 200 +# capture response header Last-Modified len 200 +# +# HAPROXYCAPTUREDREQUESTHEADERS %{DATA:[haproxy][http][request][host]}\|%{DATA:[haproxy][http][request][x_forwarded_for]}\|%{DATA:[haproxy][http][request][accept_language]}\|%{DATA:[http][request][referrer]}\|%{DATA:[user_agent][original]} +# HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:[http][response][mime_type]}\|%{DATA:[haproxy][http][response][encoding]}\|%{DATA:[haproxy][http][response][cache_control]}\|%{DATA:[haproxy][http][response][last_modified]} + +HAPROXYURI (?:%{URIPROTO:[url][scheme]}://)?(?:%{USER:[url][username]}(?::[^@]*)?@)?(?:%{IPORHOST:[url][domain]}(?::%{POSINT:[url][port]:int})?)?(?:%{URIPATH:[url][path]}(?:\?%{URIQUERY:[url][query]})?)? 
+
+HAPROXYHTTPREQUESTLINE (?:<BADREQ>|(?:%{WORD:[http][request][method]} %{HAPROXYURI:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?))
+
+# parse a haproxy 'httplog' line
+HAPROXYHTTPBASE %{IP:[source][address]}:%{INT:[source][port]:int} \[%{HAPROXYDATE:[haproxy][request_date]}\] %{NOTSPACE:[haproxy][frontend_name]} %{NOTSPACE:[haproxy][backend_name]}/(?:<NOSRV>|%{NOTSPACE:[haproxy][server_name]}) (?:-1|%{INT:[haproxy][http][request][time_wait_ms]:int})/(?:-1|%{INT:[haproxy][total_waiting_time_ms]:int})/(?:-1|%{INT:[haproxy][connection_wait_time_ms]:int})/(?:-1|%{INT:[haproxy][http][request][time_wait_without_data_ms]:int})/%{NOTSPACE:[haproxy][total_time_ms]} %{INT:[http][response][status_code]:int} %{INT:[source][bytes]:int} (?:-|%{DATA:[haproxy][http][request][captured_cookie]}) (?:-|%{DATA:[haproxy][http][response][captured_cookie]}) %{NOTSPACE:[haproxy][termination_state]} %{INT:[haproxy][connections][active]:int}/%{INT:[haproxy][connections][frontend]:int}/%{INT:[haproxy][connections][backend]:int}/%{INT:[haproxy][connections][server]:int}/%{INT:[haproxy][connections][retries]:int} %{INT:[haproxy][server_queue]:int}/%{INT:[haproxy][backend_queue]:int}(?: \{%{HAPROXYCAPTUREDREQUESTHEADERS}\}(?: \{%{HAPROXYCAPTUREDRESPONSEHEADERS}\})?)?(?: "%{HAPROXYHTTPREQUESTLINE}"?)?
+# :long - %{INT:[source][bytes]:int}
+
+HAPROXYHTTP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:[host][hostname]} %{SYSLOGPROG}: %{HAPROXYHTTPBASE}
+
+# parse a haproxy 'tcplog' line
+HAPROXYTCP (?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp}) %{IPORHOST:[host][hostname]} %{SYSLOGPROG}: %{IP:[source][address]}:%{INT:[source][port]:int} \[%{HAPROXYDATE:[haproxy][request_date]}\] %{NOTSPACE:[haproxy][frontend_name]} %{NOTSPACE:[haproxy][backend_name]}/(?:<NOSRV>|%{NOTSPACE:[haproxy][server_name]}) (?:-1|%{INT:[haproxy][total_waiting_time_ms]:int})/(?:-1|%{INT:[haproxy][connection_wait_time_ms]:int})/%{NOTSPACE:[haproxy][total_time_ms]} %{INT:[source][bytes]:int} %{NOTSPACE:[haproxy][termination_state]} %{INT:[haproxy][connections][active]:int}/%{INT:[haproxy][connections][frontend]:int}/%{INT:[haproxy][connections][backend]:int}/%{INT:[haproxy][connections][server]:int}/%{INT:[haproxy][connections][retries]:int} %{INT:[haproxy][server_queue]:int}/%{INT:[haproxy][backend_queue]:int}
+# :long - %{INT:[source][bytes]:int}
diff --git a/plugin-transforms-grok/src/main/resources/patterns/httpd b/plugin-transforms-grok/src/main/resources/patterns/httpd
new file mode 100644
index 0000000..f035c86
--- /dev/null
+++ b/plugin-transforms-grok/src/main/resources/patterns/httpd
@@ -0,0 +1,24 @@
+# This file is a copy from project : Logstash plugins
+# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/httpd
+#
+# SPDX-License-Identifier: Apache-2.0
+# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE
+#
+
+HTTPDUSER %{EMAILADDRESS}|%{USER}
+HTTPDERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
+
+# Log formats
+HTTPD_COMMONLOG %{IPORHOST:[source][address]} (?:-|%{HTTPDUSER:[apache][access][user][identity]}) (?:-|%{HTTPDUSER:[user][name]}) \[%{HTTPDATE:timestamp}\] "(?:%{WORD:[http][request][method]} %{NOTSPACE:[url][original]}(?: HTTP/%{NUMBER:[http][version]})?|%{DATA})" (?:-|%{INT:[http][response][status_code]:int}) (?:-|%{INT:[http][response][body][bytes]:int})
+# :long - %{INT:[http][response][body][bytes]:int}
+HTTPD_COMBINEDLOG %{HTTPD_COMMONLOG} "(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})"
"(?:-|%{DATA:[http][request][referrer]})" "(?:-|%{DATA:[user_agent][original]})" + +# Error logs +HTTPD20_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[%{LOGLEVEL:[log][level]}\] (?:\[client %{IPORHOST:[source][address]}\] )?%{GREEDYDATA:message} +HTTPD24_ERRORLOG \[%{HTTPDERROR_DATE:timestamp}\] \[(?:%{WORD:[apache][error][module]})?:%{LOGLEVEL:[log][level]}\] \[pid %{POSINT:[process][pid]:int}(:tid %{INT:[process][thread][id]:int})?\](?: \(%{POSINT:[apache][error][proxy][error][code]?}\)%{DATA:[apache][error][proxy][error][message]}:)?(?: \[client %{IPORHOST:[source][address]}(?::%{POSINT:[source][port]:int})?\])?(?: %{DATA:[error][code]}:)? %{GREEDYDATA:message} +# :long - %{INT:[process][thread][id]:int} +HTTPD_ERRORLOG %{HTTPD20_ERRORLOG}|%{HTTPD24_ERRORLOG} + +# Deprecated +COMMONAPACHELOG %{HTTPD_COMMONLOG} +COMBINEDAPACHELOG %{HTTPD_COMBINEDLOG} diff --git a/plugin-transforms-grok/src/main/resources/patterns/java b/plugin-transforms-grok/src/main/resources/patterns/java new file mode 100644 index 0000000..bf250ec --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/java @@ -0,0 +1,41 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/java +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +JAVACLASS (?:[a-zA-Z$_][a-zA-Z$_0-9]*\.)*[a-zA-Z$_][a-zA-Z$_0-9]* +#Space is an allowed character to match special cases like 'Native Method' or 'Unknown Source' +JAVAFILE (?:[a-zA-Z$_0-9. -]+) +#Allow special , methods +JAVAMETHOD (?:(<(?:cl)?init>)|[a-zA-Z$_][a-zA-Z$_0-9]*) +#Line number is optional in special cases 'Native method' or 'Unknown source' +JAVASTACKTRACEPART %{SPACE}at %{JAVACLASS:[java][log][origin][class][name]}\.%{JAVAMETHOD:[log][origin][function]}\(%{JAVAFILE:[log][origin][file][name]}(?::%{INT:[log][origin][file][line]:int})?\) +# Java Logs +JAVATHREAD (?:[A-Z]{2}-Processor[\d]+) +JAVALOGMESSAGE (?:.*) + +# MMM dd, yyyy HH:mm:ss eg: Jan 9, 2014 7:13:13 AM +# matches default logging configuration in Tomcat 4.1, 5.0, 5.5, 6.0, 7.0 +CATALINA7_DATESTAMP %{MONTH} %{MONTHDAY}, %{YEAR} %{HOUR}:%{MINUTE}:%{SECOND} (?:AM|PM) +CATALINA7_LOG %{CATALINA7_DATESTAMP:timestamp} %{JAVACLASS:[java][log][origin][class][name]}(?: %{JAVAMETHOD:[log][origin][function]})?\s*(?:%{LOGLEVEL:[log][level]}:)? %{JAVALOGMESSAGE:message} + +# 31-Jul-2020 16:40:38.578 in Tomcat 8.5/9.0 +CATALINA8_DATESTAMP %{MONTHDAY}-%{MONTH}-%{YEAR} %{HOUR}:%{MINUTE}:%{SECOND} +CATALINA8_LOG %{CATALINA8_DATESTAMP:timestamp} %{LOGLEVEL:[log][level]} \[%{DATA:[java][log][origin][thread][name]}\] %{JAVACLASS:[java][log][origin][class][name]}\.(?:%{JAVAMETHOD:[log][origin][function]})? %{JAVALOGMESSAGE:message} + +CATALINA_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP}) +CATALINALOG (?:%{CATALINA8_LOG})|(?:%{CATALINA7_LOG}) + +# in Tomcat 5.5, 6.0, 7.0 it is the same as catalina.out logging format +TOMCAT7_LOG %{CATALINA7_LOG} +TOMCAT8_LOG %{CATALINA8_LOG} + +# NOTE: a weird log we started with - not sure what TC version this should match out of the box (due the | delimiters) +TOMCATLEGACY_DATESTAMP %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}(?: %{ISO8601_TIMEZONE})? 
+TOMCATLEGACY_LOG %{TOMCATLEGACY_DATESTAMP:timestamp} \| %{LOGLEVEL:[log][level]} \| %{JAVACLASS:[java][log][origin][class][name]} - %{JAVALOGMESSAGE:message} + +TOMCAT_DATESTAMP (?:%{CATALINA8_DATESTAMP})|(?:%{CATALINA7_DATESTAMP})|(?:%{TOMCATLEGACY_DATESTAMP}) + +TOMCATLOG (?:%{TOMCAT8_LOG})|(?:%{TOMCAT7_LOG})|(?:%{TOMCATLEGACY_LOG}) diff --git a/plugin-transforms-grok/src/main/resources/patterns/junos b/plugin-transforms-grok/src/main/resources/patterns/junos new file mode 100644 index 0000000..e70c04c --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/junos @@ -0,0 +1,20 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/junos +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +# JUNOS 11.4 RT_FLOW patterns +RT_FLOW_TAG (?:RT_FLOW_SESSION_CREATE|RT_FLOW_SESSION_CLOSE|RT_FLOW_SESSION_DENY) +# deprecated legacy name: +RT_FLOW_EVENT RT_FLOW_TAG + +RT_FLOW1 %{RT_FLOW_TAG:[juniper][srx][tag]}: %{GREEDYDATA:[juniper][srx][reason]}: %{IP:[source][ip]}/%{INT:[source][port]:int}->%{IP:[destination][ip]}/%{INT:[destination][port]:int} %{DATA:[juniper][srx][service_name]} %{IP:[source][nat][ip]}/%{INT:[source][nat][port]:int}->%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:int} (?:(?:None)|(?:%{DATA:[juniper][srx][src_nat_rule_name]})) (?:(?:None)|(?:%{DATA:[juniper][srx][dst_nat_rule_name]})) %{INT:[network][iana_number]} %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} %{INT:[juniper][srx][session_id]} \d+\(%{INT:[source][bytes]:int}\) \d+\(%{INT:[destination][bytes]:int}\) %{INT:[juniper][srx][elapsed_time]:int} .* +# :long - %{INT:[source][bytes]:int} +# :long - %{INT:[destination][bytes]:int} + +RT_FLOW2 %{RT_FLOW_TAG:[juniper][srx][tag]}: session created %{IP:[source][ip]}/%{INT:[source][port]:int}->%{IP:[destination][ip]}/%{INT:[destination][port]:int} %{DATA:[juniper][srx][service_name]} %{IP:[source][nat][ip]}/%{INT:[source][nat][port]:int}->%{IP:[destination][nat][ip]}/%{INT:[destination][nat][port]:int} (?:(?:None)|(?:%{DATA:[juniper][srx][src_nat_rule_name]})) (?:(?:None)|(?:%{DATA:[juniper][srx][dst_nat_rule_name]})) %{INT:[network][iana_number]} %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} %{INT:[juniper][srx][session_id]} .* + +RT_FLOW3 %{RT_FLOW_TAG:[juniper][srx][tag]}: session denied %{IP:[source][ip]}/%{INT:[source][port]:int}->%{IP:[destination][ip]}/%{INT:[destination][port]:int} %{DATA:[juniper][srx][service_name]} %{INT:[network][iana_number]}\(\d\) %{DATA:[rule][name]} %{DATA:[observer][ingress][zone]} %{DATA:[observer][egress][zone]} .* + diff --git a/plugin-transforms-grok/src/main/resources/patterns/linux-syslog b/plugin-transforms-grok/src/main/resources/patterns/linux-syslog new file mode 100644 index 0000000..67bd37d --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/linux-syslog @@ -0,0 +1,23 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/linux-syslog +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +SYSLOG5424PRINTASCII [!-~]+ + +SYSLOGBASE2 
(?:%{SYSLOGTIMESTAMP:timestamp}|%{TIMESTAMP_ISO8601:timestamp})(?: %{SYSLOGFACILITY})?(?: %{SYSLOGHOST:[host][hostname]})?(?: %{SYSLOGPROG}:)? +SYSLOGPAMSESSION %{SYSLOGBASE} (?=%{GREEDYDATA:message})%{WORD:[system][auth][pam][module]}\(%{DATA:[system][auth][pam][origin]}\): session %{WORD:[system][auth][pam][session_state]} for user %{USERNAME:[user][name]}(?: by %{GREEDYDATA})? + +CRON_ACTION [A-Z ]+ +CRONLOG %{SYSLOGBASE} \(%{USER:[user][name]}\) %{CRON_ACTION:[system][cron][action]} \(%{DATA:message}\) + +SYSLOGLINE %{SYSLOGBASE2} %{GREEDYDATA:message} + +# IETF 5424 syslog(8) format (see http://www.rfc-editor.org/info/rfc5424) +SYSLOG5424PRI <%{NONNEGINT:[log][syslog][priority]:int}> +SYSLOG5424SD \[%{DATA}\]+ +SYSLOG5424BASE %{SYSLOG5424PRI}%{NONNEGINT:[system][syslog][version]} +(?:-|%{TIMESTAMP_ISO8601:timestamp}) +(?:-|%{IPORHOST:[host][hostname]}) +(?:-|%{SYSLOG5424PRINTASCII:[process][name]}) +(?:-|%{POSINT:[process][pid]:int}) +(?:-|%{SYSLOG5424PRINTASCII:[event][code]}) +(?:-|%{SYSLOG5424SD:[system][syslog][structured_data]})? + +SYSLOG5424LINE %{SYSLOG5424BASE} +%{GREEDYDATA:message} diff --git a/plugin-transforms-grok/src/main/resources/patterns/maven b/plugin-transforms-grok/src/main/resources/patterns/maven new file mode 100644 index 0000000..302b715 --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/maven @@ -0,0 +1,8 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/linux-maven +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +MAVEN_VERSION (?:(\d+)\.)?(?:(\d+)\.)?(\*|\d+)(?:[.-](RELEASE|SNAPSHOT))? diff --git a/plugin-transforms-grok/src/main/resources/patterns/mcollective b/plugin-transforms-grok/src/main/resources/patterns/mcollective new file mode 100644 index 0000000..98ae7fd --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/mcollective @@ -0,0 +1,11 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/mcollective +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +# Remember, these can be multi-line events. 
+MCOLLECTIVE ., \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:[process][pid]:int}\]%{SPACE}%{LOGLEVEL:[log][level]} + +MCOLLECTIVEAUDIT %{TIMESTAMP_ISO8601:timestamp}: diff --git a/plugin-transforms-grok/src/main/resources/patterns/mongodb b/plugin-transforms-grok/src/main/resources/patterns/mongodb new file mode 100644 index 0000000..290117e --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/mongodb @@ -0,0 +1,14 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/mongodb +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +MONGO_LOG %{SYSLOGTIMESTAMP:timestamp} \[%{WORD:[mongodb][component]}\] %{GREEDYDATA:message} +MONGO_QUERY \{ (?<={ ).*(?= } ntoreturn:) \} +MONGO_SLOWQUERY %{WORD:[mongodb][profile][op]} %{MONGO_WORDDASH:[mongodb][database]}\.%{MONGO_WORDDASH:[mongodb][collection]} %{WORD}: %{MONGO_QUERY:[mongodb][query][original]} ntoreturn:%{NONNEGINT:[mongodb][profile][ntoreturn]:int} ntoskip:%{NONNEGINT:[mongodb][profile][ntoskip]:int} nscanned:%{NONNEGINT:[mongodb][profile][nscanned]:int}.*? nreturned:%{NONNEGINT:[mongodb][profile][nreturned]:int}.*? %{INT:[mongodb][profile][duration]:int}ms +MONGO_WORDDASH \b[\w-]+\b +MONGO3_SEVERITY \w +MONGO3_COMPONENT %{WORD} +MONGO3_LOG %{TIMESTAMP_ISO8601:timestamp} %{MONGO3_SEVERITY:[log][level]} (?:-|%{MONGO3_COMPONENT:[mongodb][component]})%{SPACE}(?:\[%{DATA:[mongodb][context]}\])? %{GREEDYDATA:message} diff --git a/plugin-transforms-grok/src/main/resources/patterns/nagios b/plugin-transforms-grok/src/main/resources/patterns/nagios new file mode 100644 index 0000000..74692fa --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/nagios @@ -0,0 +1,131 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/nagios +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +################################################################################## +################################################################################## +# Chop Nagios log files to smithereens! +# +# A set of GROK filters to process logfiles generated by Nagios. +# While it does not, this set intends to cover all possible Nagios logs. 
+# +# Some more work needs to be done to cover all External Commands: +# http://old.nagios.org/developerinfo/externalcommands/commandlist.php +# +# If you need some support on these rules please contact: +# Jelle Smet http://smetj.net +# +################################################################################# +################################################################################# + +NAGIOSTIME \[%{NUMBER:timestamp}\] + +############################################### +######## Begin nagios log types +############################################### +NAGIOS_TYPE_CURRENT_SERVICE_STATE CURRENT SERVICE STATE +NAGIOS_TYPE_CURRENT_HOST_STATE CURRENT HOST STATE + +NAGIOS_TYPE_SERVICE_NOTIFICATION SERVICE NOTIFICATION +NAGIOS_TYPE_HOST_NOTIFICATION HOST NOTIFICATION + +NAGIOS_TYPE_SERVICE_ALERT SERVICE ALERT +NAGIOS_TYPE_HOST_ALERT HOST ALERT + +NAGIOS_TYPE_SERVICE_FLAPPING_ALERT SERVICE FLAPPING ALERT +NAGIOS_TYPE_HOST_FLAPPING_ALERT HOST FLAPPING ALERT + +NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT SERVICE DOWNTIME ALERT +NAGIOS_TYPE_HOST_DOWNTIME_ALERT HOST DOWNTIME ALERT + +NAGIOS_TYPE_PASSIVE_SERVICE_CHECK PASSIVE SERVICE CHECK +NAGIOS_TYPE_PASSIVE_HOST_CHECK PASSIVE HOST CHECK + +NAGIOS_TYPE_SERVICE_EVENT_HANDLER SERVICE EVENT HANDLER +NAGIOS_TYPE_HOST_EVENT_HANDLER HOST EVENT HANDLER + +NAGIOS_TYPE_EXTERNAL_COMMAND EXTERNAL COMMAND +NAGIOS_TYPE_TIMEPERIOD_TRANSITION TIMEPERIOD TRANSITION +############################################### +######## End nagios log types +############################################### + +############################################### +######## Begin external check types +############################################### +NAGIOS_EC_DISABLE_SVC_CHECK DISABLE_SVC_CHECK +NAGIOS_EC_ENABLE_SVC_CHECK ENABLE_SVC_CHECK +NAGIOS_EC_DISABLE_HOST_CHECK DISABLE_HOST_CHECK +NAGIOS_EC_ENABLE_HOST_CHECK ENABLE_HOST_CHECK +NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT PROCESS_SERVICE_CHECK_RESULT +NAGIOS_EC_PROCESS_HOST_CHECK_RESULT PROCESS_HOST_CHECK_RESULT +NAGIOS_EC_SCHEDULE_SERVICE_DOWNTIME SCHEDULE_SERVICE_DOWNTIME +NAGIOS_EC_SCHEDULE_HOST_DOWNTIME SCHEDULE_HOST_DOWNTIME +NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS DISABLE_HOST_SVC_NOTIFICATIONS +NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS ENABLE_HOST_SVC_NOTIFICATIONS +NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS DISABLE_HOST_NOTIFICATIONS +NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS ENABLE_HOST_NOTIFICATIONS +NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS DISABLE_SVC_NOTIFICATIONS +NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS ENABLE_SVC_NOTIFICATIONS +############################################### +######## End external check types +############################################### +NAGIOS_WARNING Warning:%{SPACE}%{GREEDYDATA:message} + +NAGIOS_CURRENT_SERVICE_STATE %{NAGIOS_TYPE_CURRENT_SERVICE_STATE:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} +NAGIOS_CURRENT_HOST_STATE %{NAGIOS_TYPE_CURRENT_HOST_STATE:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} + +NAGIOS_SERVICE_NOTIFICATION %{NAGIOS_TYPE_SERVICE_NOTIFICATION:[nagios][log][type]}: %{DATA:[user][name]};%{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][notification_command]};%{GREEDYDATA:message} +NAGIOS_HOST_NOTIFICATION %{NAGIOS_TYPE_HOST_NOTIFICATION:[nagios][log][type]}: 
%{DATA:[user][name]};%{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][notification_command]};%{GREEDYDATA:message} + +NAGIOS_SERVICE_ALERT %{NAGIOS_TYPE_SERVICE_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} +NAGIOS_HOST_ALERT %{NAGIOS_TYPE_HOST_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{INT:[nagios][log][attempt]:int};%{GREEDYDATA:message} + +NAGIOS_SERVICE_FLAPPING_ALERT %{NAGIOS_TYPE_SERVICE_FLAPPING_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:message} +NAGIOS_HOST_FLAPPING_ALERT %{NAGIOS_TYPE_HOST_FLAPPING_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:message} + +NAGIOS_SERVICE_DOWNTIME_ALERT %{NAGIOS_TYPE_SERVICE_DOWNTIME_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} +NAGIOS_HOST_DOWNTIME_ALERT %{NAGIOS_TYPE_HOST_DOWNTIME_ALERT:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} + +NAGIOS_PASSIVE_SERVICE_CHECK %{NAGIOS_TYPE_PASSIVE_SERVICE_CHECK:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} +NAGIOS_PASSIVE_HOST_CHECK %{NAGIOS_TYPE_PASSIVE_HOST_CHECK:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][comment]} + +NAGIOS_SERVICE_EVENT_HANDLER %{NAGIOS_TYPE_SERVICE_EVENT_HANDLER:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{DATA:[nagios][log][event_handler_name]} +NAGIOS_HOST_EVENT_HANDLER %{NAGIOS_TYPE_HOST_EVENT_HANDLER:[nagios][log][type]}: %{DATA:[host][hostname]};%{DATA:[service][state]};%{DATA:[nagios][log][state_type]};%{DATA:[nagios][log][event_handler_name]} + +NAGIOS_TIMEPERIOD_TRANSITION %{NAGIOS_TYPE_TIMEPERIOD_TRANSITION:[nagios][log][type]}: %{DATA:[service][name]};%{NUMBER:[nagios][log][period_from]:int};%{NUMBER:[nagios][log][period_to]:int} + +#################### +#### External checks +#################### + +#Disable host & service check +NAGIOS_EC_LINE_DISABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_SVC_CHECK:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]} +NAGIOS_EC_LINE_DISABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_CHECK:[nagios][log][command]};%{DATA:[host][hostname]} + +#Enable host & service check +NAGIOS_EC_LINE_ENABLE_SVC_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_SVC_CHECK:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]} +NAGIOS_EC_LINE_ENABLE_HOST_CHECK %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_CHECK:[nagios][log][command]};%{DATA:[host][hostname]} + +#Process host & service check +NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_PROCESS_SERVICE_CHECK_RESULT:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][name]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][check_result]} +NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT 
%{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_PROCESS_HOST_CHECK_RESULT:[nagios][log][command]};%{DATA:[host][hostname]};%{DATA:[service][state]};%{GREEDYDATA:[nagios][log][check_result]} + +#Disable host & service notifications +NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_SVC_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} +NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_HOST_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} +NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_DISABLE_SVC_NOTIFICATIONS:[nagios][log][command]};%{DATA:[host][hostname]};%{GREEDYDATA:[service][name]} + +#Enable host & service notifications +NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_SVC_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} +NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_HOST_NOTIFICATIONS:[nagios][log][command]};%{GREEDYDATA:[host][hostname]} +NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_ENABLE_SVC_NOTIFICATIONS:[nagios][log][command]};%{DATA:[host][hostname]};%{GREEDYDATA:[service][name]} + +#Schedule host & service downtime +NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME %{NAGIOS_TYPE_EXTERNAL_COMMAND:[nagios][log][type]}: %{NAGIOS_EC_SCHEDULE_HOST_DOWNTIME:[nagios][log][command]};%{DATA:[host][hostname]};%{NUMBER:[nagios][log][start_time]};%{NUMBER:[nagios][log][end_time]};%{NUMBER:[nagios][log][fixed]};%{NUMBER:[nagios][log][trigger_id]};%{NUMBER:[nagios][log][duration]:int};%{DATA:[user][name]};%{DATA:[nagios][log][comment]} + +#End matching line +NAGIOSLOGLINE %{NAGIOSTIME} (?:%{NAGIOS_WARNING}|%{NAGIOS_CURRENT_SERVICE_STATE}|%{NAGIOS_CURRENT_HOST_STATE}|%{NAGIOS_SERVICE_NOTIFICATION}|%{NAGIOS_HOST_NOTIFICATION}|%{NAGIOS_SERVICE_ALERT}|%{NAGIOS_HOST_ALERT}|%{NAGIOS_SERVICE_FLAPPING_ALERT}|%{NAGIOS_HOST_FLAPPING_ALERT}|%{NAGIOS_SERVICE_DOWNTIME_ALERT}|%{NAGIOS_HOST_DOWNTIME_ALERT}|%{NAGIOS_PASSIVE_SERVICE_CHECK}|%{NAGIOS_PASSIVE_HOST_CHECK}|%{NAGIOS_SERVICE_EVENT_HANDLER}|%{NAGIOS_HOST_EVENT_HANDLER}|%{NAGIOS_TIMEPERIOD_TRANSITION}|%{NAGIOS_EC_LINE_DISABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_ENABLE_SVC_CHECK}|%{NAGIOS_EC_LINE_DISABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_ENABLE_HOST_CHECK}|%{NAGIOS_EC_LINE_PROCESS_HOST_CHECK_RESULT}|%{NAGIOS_EC_LINE_PROCESS_SERVICE_CHECK_RESULT}|%{NAGIOS_EC_LINE_SCHEDULE_HOST_DOWNTIME}|%{NAGIOS_EC_LINE_DISABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_HOST_NOTIFICATIONS}|%{NAGIOS_EC_LINE_DISABLE_SVC_NOTIFICATIONS}|%{NAGIOS_EC_LINE_ENABLE_SVC_NOTIFICATIONS}) diff --git a/plugin-transforms-grok/src/main/resources/patterns/postgresql b/plugin-transforms-grok/src/main/resources/patterns/postgresql new file mode 100644 index 0000000..6af3e16 --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/postgresql @@ -0,0 +1,9 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/postgresql +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at 
https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +# Default postgresql pg_log format pattern +POSTGRESQL %{DATESTAMP:timestamp} %{TZ:[event][timezone]} %{DATA:[user][name]} %{GREEDYDATA:[postgresql][log][connection_id]} %{POSINT:[process][pid]:int} diff --git a/plugin-transforms-grok/src/main/resources/patterns/rails b/plugin-transforms-grok/src/main/resources/patterns/rails new file mode 100644 index 0000000..e6989eb --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/rails @@ -0,0 +1,20 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/rails +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +RUUID \h{32} +# rails controller with action +RCONTROLLER (?<[rails][controller][class]>[^#]+)#(?<[rails][controller][action]>\w+) + +# this will often be the only line: +RAILS3HEAD (?m)Started %{WORD:[http][request][method]} "%{URIPATHPARAM:[url][original]}" for %{IPORHOST:[source][address]} at (?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} %{ISO8601_TIMEZONE}) +# for some a strange reason, params are stripped of {} - not sure that's a good idea. +RPROCESSING \W*Processing by %{RCONTROLLER} as (?<[rails][request][format]>\S+)(?:\W*Parameters: {%{DATA:[rails][request][params]}}\W*)? +RAILS3FOOT Completed %{POSINT:[http][response][status_code]:int}%{DATA} in %{NUMBER:[rails][request][duration][total]:float}ms %{RAILS3PROFILE}%{GREEDYDATA} +RAILS3PROFILE (?:\(Views: %{NUMBER:[rails][request][duration][view]:float}ms \| ActiveRecord: %{NUMBER:[rails][request][duration][active_record]:float}ms|\(ActiveRecord: %{NUMBER:[rails][request][duration][active_record]:float}ms)? + +# putting it all together +RAILS3 %{RAILS3HEAD}(?:%{RPROCESSING})?(?<[rails][request][explain][original]>(?:%{DATA}\n)*)(?:%{RAILS3FOOT})?
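The resource files above are loaded at runtime by the GrokPatternResolver and expanded by the GrokPatternCompiler introduced earlier in this patch. A minimal sketch of that flow, assuming the test classpath of this module — the wrapper class name is hypothetical, and the typed result for the `:int` suffix is inferred from the Type/TypeConverter classes added by this patch:

import io.kestra.plugin.transforms.grok.pattern.GrokMatcher;
import io.kestra.plugin.transforms.grok.pattern.GrokPatternCompiler;
import io.kestra.plugin.transforms.grok.pattern.GrokPatternResolver;

import java.nio.charset.StandardCharsets;
import java.util.Map;

public class TypedCaptureSketch {
    public static void main(String[] args) {
        // false = also keep implicit captures (namedCapturesOnly disabled)
        GrokPatternCompiler compiler = new GrokPatternCompiler(new GrokPatternResolver(), false);
        // POSINT is defined in the grok-patterns resource file; ":int" requests an integer conversion
        GrokMatcher matcher = compiler.compile("%{POSINT:pid:int}");
        Map<String, Object> captured = matcher.captures("4242".getBytes(StandardCharsets.UTF_8));
        System.out.println(captured.get("pid")); // expected: 4242 as an Integer rather than the String "4242"
    }
}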
diff --git a/plugin-transforms-grok/src/main/resources/patterns/redis b/plugin-transforms-grok/src/main/resources/patterns/redis new file mode 100644 index 0000000..f6c229b --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/redis @@ -0,0 +1,10 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/redis +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +REDISTIMESTAMP %{MONTHDAY} %{MONTH} %{TIME} +REDISLOG \[%{POSINT:[process][pid]:int}\] %{REDISTIMESTAMP:timestamp} \* +REDISMONLOG %{NUMBER:timestamp} \[%{INT:[redis][database][id]} %{IP:[client][ip]}:%{POSINT:[client][port]:int}\] "%{WORD:[redis][command][name]}"\s?%{GREEDYDATA:[redis][command][args]} diff --git a/plugin-transforms-grok/src/main/resources/patterns/ruby b/plugin-transforms-grok/src/main/resources/patterns/ruby new file mode 100644 index 0000000..db357cf --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/ruby @@ -0,0 +1,9 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/ruby +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +RUBY_LOGLEVEL (?:DEBUG|FATAL|ERROR|WARN|INFO) +RUBY_LOGGER [DFEWI], \[%{TIMESTAMP_ISO8601:timestamp} #%{POSINT:[process][pid]:int}\] *%{RUBY_LOGLEVEL:[log][level]} -- +%{DATA:[process][name]}: %{GREEDYDATA:message} diff --git a/plugin-transforms-grok/src/main/resources/patterns/squid b/plugin-transforms-grok/src/main/resources/patterns/squid new file mode 100644 index 0000000..9ebf40e --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/squid @@ -0,0 +1,13 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/squid +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +# Pattern squid3 +# Documentation of squid3 logs formats can be found at the following link: +# http://wiki.squid-cache.org/Features/LogFormat +SQUID3_STATUS (?:%{POSINT:[http][response][status_code]:int}|0|000) +SQUID3 %{NUMBER:timestamp}\s+%{NUMBER:[squid][request][duration]:int}\s%{IP:[source][ip]}\s%{WORD:[event][action]}/%{SQUID3_STATUS}\s%{INT:[http][response][bytes]:int}\s%{WORD:[http][request][method]}\s%{NOTSPACE:[url][original]}\s(?:-|%{NOTSPACE:[user][name]})\s%{WORD:[squid][hierarchy_code]}/(?:-|%{IPORHOST:[destination][address]})\s(?:-|%{NOTSPACE:[http][response][mime_type]}) +# :long - %{INT:[http][response][bytes]:int} diff --git a/plugin-transforms-grok/src/main/resources/patterns/zeek b/plugin-transforms-grok/src/main/resources/patterns/zeek new file mode 100644 index 0000000..5eaa9b9 --- /dev/null +++ b/plugin-transforms-grok/src/main/resources/patterns/zeek @@ -0,0 +1,40 @@ +# This file is a copy from project : Logstash plugins +# https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/patterns/ecs-v1/zeek +# +# SPDX-License-Identifier: Apache-2.0 +# Licensed under the Apache Software License version 2.0, available at 
https://github.com/logstash-plugins/logstash-patterns-core/blob/v4.3.4/LICENSE +# + +# updated Zeek log matching, for legacy matching see the patters/ecs-v1/bro + +ZEEK_BOOL [TF] +ZEEK_DATA [^\t]+ + +# http.log - the 'new' format (compared to BRO_HTTP) +# has *version* and *origin* fields added and *filename* replaced with *orig_filenames* + *resp_filenames* +ZEEK_HTTP %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{INT:[zeek][http][trans_depth]:int}\t(?:-|%{WORD:[http][request][method]})\t(?:-|%{ZEEK_DATA:[url][domain]})\t(?:-|%{ZEEK_DATA:[url][original]})\t(?:-|%{ZEEK_DATA:[http][request][referrer]})\t(?:-|%{NUMBER:[http][version]})\t(?:-|%{ZEEK_DATA:[user_agent][original]})\t(?:-|%{ZEEK_DATA:[zeek][http][origin]})\t(?:-|%{NUMBER:[http][request][body][bytes]:int})\t(?:-|%{NUMBER:[http][response][body][bytes]:int})\t(?:-|%{POSINT:[http][response][status_code]:int})\t(?:-|%{DATA:[zeek][http][status_msg]})\t(?:-|%{POSINT:[zeek][http][info_code]:int})\t(?:-|%{DATA:[zeek][http][info_msg]})\t(?:\(empty\)|%{ZEEK_DATA:[zeek][http][tags]})\t(?:-|%{ZEEK_DATA:[url][username]})\t(?:-|%{ZEEK_DATA:[url][password]})\t(?:-|%{ZEEK_DATA:[zeek][http][proxied]})\t(?:-|%{ZEEK_DATA:[zeek][http][orig_fuids]})\t(?:-|%{ZEEK_DATA:[zeek][http][orig_filenames]})\t(?:-|%{ZEEK_DATA:[http][request][mime_type]})\t(?:-|%{ZEEK_DATA:[zeek][http][resp_fuids]})\t(?:-|%{ZEEK_DATA:[zeek][http][resp_filenames]})\t(?:-|%{ZEEK_DATA:[http][response][mime_type]}) +# :long - %{NUMBER:[http][request][body][bytes]:int} +# :long - %{NUMBER:[http][response][body][bytes]:int} + +# dns.log - 'updated' BRO_DNS format (added *zeek.dns.rtt*) +ZEEK_DNS %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{INT:[dns][id]:int})\t(?:-|%{NUMBER:[zeek][dns][rtt]:float})\t(?:-|%{ZEEK_DATA:[dns][question][name]})\t(?:-|%{INT:[zeek][dns][qclass]:int})\t(?:-|%{ZEEK_DATA:[zeek][dns][qclass_name]})\t(?:-|%{INT:[zeek][dns][qtype]:int})\t(?:-|%{ZEEK_DATA:[dns][question][type]})\t(?:-|%{INT:[zeek][dns][rcode]:int})\t(?:-|%{ZEEK_DATA:[dns][response_code]})\t%{ZEEK_BOOL:[zeek][dns][AA]}\t%{ZEEK_BOOL:[zeek][dns][TC]}\t%{ZEEK_BOOL:[zeek][dns][RD]}\t%{ZEEK_BOOL:[zeek][dns][RA]}\t%{NONNEGINT:[zeek][dns][Z]:int}\t(?:-|%{ZEEK_DATA:[zeek][dns][answers]})\t(?:-|%{DATA:[zeek][dns][TTLs]})\t(?:-|%{ZEEK_BOOL:[zeek][dns][rejected]}) + +# conn.log - the 'new' format (requires *zeek.connection.local_resp*, handles `(empty)` as `-` for tunnel_parents, and optional mac adresses) +ZEEK_CONN 
%{NUMBER:timestamp}\t%{NOTSPACE:[zeek][session_id]}\t%{IP:[source][ip]}\t%{INT:[source][port]:int}\t%{IP:[destination][ip]}\t%{INT:[destination][port]:int}\t%{WORD:[network][transport]}\t(?:-|%{ZEEK_DATA:[network][protocol]})\t(?:-|%{NUMBER:[zeek][connection][duration]:float})\t(?:-|%{INT:[zeek][connection][orig_bytes]:int})\t(?:-|%{INT:[zeek][connection][resp_bytes]:int})\t(?:-|%{ZEEK_DATA:[zeek][connection][state]})\t(?:-|%{ZEEK_BOOL:[zeek][connection][local_orig]})\t(?:-|%{ZEEK_BOOL:[zeek][connection][local_resp]})\t(?:-|%{INT:[zeek][connection][missed_bytes]:int})\t(?:-|%{ZEEK_DATA:[zeek][connection][history]})\t(?:-|%{INT:[source][packets]:int})\t(?:-|%{INT:[source][bytes]:int})\t(?:-|%{INT:[destination][packets]:int})\t(?:-|%{INT:[destination][bytes]:int})\t(?:-|%{ZEEK_DATA:[zeek][connection][tunnel_parents]})(?:\t(?:-|%{COMMONMAC:[source][mac]})\t(?:-|%{COMMONMAC:[destination][mac]}))? +# :long - %{INT:[zeek][connection][orig_bytes]:int} +# :long - %{INT:[zeek][connection][resp_bytes]:int} +# :long - %{INT:[zeek][connection][missed_bytes]:int} +# :long - %{INT:[source][packets]:int} +# :long - %{INT:[source][bytes]:int} +# :long - %{INT:[destination][packets]:int} +# :long - %{INT:[destination][bytes]:int} + +# files.log - updated BRO_FILES format (2 new fields added at the end) +ZEEK_FILES_TX_HOSTS (?:-|%{IP:[server][ip]})|(?<[zeek][files][tx_hosts]>%{IP:[server][ip]}(?:[\s,]%{IP})+) +ZEEK_FILES_RX_HOSTS (?:-|%{IP:[client][ip]})|(?<[zeek][files][rx_hosts]>%{IP:[client][ip]}(?:[\s,]%{IP})+) +ZEEK_FILES %{NUMBER:timestamp}\t%{NOTSPACE:[zeek][files][fuid]}\t%{ZEEK_FILES_TX_HOSTS}\t%{ZEEK_FILES_RX_HOSTS}\t(?:-|%{ZEEK_DATA:[zeek][files][session_ids]})\t(?:-|%{ZEEK_DATA:[zeek][files][source]})\t(?:-|%{INT:[zeek][files][depth]:int})\t(?:-|%{ZEEK_DATA:[zeek][files][analyzers]})\t(?:-|%{ZEEK_DATA:[file][mime_type]})\t(?:-|%{ZEEK_DATA:[file][name]})\t(?:-|%{NUMBER:[zeek][files][duration]:float})\t(?:-|%{ZEEK_DATA:[zeek][files][local_orig]})\t(?:-|%{ZEEK_BOOL:[zeek][files][is_orig]})\t(?:-|%{INT:[zeek][files][seen_bytes]:int})\t(?:-|%{INT:[file][size]:int})\t(?:-|%{INT:[zeek][files][missing_bytes]:int})\t(?:-|%{INT:[zeek][files][overflow_bytes]:int})\t(?:-|%{ZEEK_BOOL:[zeek][files][timedout]})\t(?:-|%{ZEEK_DATA:[zeek][files][parent_fuid]})\t(?:-|%{ZEEK_DATA:[file][hash][md5]})\t(?:-|%{ZEEK_DATA:[file][hash][sha1]})\t(?:-|%{ZEEK_DATA:[file][hash][sha256]})\t(?:-|%{ZEEK_DATA:[zeek][files][extracted]})(?:\t(?:-|%{ZEEK_BOOL:[zeek][files][extracted_cutoff]})\t(?:-|%{INT:[zeek][files][extracted_size]:int}))? 
+# :long - %{INT:[zeek][files][seen_bytes]:int} +# :long - %{INT:[file][size]:int} +# :long - %{INT:[zeek][files][missing_bytes]:int} +# :long - %{INT:[zeek][files][overflow_bytes]:int} +# :long - %{INT:[zeek][files][extracted_size]:int} diff --git a/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/GrokTransformTest.java b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/GrokTransformTest.java new file mode 100644 index 0000000..409753b --- /dev/null +++ b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/GrokTransformTest.java @@ -0,0 +1,133 @@ +package io.kestra.plugin.transforms.grok; + +import io.kestra.core.junit.annotations.KestraTest; +import io.kestra.core.runners.RunContext; +import io.kestra.core.runners.RunContextFactory; +import jakarta.inject.Inject; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; + +@KestraTest +class GrokTransformTest { + + @Inject + private RunContextFactory runContextFactory; + + @Test + public void shouldExtractNamedCapturedGivenPatternFromDir() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + + String customPattern = """ + EMAILLOCALPART [a-zA-Z][a-zA-Z0-9_.+-=:]+ + EMAIL %{EMAILLOCALPART}@%{HOSTNAME} + """; + + runContext.workingDir() + .putFile(Path.of("custom-patterns/email"), new ByteArrayInputStream(customPattern.getBytes(StandardCharsets.UTF_8))); + + GrokTransform task = GrokTransform.builder() + .pattern("%{EMAIL}") + .namedCapturesOnly(false) + .from("unit-test@kestra.io") + .patternsDir(List.of("./custom-patterns")) + .build(); + + // When + GrokTransform.Output output = task.run(runContext); + + // Then + Assertions.assertEquals( + Map.of("HOSTNAME", "kestra.io", "EMAILLOCALPART", "unit-test", "EMAIL", "unit-test@kestra.io"), + output.getValues() + ); + } + + @Test + public void shouldExtractNamedCapturedGivenSinglePattern() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + GrokTransform task = GrokTransform.builder() + .patterns(List.of("%{EMAILADDRESS}")) + .namedCapturesOnly(false) + .from("unit-test@kestra.io") + .build(); + + // When + GrokTransform.Output output = task.run(runContext); + + // Then + Assertions.assertEquals( + Map.of("HOSTNAME", "kestra.io", "EMAILLOCALPART", "unit-test", "EMAILADDRESS", "unit-test@kestra.io"), + output.getValues() + ); + } + + @Test + public void shouldExtractNamedCapturedGivenSinglePatternAndCapturesOnlyTrue() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + GrokTransform task = GrokTransform.builder() + .patterns(List.of("%{EMAILADDRESS:email}")) + .namedCapturesOnly(true) + .from("unit-test@kestra.io") + .build(); + + // When + GrokTransform.Output output = task.run(runContext); + + // Then + Assertions.assertEquals( + Map.of("email", "unit-test@kestra.io"), + output.getValues() + ); + } + + @Test + public void shouldExtractNamedCapturedGivenConfigWithMultiplePatternsAndBreakFalse() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + GrokTransform task = GrokTransform.builder() + .patterns(List.of("%{NUMBER}", "%{EMAILADDRESS}")) + .namedCapturesOnly(false) + .breakOnFirstMatch(false) + .from("42 unit-test@kestra.io") + .build(); + + // When + GrokTransform.Output output = task.run(runContext); + + // Then + 
Assertions.assertEquals( + Map.of("NUMBER", "42", "BASE10NUM", "42", "HOSTNAME", "kestra.io", "EMAILLOCALPART", "unit-test", "EMAILADDRESS", "unit-test@kestra.io"), + output.getValues() + ); + } + + @Test + public void shouldExtractNamedCapturedGivenConfigWithMultiplePatternsAndBreakTrue() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + GrokTransform task = GrokTransform.builder() + .patterns(List.of("%{NUMBER}", "%{EMAILADDRESS}")) + .namedCapturesOnly(false) + .breakOnFirstMatch(true) + .from("unit-test@kestra.io") + .build(); + + // When + GrokTransform.Output output = task.run(runContext); + + // Then + Assertions.assertEquals( + Map.of("HOSTNAME", "kestra.io", "EMAILLOCALPART", "unit-test", "EMAILADDRESS", "unit-test@kestra.io"), + output.getValues() + ); + } +} \ No newline at end of file diff --git a/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcherTest.java b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcherTest.java new file mode 100644 index 0000000..c703ad5 --- /dev/null +++ b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokMatcherTest.java @@ -0,0 +1,36 @@ +package io.kestra.plugin.transforms.grok.pattern; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.nio.charset.StandardCharsets; +import java.util.Map; + +class GrokMatcherTest { + private GrokPatternCompiler compiler; + + @BeforeEach + public void setUp() { + compiler = new GrokPatternCompiler(new GrokPatternResolver(), false); + } + + @Test + public void shouldParseGivenSimpleGrokPattern() { + final GrokMatcher matcher = compiler.compile("%{EMAILADDRESS}"); + final Map<String, Object> captured = matcher.captures("test@kafka.org".getBytes(StandardCharsets.UTF_8)); + Assertions.assertEquals("kafka.org", captured.get("HOSTNAME")); + Assertions.assertEquals("test@kafka.org", captured.get("EMAILADDRESS")); + Assertions.assertEquals("test", captured.get("EMAILLOCALPART")); + } + + @Test + public void shouldParseGivenCustomGrokPattern() { + final GrokMatcher matcher = compiler.compile("(?<EMAILADDRESS>(?<EMAILLOCALPART>[a-zA-Z][a-zA-Z0-9_.+-=:]+)@(?<HOSTNAME>\\b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\\.?|\\b)))"); + final Map<String, Object> captured = matcher.captures("test@kestra.io".getBytes(StandardCharsets.UTF_8)); + Assertions.assertEquals("kestra.io", captured.get("HOSTNAME")); + Assertions.assertEquals("test@kestra.io", captured.get("EMAILADDRESS")); + Assertions.assertEquals("test", captured.get("EMAILLOCALPART")); + } +} \ No newline at end of file
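The `false` flag passed to GrokPatternCompiler in the tests above mirrors the task's `namedCapturesOnly` property. A minimal sketch of the opposite setting — the wrapper class name is hypothetical, and the expected output is inferred from shouldExtractNamedCapturedGivenSinglePatternAndCapturesOnlyTrue in GrokTransformTest:

import io.kestra.plugin.transforms.grok.pattern.GrokMatcher;
import io.kestra.plugin.transforms.grok.pattern.GrokPatternCompiler;
import io.kestra.plugin.transforms.grok.pattern.GrokPatternResolver;

import java.nio.charset.StandardCharsets;
import java.util.Map;

public class NamedCapturesOnlySketch {
    public static void main(String[] args) {
        // true = only keep captures given an explicit semantic, e.g. %{EMAILADDRESS:email}
        GrokPatternCompiler compiler = new GrokPatternCompiler(new GrokPatternResolver(), true);
        GrokMatcher matcher = compiler.compile("%{EMAILADDRESS:email}");
        Map<String, Object> captured = matcher.captures("test@kestra.io".getBytes(StandardCharsets.UTF_8));
        // The implicit EMAILLOCALPART/HOSTNAME sub-captures are dropped:
        System.out.println(captured); // {email=test@kestra.io}
    }
}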
diff --git a/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompilerTest.java b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompilerTest.java new file mode 100644 index 0000000..17b3fdf --- /dev/null +++ b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternCompilerTest.java @@ -0,0 +1,76 @@ +package io.kestra.plugin.transforms.grok.pattern; + + +import io.kestra.plugin.transforms.grok.data.Type; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class GrokPatternCompilerTest { + + private final GrokPatternResolver resolver = new GrokPatternResolver(); + private final GrokPatternCompiler compiler = new GrokPatternCompiler(resolver, false); + + + @Test + public void shouldCompileAllDefinitions() { + Map<String, String> definitions = resolver.definitions(); + List<Map.Entry<String, String>> errors = new ArrayList<>(definitions.size()); + for (Map.Entry<String, String> definition : definitions.entrySet()) { + try { + compiler.compile("%{" + definition.getKey() + "}"); + } catch (Exception e) { + errors.add(definition); + } + } + Assertions.assertTrue(errors.isEmpty(), "Failed to compile [" + errors.size() + "] definitions: " + errors.stream().map(Map.Entry::getKey).collect(Collectors.toSet())); + } + + @Test + public void shouldCompileMatcherGivenSingleGrokPattern() { + final GrokMatcher matcher = compiler.compile("%{ISO8601_TIMEZONE}"); + Assertions.assertNotNull(matcher); + Assertions.assertEquals("ISO8601_TIMEZONE", matcher.getGrokPattern(0).syntax()); + Assertions.assertEquals("(?<ISO8601_TIMEZONE>(?:Z|[+-](?<HOUR>(?:2[0123]|[01]?[0-9]))(?::?(?<MINUTE>(?:[0-5][0-9])))))", matcher.expression()); + } + + @Test + public void shouldCompileMatcherGivenMultipleGrokPattern() { + final GrokMatcher matcher = compiler.compile("%{ISO8601_TIMEZONE} %{LOGLEVEL} %{GREEDYDATA}"); + Assertions.assertNotNull(matcher); + Assertions.assertNotNull(matcher.getGrokPattern("ISO8601_TIMEZONE")); + Assertions.assertNotNull(matcher.getGrokPattern("LOGLEVEL")); + Assertions.assertNotNull(matcher.getGrokPattern("GREEDYDATA")); + Assertions.assertEquals("(?<ISO8601_TIMEZONE>(?:Z|[+-](?<HOUR>(?:2[0123]|[01]?[0-9]))(?::?(?<MINUTE>(?:[0-5][0-9]))))) (?<LOGLEVEL>([Aa]lert|ALERT|[Tt]race|TRACE|[Dd]ebug|DEBUG|[Nn]otice|NOTICE|[Ii]nfo?(?:rmation)?|INFO?(?:RMATION)?|[Ww]arn?(?:ing)?|WARN?(?:ING)?|[Ee]rr?(?:or)?|ERR?(?:OR)?|[Cc]rit?(?:ical)?|CRIT?(?:ICAL)?|[Ff]atal|FATAL|[Ss]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)) (?<GREEDYDATA>.*)", matcher.expression()); + } + + @Test + public void shouldCompileMatcherGivenMultipleGrokPatternWithSemantic() { + final GrokMatcher matcher = compiler.compile("%{ISO8601_TIMEZONE:timezone}"); + Assertions.assertNotNull(matcher); + Assertions.assertEquals("ISO8601_TIMEZONE", matcher.getGrokPattern(0).syntax()); + Assertions.assertEquals("timezone", matcher.getGrokPattern(0).semantic()); + Assertions.assertEquals("(?<timezone>(?:Z|[+-](?<HOUR>(?:2[0123]|[01]?[0-9]))(?::?(?<MINUTE>(?:[0-5][0-9])))))", matcher.expression()); + } + + @Test + public void shouldCompileMatcherGivenMultipleGrokPatternWithSemanticAndType() { + final GrokMatcher matcher = compiler.compile("%{ISO8601_TIMEZONE:timezone:int}"); + Assertions.assertNotNull(matcher); + Assertions.assertEquals("ISO8601_TIMEZONE", matcher.getGrokPattern(0).syntax()); + Assertions.assertEquals("timezone", matcher.getGrokPattern(0).semantic()); + Assertions.assertEquals(Type.INT, matcher.getGrokPattern(0).type()); + Assertions.assertEquals("(?<timezone>(?:Z|[+-](?<HOUR>(?:2[0123]|[01]?[0-9]))(?::?(?<MINUTE>(?:[0-5][0-9])))))", matcher.expression()); + } + + @Test + public void shouldCompileMatcherGivenCustomGrokPattern() { + final GrokMatcher matcher = compiler.compile("(?<EMAIL>^[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,6}$)"); + Assertions.assertNotNull(matcher); + Assertions.assertEquals("(?<EMAIL>^[A-Z0-9._%+-]+@[A-Z0-9.-]+\\.[A-Z]{2,6}$)", matcher.expression()); + } +} \ No newline at end of file diff --git a/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolverTest.java b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolverTest.java new file mode 100644 index 0000000..2d11290 --- /dev/null +++
b/plugin-transforms-grok/src/test/java/io/kestra/plugin/transforms/grok/pattern/GrokPatternResolverTest.java @@ -0,0 +1,23 @@ +package io.kestra.plugin.transforms.grok.pattern; + + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class GrokPatternResolverTest { + + @Test + public void shouldLoadAllGrokPatternsFromClasspath() { + GrokPatternResolver resolver = new GrokPatternResolver(); + resolver.print(); + Assertions.assertFalse(resolver.isEmpty()); + } + + @Test + public void shouldStandardResolveGrokPattern() { + GrokPatternResolver resolver = new GrokPatternResolver(); + String resolve = resolver.resolve("SYSLOGFACILITY"); + Assertions.assertEquals("<%{NONNEGINT:[log][syslog][facility][code]:int}.%{NONNEGINT:[log][syslog][priority]:int}>", resolve); + } + +} \ No newline at end of file diff --git a/plugin-transforms-grok/src/test/resources/allure.properties b/plugin-transforms-grok/src/test/resources/allure.properties new file mode 100644 index 0000000..4873f6d --- /dev/null +++ b/plugin-transforms-grok/src/test/resources/allure.properties @@ -0,0 +1 @@ +allure.results.directory=build/allure-results diff --git a/plugin-transforms-grok/src/test/resources/application.yml b/plugin-transforms-grok/src/test/resources/application.yml new file mode 100644 index 0000000..636ef67 --- /dev/null +++ b/plugin-transforms-grok/src/test/resources/application.yml @@ -0,0 +1,9 @@ +kestra: + repository: + type: memory + queue: + type: memory + storage: + type: local + local: + base-path: /tmp/unittest diff --git a/plugin-transforms-grok/src/test/resources/logback.xml b/plugin-transforms-grok/src/test/resources/logback.xml new file mode 100644 index 0000000..803c82e --- /dev/null +++ b/plugin-transforms-grok/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/plugin-transforms-json/build.gradle b/plugin-transforms-json/build.gradle new file mode 100644 index 0000000..bb828aa --- /dev/null +++ b/plugin-transforms-json/build.gradle @@ -0,0 +1,17 @@ +project.description = 'Kestra Plugin Transformation for Json.' 
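+// The X-Kestra-* manifest attributes below expose the plugin's metadata (name, title, +// group, description, version) to Kestra's plugin registry when the jar is loaded.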
+ +jar { + manifest { + attributes( + "X-Kestra-Name": project.name, + "X-Kestra-Title": "Json", + "X-Kestra-Group": project.group + ".json", + "X-Kestra-Description": project.description, + "X-Kestra-Version": project.version + ) + } +} + +dependencies { + implementation 'com.ibm.jsonata4java:JSONata4Java:2.5.0' +} diff --git a/plugin-transforms-json/src/main/java/io/kestra/plugin/transforms/grok/JSONataTransform.java b/plugin-transforms-json/src/main/java/io/kestra/plugin/transforms/grok/JSONataTransform.java new file mode 100644 index 0000000..11c8ccc --- /dev/null +++ b/plugin-transforms-json/src/main/java/io/kestra/plugin/transforms/grok/JSONataTransform.java @@ -0,0 +1,181 @@ +package io.kestra.plugin.transforms.grok; + +import com.api.jsonata4java.expressions.EvaluateException; +import com.api.jsonata4java.expressions.Expressions; +import com.api.jsonata4java.expressions.ParseException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.kestra.core.exceptions.IllegalVariableEvaluationException; +import io.kestra.core.models.annotations.Example; +import io.kestra.core.models.annotations.Plugin; +import io.kestra.core.models.annotations.PluginProperty; +import io.kestra.core.models.tasks.Output; +import io.kestra.core.models.tasks.RunnableTask; +import io.kestra.core.models.tasks.Task; +import io.kestra.core.runners.RunContext; +import io.kestra.core.serializers.FileSerde; +import io.kestra.core.serializers.JacksonMapper; +import io.swagger.v3.oas.annotations.media.Schema; +import jakarta.validation.constraints.NotNull; +import lombok.Builder; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.ToString; +import lombok.experimental.SuperBuilder; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Duration; + +import static io.kestra.core.utils.Rethrow.throwFunction; + +@SuperBuilder +@ToString +@EqualsAndHashCode +@Getter +@NoArgsConstructor +@Schema( + title = "Transform a JSON payload using a JSONata expression.", + description = "JSONata is a query and transformation language for JSON data." +) +@Plugin( + examples = { + @Example( + title = "Transform a JSON payload using a JSONata expression", + full = true, + code = """ + id: example + namespace: example + tasks: + - id: transformJson + type: io.kestra.plugin.transforms.grok.JSONataTransform + # can be either a Kestra internal storage URI or a JSON string + from: "{{ outputs.previousTask.uri }}" + expr: | + { + "order_id": order_id, + "customer_name": customer_name, + "total_price": $sum(items.(quantity * price_per_unit)) + } + """ + ) + } +)
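+// `from` accepts either an inline JSON document or a `kestra://` internal storage URI. +// A URI is read back as ION records and streamed through the expression one record at a +// time, so large files are transformed without being fully loaded into memory.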
+public class JSONataTransform extends Task implements RunnableTask<JSONataTransform.Output> { + + private static final ObjectMapper ION_OBJECT_MAPPER = JacksonMapper.ofIon(); + + @Schema( + title = "The JSON/ION object to transform.", + description = "Must be a valid JSON string or a `kestra://` internal storage URI." + ) + @PluginProperty(dynamic = true) + @NotNull + private String from; + + @PluginProperty(dynamic = true) + @Schema(title = "The JSONata expression to apply on the JSON/ION object provided through the `from` property.") + @NotNull + private String expr; + + @PluginProperty(dynamic = true) + @Schema(title = "The maximum number of recursive calls allowed for the JSONata transformation.") + @NotNull + @Builder.Default + private Integer maxDepth = 1000; + + @PluginProperty(dynamic = true) + @Schema(title = "The maximum duration allowed for the evaluation to occur. If it takes longer, the task will fail.") + @NotNull + @Builder.Default + private Duration timeout = Duration.ofSeconds(10); + + + /** + * {@inheritDoc} + **/ + @Override + public Output run(RunContext runContext) throws Exception { + + final Expressions expressions = parseExpression(runContext); + final String renderedFrom = runContext.render(this.from); + + final Output output; + if (renderedFrom.startsWith("kestra://")) { + URI objectURI = new URI(renderedFrom); + try (InputStream is = runContext.storage().getFile(objectURI)) { + Flux<JsonNode> flux = FileSerde.readAll(is, new TypeReference<JsonNode>() { + }); + output = evaluateExpression(runContext, expressions, flux); + } + } else { + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode jsonNode = objectMapper.readTree(renderedFrom); + output = evaluateExpression(runContext, expressions, Mono.just(jsonNode).flux()); + } + return output; + } + + private Output evaluateExpression(RunContext runContext, Expressions expressions, Flux<JsonNode> flux) throws IOException { + final Path outputFilePath = runContext.workingDir().createTempFile(".ion"); + try (final BufferedWriter writer = Files.newBufferedWriter(outputFilePath)) { + Long processedItemsTotal = flux.map(throwFunction(jsonNode -> { + jsonNode = evaluateExpression(expressions, jsonNode); + writer.write(ION_OBJECT_MAPPER.writeValueAsString(jsonNode)); + writer.newLine(); + return 1L; + })) + .reduce(Long::sum) + .block(); + + writer.flush(); + + URI uri = runContext.storage().putFile(outputFilePath.toFile()); + return Output + .builder() + .uri(uri) + .processedItemsTotal(processedItemsTotal) + .build(); + } finally { + Files.deleteIfExists(outputFilePath); // ensure the temp file is deleted in case of error + } + } + + private JsonNode evaluateExpression(Expressions expressions, JsonNode jsonNode) { + try { + return expressions.evaluate(jsonNode, getTimeout().toMillis(), getMaxDepth()); + } catch (EvaluateException e) { + throw new RuntimeException("Failed to evaluate expression", e); + } + } + + private Expressions parseExpression(RunContext runContext) throws IllegalVariableEvaluationException { + try { + return Expressions.parse(runContext.render(this.expr)); + } catch (ParseException | IOException e) { + throw new IllegalArgumentException("Invalid JSONata expression. Error: " + e.getMessage(), e); + } + } + + @Builder + @Getter + public static class Output implements io.kestra.core.models.tasks.Output { + @Schema( + title = "File URI containing the result of the transformation." + ) + private final URI uri; + + @Schema( + title = "The total number of items that were processed by the task." + ) + private final Long processedItemsTotal; + } +}
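For reference, the JSONata evaluation itself is delegated to the JSONata4Java dependency declared above. A minimal standalone sketch of the same calls the task wraps — the class name and sample data are illustrative, and the three-argument `evaluate` is the timeout/max-depth guarded variant used in `evaluateExpression`:

import com.api.jsonata4java.expressions.Expressions;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonataSketch {
    public static void main(String[] args) throws Exception {
        JsonNode input = new ObjectMapper().readTree(
            "{\"items\": [{\"quantity\": 5, \"price_per_unit\": 0.5}, {\"quantity\": 3, \"price_per_unit\": 0.3}]}");
        Expressions expressions = Expressions.parse("$sum(items.(quantity * price_per_unit))");
        // Same guarded call as the task: evaluation timeout in milliseconds, then max recursion depth
        JsonNode result = expressions.evaluate(input, 10_000L, 1000);
        System.out.println(result); // 3.4
    }
}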
diff --git a/plugin-transforms-json/src/test/java/io/kestra/plugin/transforms/grok/JSONataTransformTest.java b/plugin-transforms-json/src/test/java/io/kestra/plugin/transforms/grok/JSONataTransformTest.java new file mode 100644 index 0000000..640240a --- /dev/null +++ b/plugin-transforms-json/src/test/java/io/kestra/plugin/transforms/grok/JSONataTransformTest.java @@ -0,0 +1,181 @@ +package io.kestra.plugin.transforms.grok; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.kestra.core.junit.annotations.KestraTest; +import io.kestra.core.runners.RunContext; +import io.kestra.core.runners.RunContextFactory; +import io.kestra.core.serializers.FileSerde; +import jakarta.inject.Inject; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import reactor.core.publisher.Mono; + +import java.io.InputStream; +import java.io.OutputStream; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; + +@KestraTest +class JSONataTransformTest { + + @Inject + private RunContextFactory runContextFactory; + + @Test + void shouldGetOutputForValidExprReturningStringForFromURI() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + final Path outputFilePath = runContext.workingDir().createTempFile(".ion"); + try (final OutputStream os = Files.newOutputStream(outputFilePath)) { + FileSerde.writeAll(os, Mono.just(new ObjectMapper().readValue(TEST_DATA, Map.class)).flux()).block(); + os.flush(); + } + URI uri = runContext.storage().putFile(outputFilePath.toFile()); + + JSONataTransform task = JSONataTransform.builder() + .from(uri.toString()) + .expr(TEST_EXPRESSION) + .build(); + + // When + JSONataTransform.Output output = task.run(runContext); + + // Then + Assertions.assertNotNull(output); + Assertions.assertEquals(1, output.getProcessedItemsTotal()); + + InputStream is = runContext.storage().getFile(output.getUri()); + String transformationResult = FileSerde.readAll(is, new TypeReference<String>() { + }).blockLast(); + + Assertions.assertEquals(TEST_EXPRESSION_RESULT, transformationResult); + } + + @Test + void shouldGetOutputForValidExprReturningStringForFromJSON() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + JSONataTransform task = JSONataTransform.builder() + .from(TEST_DATA) + .expr(TEST_EXPRESSION) + .build(); + + // When + JSONataTransform.Output output = task.run(runContext); + + // Then + Assertions.assertNotNull(output); + Assertions.assertEquals(1, output.getProcessedItemsTotal()); + + InputStream is = runContext.storage().getFile(output.getUri()); + String transformationResult = FileSerde.readAll(is, new TypeReference<String>() { + }).blockLast(); + + Assertions.assertEquals(TEST_EXPRESSION_RESULT, transformationResult); + } + + @Test + void shouldGetOutputForValidExprReturningObjectForFromJSON() throws Exception { + // Given + RunContext runContext = runContextFactory.of(); + JSONataTransform task = JSONataTransform.builder() + .from(""" + { + "order_id": "ABC123", + "customer_name": "John Doe", + "items": [ + { + "product_id": "001", + "name": "Apple", + "quantity": 5, + "price_per_unit": 0.5 + }, + { + "product_id": "002", + "name": "Banana", + "quantity": 3, + "price_per_unit": 0.3 + }, + { + "product_id": "003", + "name": "Orange", + "quantity": 2, + "price_per_unit": 0.4 + } + ] + } + """) + .expr(""" + {
"order_id": order_id, + "customer_name": customer_name, + "total_price": $sum(items.(quantity * price_per_unit)) + } + """) + .build(); + + // When + JSONataTransform.Output output = task.run(runContext); + + // Then + Assertions.assertNotNull(output); + Assertions.assertEquals(1, output.getProcessedItemsTotal()); + + InputStream is = runContext.storage().getFile(output.getUri()); + JsonNode transformationResult = FileSerde.readAll(is, new TypeReference() { + }).blockLast(); + + Assertions.assertEquals("{\"order_id\":\"ABC123\",\"customer_name\":\"John Doe\",\"total_price\":4.2}", transformationResult.toString()); + } + + // example from https://try.jsonata.org/ + private static final String TEST_EXPRESSION = "$sum(Account.Order.Product.(Price * Quantity))"; + private static final String TEST_EXPRESSION_RESULT = "90.57000000000001"; + + private static final String TEST_DATA = """ + { + "Account": { + "Account Name": "Firefly", + "Order": [ + { + "OrderID": "order103", + "Product": [ + { + "Product Name": "Bowler Hat", + "ProductID": 858383, + "SKU": "0406654608", + "Description": { + "Colour": "Purple", + "Width": 300, + "Height": 200, + "Depth": 210, + "Weight": 0.75 + }, + "Price": 34.45, + "Quantity": 2 + }, + { + "Product Name": "Trilby hat", + "ProductID": 858236, + "SKU": "0406634348", + "Description": { + "Colour": "Orange", + "Width": 300, + "Height": 200, + "Depth": 210, + "Weight": 0.6 + }, + "Price": 21.67, + "Quantity": 1 + } + ] + } + ] + } + } + """; +} \ No newline at end of file diff --git a/plugin-transforms-json/src/test/resources/allure.properties b/plugin-transforms-json/src/test/resources/allure.properties new file mode 100644 index 0000000..4873f6d --- /dev/null +++ b/plugin-transforms-json/src/test/resources/allure.properties @@ -0,0 +1 @@ +allure.results.directory=build/allure-results diff --git a/plugin-transforms-json/src/test/resources/application.yml b/plugin-transforms-json/src/test/resources/application.yml new file mode 100644 index 0000000..636ef67 --- /dev/null +++ b/plugin-transforms-json/src/test/resources/application.yml @@ -0,0 +1,9 @@ +kestra: + repository: + type: memory + queue: + type: memory + storage: + type: local + local: + base-path: /tmp/unittest diff --git a/plugin-transforms-json/src/test/resources/logback.xml b/plugin-transforms-json/src/test/resources/logback.xml new file mode 100644 index 0000000..803c82e --- /dev/null +++ b/plugin-transforms-json/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/settings.gradle b/settings.gradle new file mode 100644 index 0000000..df4db1a --- /dev/null +++ b/settings.gradle @@ -0,0 +1,4 @@ +rootProject.name = 'plugin-transforms' + +include 'plugin-transforms-json' +include 'plugin-transforms-grok' \ No newline at end of file