From dde482856f050332624cef85e9e657f7cd5ca32a Mon Sep 17 00:00:00 2001 From: Jean-Marie Gervais Date: Tue, 6 Feb 2024 11:09:09 +0100 Subject: [PATCH] feat(ranger-plugins): enable log4j for auditlogs --- .../common/templates/log4j.properties.j2 | 10 +++++++ .../hbase/install_hbase.properties.j2 | 7 +++++ .../templates/hbase/log4j.properties.j2 | 10 +++++++ roles/hbase/master/tasks/config.yml | 2 ++ roles/hbase/ranger/tasks/config.yml | 12 ++++++++ roles/hbase/regionserver/tasks/config.yml | 2 ++ .../templates/install_hdfs.properties.j2 | 8 +++++- roles/hdfs/namenode/tasks/config.yml | 2 ++ roles/hdfs/ranger/tasks/config.yml | 18 ++++++++++++ roles/hive/common/templates/hive-env.sh.j2 | 2 +- .../templates/hive-log4j2.properties.j2 | 28 +++++++++++++++++-- .../common/templates/install.properties.j2 | 7 +++++ roles/hive/ranger/tasks/config.yml | 13 +++++++++ .../templates/gateway-log4j.properties.j2 | 11 ++++++++ .../common/templates/install.properties.j2 | 7 +++++ roles/knox/ranger/tasks/config.yml | 12 ++++++++ .../templates/install_yarn.properties.j2 | 7 +++++ roles/yarn/ranger/tasks/config.yml | 18 ++++++++++++ roles/yarn/resourcemanager/tasks/config.yml | 2 ++ tdp_vars_defaults/tdp-cluster/tdp-cluster.yml | 12 ++++++-- 20 files changed, 184 insertions(+), 6 deletions(-) diff --git a/roles/hadoop/common/templates/log4j.properties.j2 b/roles/hadoop/common/templates/log4j.properties.j2 index 7f08ec4a..a2a483d8 100644 --- a/roles/hadoop/common/templates/log4j.properties.j2 +++ b/roles/hadoop/common/templates/log4j.properties.j2 @@ -309,3 +309,13 @@ log4j.appender.EWMA.maxUniqueMessages=${yarn.ewma.maxUniqueMessages} # Log levels of third-party libraries log4j.logger.org.apache.commons.beanutils=WARN + +{% if enable_ranger_audit_log4j and ranger_audit_file is defined %} +ranger.logger=INFO,console,RANGERAUDIT +log4j.logger.xaaudit=${ranger.logger} +log4j.appender.RANGERAUDIT=org.apache.log4j.DailyRollingFileAppender +log4j.appender.RANGERAUDIT.File={{ ranger_audit_file 
}} +log4j.appender.RANGERAUDIT.layout=org.apache.log4j.PatternLayout +log4j.appender.RANGERAUDIT.layout.ConversionPattern={{ tdp_auditlog_layout_pattern }} +#log4j.appender.RANGERAUDIT.DatePattern=.yyyy-MM-dd +{% endif %} diff --git a/roles/hbase/common/templates/hbase/install_hbase.properties.j2 b/roles/hbase/common/templates/hbase/install_hbase.properties.j2 index db7711a2..61fc3220 100644 --- a/roles/hbase/common/templates/hbase/install_hbase.properties.j2 +++ b/roles/hbase/common/templates/hbase/install_hbase.properties.j2 @@ -117,6 +117,13 @@ XAAUDIT.SOLR.MAX_QUEUE_SIZE=1 XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS=1000 XAAUDIT.SOLR.SOLR_URL=http://localhost:6083/solr/ranger_audits +{% if enable_ranger_audit_log4j %} +# log4j audit +XAAUDIT.LOG4J.IS_ENABLED=true +XAAUDIT.LOG4J.DESTINATION=true +XAAUDIT.LOG4J.DESTINATION.LOGGER=xaaudit +{% endif %} + # # SSL Client Certificate Information # diff --git a/roles/hbase/common/templates/hbase/log4j.properties.j2 b/roles/hbase/common/templates/hbase/log4j.properties.j2 index 59f96f44..75a01974 100644 --- a/roles/hbase/common/templates/hbase/log4j.properties.j2 +++ b/roles/hbase/common/templates/hbase/log4j.properties.j2 @@ -122,3 +122,13 @@ log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level} log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN + +{% if enable_ranger_audit_log4j and ranger_audit_file is defined %} +ranger.logger=INFO,console,RANGERAUDIT +log4j.logger.xaaudit=${ranger.logger} +log4j.appender.RANGERAUDIT=org.apache.log4j.DailyRollingFileAppender +log4j.appender.RANGERAUDIT.File={{ ranger_audit_file }} +log4j.appender.RANGERAUDIT.layout=org.apache.log4j.PatternLayout +log4j.appender.RANGERAUDIT.layout.ConversionPattern={{ tdp_auditlog_layout_pattern }} +#log4j.appender.RANGERAUDIT.DatePattern=.yyyy-MM-dd +{% endif %} diff --git 
a/roles/hbase/master/tasks/config.yml b/roles/hbase/master/tasks/config.yml index bd863b2f..eb627520 100644 --- a/roles/hbase/master/tasks/config.yml +++ b/roles/hbase/master/tasks/config.yml @@ -27,6 +27,8 @@ owner: root group: root mode: "644" + vars: + ranger_audit_file: "{{ hbase_log_dir }}/{{ hbase_master_ranger_audit_file }}" - name: Render hbase-site.xml ansible.builtin.template: diff --git a/roles/hbase/ranger/tasks/config.yml b/roles/hbase/ranger/tasks/config.yml index 400d4d3b..88d5c615 100644 --- a/roles/hbase/ranger/tasks/config.yml +++ b/roles/hbase/ranger/tasks/config.yml @@ -24,6 +24,18 @@ dest: "{{ hbase_install_dir }}/conf" state: link +- name: Enable log4j audit in ranger-hbase-audit.xml + ansible.builtin.blockinfile: + path: "{{ ranger_hbase_install_dir }}/install/conf.templates/enable/ranger-hbase-audit-changes.cfg" + marker: "# {mark} ANSIBLE MANAGED BLOCK : log4j audit logs" + insertbefore: "^AZURE.ACCOUNTNAME" + block: | + xasecure.audit.log4j.is.enabled %XAAUDIT.LOG4J.IS_ENABLED% mod create-if-not-exists + xasecure.audit.destination.log4j %XAAUDIT.LOG4J.DESTINATION% mod create-if-not-exists + xasecure.audit.destination.log4j.logger %XAAUDIT.LOG4J.DESTINATION.LOGGER% mod create-if-not-exists + + when: enable_ranger_audit_log4j + - name: Run enable-hbase-plugin.sh ansible.builtin.shell: | export JAVA_HOME={{ java_home }} diff --git a/roles/hbase/regionserver/tasks/config.yml b/roles/hbase/regionserver/tasks/config.yml index 11827fdc..6a9ad206 100644 --- a/roles/hbase/regionserver/tasks/config.yml +++ b/roles/hbase/regionserver/tasks/config.yml @@ -27,6 +27,8 @@ owner: root group: root mode: "644" + vars: + ranger_audit_file: "{{ hbase_log_dir }}/{{ hbase_rs_ranger_audit_file }}" - name: Render hbase-site.xml ansible.builtin.template: diff --git a/roles/hdfs/common/templates/install_hdfs.properties.j2 b/roles/hdfs/common/templates/install_hdfs.properties.j2 index 554d476c..029b7e3d 100644 ---
a/roles/hdfs/common/templates/install_hdfs.properties.j2 +++ b/roles/hdfs/common/templates/install_hdfs.properties.j2 @@ -103,12 +103,18 @@ XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS=60 XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS=600 XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT=10 -#Solr Audit Provder +#Solr Audit Provider XAAUDIT.SOLR.IS_ENABLED=false XAAUDIT.SOLR.MAX_QUEUE_SIZE=1 XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS=1000 XAAUDIT.SOLR.SOLR_URL=http://localhost:6083/solr/ranger_audits +{% if enable_ranger_audit_log4j %} +# log4j audit +XAAUDIT.LOG4J.IS_ENABLED=true +XAAUDIT.LOG4J.DESTINATION=true +XAAUDIT.LOG4J.DESTINATION.LOGGER=xaaudit +{% endif %} # # SSL Client Certificate Information diff --git a/roles/hdfs/namenode/tasks/config.yml b/roles/hdfs/namenode/tasks/config.yml index 3cbd40a0..598e849c 100644 --- a/roles/hdfs/namenode/tasks/config.yml +++ b/roles/hdfs/namenode/tasks/config.yml @@ -30,6 +30,8 @@ owner: root group: root mode: "644" + vars: + ranger_audit_file: "{{ hdfs_log_dir }}/{{ hadoop_hdfs_ranger_audit_file }}" - name: Template HDFS ZKFC service file ansible.builtin.template: diff --git a/roles/hdfs/ranger/tasks/config.yml b/roles/hdfs/ranger/tasks/config.yml index c788a21d..42c07886 100644 --- a/roles/hdfs/ranger/tasks/config.yml +++ b/roles/hdfs/ranger/tasks/config.yml @@ -33,9 +33,27 @@ line: 'ranger.plugin.hdfs.policy.rest.ssl.config.file {{ hadoop_nn_conf_dir }}/ranger-policymgr-ssl.xml \2' backrefs: true +- name: Enable log4j audit in ranger-hdfs-audit.xml + ansible.builtin.blockinfile: + path: "{{ ranger_hdfs_install_dir }}/install/conf.templates/enable/ranger-hdfs-audit-changes.cfg" + marker: "# {mark} ANSIBLE MANAGED BLOCK : log4j audit logs" + insertbefore: "^AZURE.ACCOUNTNAME" + block: | + xasecure.audit.log4j.is.enabled %XAAUDIT.LOG4J.IS_ENABLED% mod create-if-not-exists + xasecure.audit.destination.log4j %XAAUDIT.LOG4J.DESTINATION% mod create-if-not-exists + xasecure.audit.destination.log4j.logger 
%XAAUDIT.LOG4J.DESTINATION.LOGGER% mod create-if-not-exists + + when: enable_ranger_audit_log4j + - name: Run enable-hdfs-plugin.sh ansible.builtin.shell: | export JAVA_HOME={{ java_home }} ./enable-hdfs-plugin.sh args: chdir: "{{ ranger_hdfs_install_dir }}" + +- name: Reestablish symbolic link from etc/hadoop in {{ hadoop_install_dir }} to {{ hadoop_client_conf_dir }} + ansible.builtin.file: + src: "{{ hadoop_client_conf_dir }}" + dest: "{{ hadoop_install_dir }}/etc/hadoop" + state: link diff --git a/roles/hive/common/templates/hive-env.sh.j2 b/roles/hive/common/templates/hive-env.sh.j2 index 5b9557ff..0cccccb5 100644 --- a/roles/hive/common/templates/hive-env.sh.j2 +++ b/roles/hive/common/templates/hive-env.sh.j2 @@ -73,7 +73,7 @@ if [ "$SERVICE" = "hiveserver2" ]; then # Setting for HiveServer2 and Client export HADOOP_HEAPSIZE="{{ hive_hs2_heapsize }}" export HADOOP_LOGS_OPTS="-Dhive.log.dir={{ hive_log_dir }} -Dhive.log.file={{ hive_s2_log_file }} -Dhive.log.level={{ hive_root_logger_level }} -Dhive.root.logger={{ hive_root_logger }}" - export HADOOP_GC_OPTS="-Xloggc:{{ hive_log_dir }}/hiveserver2-gc-%t.log -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCCause -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=10M -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath={{ hive_log_dir }}/hs2_heapdump.hprof" + export HADOOP_GC_OPTS="-Xloggc:{{ hive_log_dir }}/hiveserver2-gc-%t.log -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCCause -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=10M -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath={{ hive_log_dir }}/hs2_heapdump.hprof" export HADOOP_OPTS="$HADOOP_OPTS $JMX_OPTS ${HADOOP_LOGS_OPTS}" fi diff --git a/roles/hive/common/templates/hive-log4j2.properties.j2 b/roles/hive/common/templates/hive-log4j2.properties.j2 index 31ef9168..31d767f0 100644 --- a/roles/hive/common/templates/hive-log4j2.properties.j2 +++ 
b/roles/hive/common/templates/hive-log4j2.properties.j2 @@ -26,7 +26,8 @@ property.hive.log.file = hive.log property.hive.perflogger.log.level = INFO # list of all appenders -appenders = {{ hive_root_logger }} +appenders = {%- if enable_ranger_audit_log4j %}RANGERAUDIT, {% endif %}{{ hive_root_logger }} + # console appender appender.console.type = Console @@ -52,7 +53,7 @@ appender.RFA.strategy.max = {{ hive_log_rfa_maxbackupindex }} appender.DRFA.type = RollingRandomAccessFile appender.DRFA.name = DRFA appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file} -appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.{{ hive_log_drfa_date_pattern }} +appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.{{ hive_log_drfa_date_pattern }} appender.DRFA.layout.type = PatternLayout appender.DRFA.layout.pattern = {{ hive_log_layout_pattern }} appender.DRFA.policies.type = Policies @@ -62,8 +63,31 @@ appender.DRFA.policies.timebased.modulate = false appender.DRFA.strategy.type = DefaultRolloverStrategy appender.DRFA.strategy.max = {{ hive_log_drfa_maxbackupindex }} +{% if enable_ranger_audit_log4j %} +appender.RANGERAUDIT.type = RollingRandomAccessFile +appender.RANGERAUDIT.name = RANGERAUDIT +appender.RANGERAUDIT.fileName = ${sys:hive.log.dir}/{{ hive_ranger_audit_file }} +appender.RANGERAUDIT.filePattern = ${sys:hive.log.dir}/{{ hive_ranger_audit_file }}.{{ hive_log_drfa_date_pattern }} +appender.RANGERAUDIT.layout.type = PatternLayout +appender.RANGERAUDIT.layout.pattern = {{ tdp_auditlog_layout_pattern }} +appender.RANGERAUDIT.policies.type = Policies +appender.RANGERAUDIT.policies.timebased.type = TimeBasedTriggeringPolicy +appender.RANGERAUDIT.policies.timebased.interval = 1 +appender.RANGERAUDIT.policies.timebased.modulate = false +appender.RANGERAUDIT.strategy.type = DefaultRolloverStrategy +appender.RANGERAUDIT.strategy.max = {{ hive_log_drfa_maxbackupindex }} +{% endif %} + # list of all loggers loggers = NIOServerCnxn, 
ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, PerfLogger, AmazonAws, ApacheHttp +{%- if enable_ranger_audit_log4j %}, xaaudit{% endif %} + + +{% if enable_ranger_audit_log4j %} +logger.xaaudit.name = xaaudit +logger.xaaudit.level = INFO +logger.xaaudit.appenderRef.RANGERAUDIT.ref = RANGERAUDIT +{% endif %} logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn logger.NIOServerCnxn.level = WARN diff --git a/roles/hive/common/templates/install.properties.j2 b/roles/hive/common/templates/install.properties.j2 index db6bfac9..2db9a5ec 100755 --- a/roles/hive/common/templates/install.properties.j2 +++ b/roles/hive/common/templates/install.properties.j2 @@ -113,6 +113,13 @@ XAAUDIT.SOLR.MAX_QUEUE_SIZE=1 XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS=1000 XAAUDIT.SOLR.SOLR_URL=http://localhost:6083/solr/ranger_audits +{% if enable_ranger_audit_log4j %} +# log4j audit +XAAUDIT.LOG4J.IS_ENABLED=true +XAAUDIT.LOG4J.DESTINATION=true +XAAUDIT.LOG4J.DESTINATION.LOGGER=xaaudit +{% endif %} + # # SSL Client Certificate Information # diff --git a/roles/hive/ranger/tasks/config.yml b/roles/hive/ranger/tasks/config.yml index 11804a27..51d733a5 100644 --- a/roles/hive/ranger/tasks/config.yml +++ b/roles/hive/ranger/tasks/config.yml @@ -33,6 +33,19 @@ line: 'ranger.plugin.hive.policy.rest.ssl.config.file {{ hive_s2_conf_dir }}/ranger-policymgr-ssl.xml \2' backrefs: true +- name: Enable log4j audit in ranger-hive-audit.xml + ansible.builtin.blockinfile: + path: "{{ ranger_hive_install_dir }}/install/conf.templates/enable/ranger-hive-audit-changes.cfg" + marker: "# {mark} ANSIBLE MANAGED BLOCK : log4j audit logs" + insertbefore: "^AZURE.ACCOUNTNAME" + block: | + xasecure.audit.log4j.is.enabled %XAAUDIT.LOG4J.IS_ENABLED% mod create-if-not-exists + xasecure.audit.destination.log4j %XAAUDIT.LOG4J.DESTINATION% mod create-if-not-exists + xasecure.audit.destination.log4j.logger %XAAUDIT.LOG4J.DESTINATION.LOGGER% mod create-if-not-exists + + when: enable_ranger_audit_log4j + + - name: 
Run enable-hive-plugin.sh ansible.builtin.shell: | export JAVA_HOME={{ java_home }} diff --git a/roles/knox/common/templates/gateway-log4j.properties.j2 b/roles/knox/common/templates/gateway-log4j.properties.j2 index 03895dd4..66042700 100644 --- a/roles/knox/common/templates/gateway-log4j.properties.j2 +++ b/roles/knox/common/templates/gateway-log4j.properties.j2 @@ -104,3 +104,14 @@ log4j.appender.KNOXAGENT.File=${app.log.dir}/${ranger.knoxagent.log.file} log4j.appender.KNOXAGENT.layout=org.apache.log4j.PatternLayout log4j.appender.KNOXAGENT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n %L log4j.appender.KNOXAGENT.DatePattern=.yyyy-MM-dd + + +{% if enable_ranger_audit_log4j %} +ranger.logger=INFO,console,RANGERAUDIT +log4j.logger.xaaudit=${ranger.logger} +log4j.appender.RANGERAUDIT=org.apache.log4j.DailyRollingFileAppender +log4j.appender.RANGERAUDIT.File={{ knox_log_dir }}/{{ knox_ranger_audit_file }} +log4j.appender.RANGERAUDIT.layout=org.apache.log4j.PatternLayout +log4j.appender.RANGERAUDIT.layout.ConversionPattern={{ tdp_auditlog_layout_pattern }} +#log4j.appender.RANGERAUDIT.DatePattern=.yyyy-MM-dd +{% endif %} diff --git a/roles/knox/common/templates/install.properties.j2 b/roles/knox/common/templates/install.properties.j2 index a6b63d56..822c3bac 100755 --- a/roles/knox/common/templates/install.properties.j2 +++ b/roles/knox/common/templates/install.properties.j2 @@ -111,6 +111,13 @@ XAAUDIT.SOLR.MAX_QUEUE_SIZE=1 XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS=1000 XAAUDIT.SOLR.SOLR_URL=http://localhost:6083/solr/ranger_audits +{% if enable_ranger_audit_log4j %} +# log4j audit +XAAUDIT.LOG4J.IS_ENABLED=true +XAAUDIT.LOG4J.DESTINATION=true +XAAUDIT.LOG4J.DESTINATION.LOGGER=xaaudit +{% endif %} + # # SSL Client Certificate Information # diff --git a/roles/knox/ranger/tasks/config.yml b/roles/knox/ranger/tasks/config.yml index 7a034a87..d20817a3 100644 --- a/roles/knox/ranger/tasks/config.yml +++ b/roles/knox/ranger/tasks/config.yml @@ -34,6 +34,18 @@ line: 
'ranger.plugin.knox.policy.rest.ssl.config.file /etc/knox/conf/ranger-policymgr-ssl.xml \2' backrefs: true +- name: Enable log4j audit in ranger-knox-audit.xml + ansible.builtin.blockinfile: + path: "{{ ranger_knox_install_dir }}/install/conf.templates/enable/ranger-knox-audit-changes.cfg" + marker: "# {mark} ANSIBLE MANAGED BLOCK : log4j audit logs" + insertbefore: "^AZURE.ACCOUNTNAME" + block: | + xasecure.audit.log4j.is.enabled %XAAUDIT.LOG4J.IS_ENABLED% mod create-if-not-exists + xasecure.audit.destination.log4j %XAAUDIT.LOG4J.DESTINATION% mod create-if-not-exists + xasecure.audit.destination.log4j.logger %XAAUDIT.LOG4J.DESTINATION.LOGGER% mod create-if-not-exists + + when: enable_ranger_audit_log4j + - name: Run enable-knox-plugin.sh ansible.builtin.shell: | export JAVA_HOME={{ java_home }} diff --git a/roles/yarn/common/templates/install_yarn.properties.j2 b/roles/yarn/common/templates/install_yarn.properties.j2 index 23850e2f..63f2882e 100644 --- a/roles/yarn/common/templates/install_yarn.properties.j2 +++ b/roles/yarn/common/templates/install_yarn.properties.j2 @@ -109,6 +109,12 @@ XAAUDIT.SOLR.MAX_QUEUE_SIZE=1 XAAUDIT.SOLR.MAX_FLUSH_INTERVAL_MS=1000 XAAUDIT.SOLR.SOLR_URL=http://localhost:6083/solr/ranger_audits +{% if enable_ranger_audit_log4j %} +# log4j audit +XAAUDIT.LOG4J.IS_ENABLED=true +XAAUDIT.LOG4J.DESTINATION=true +XAAUDIT.LOG4J.DESTINATION.LOGGER=xaaudit +{% endif %} # # SSL Client Certificate Information @@ -138,3 +144,4 @@ CUSTOM_USER=yarn # CUSTOM_COMPONENT_GROUP= # keep blank if component group is default CUSTOM_GROUP=hadoop + diff --git a/roles/yarn/ranger/tasks/config.yml b/roles/yarn/ranger/tasks/config.yml index f4a0018f..8fe88242 100644 --- a/roles/yarn/ranger/tasks/config.yml +++ b/roles/yarn/ranger/tasks/config.yml @@ -34,9 +34,27 @@ line: 'ranger.plugin.yarn.policy.rest.ssl.config.file /etc/hadoop/conf.rm/ranger-policymgr-ssl.xml \2' backrefs: true +- name: Enable log4j audit in ranger-yarn-audit.xml + ansible.builtin.blockinfile: + 
path: "{{ ranger_yarn_install_dir }}/install/conf.templates/enable/ranger-yarn-audit-changes.cfg" + marker: "# {mark} ANSIBLE MANAGED BLOCK : log4j audit logs" + insertbefore: "^AZURE.ACCOUNTNAME" + block: | + xasecure.audit.log4j.is.enabled %XAAUDIT.LOG4J.IS_ENABLED% mod create-if-not-exists + xasecure.audit.destination.log4j %XAAUDIT.LOG4J.DESTINATION% mod create-if-not-exists + xasecure.audit.destination.log4j.logger %XAAUDIT.LOG4J.DESTINATION.LOGGER% mod create-if-not-exists + + when: enable_ranger_audit_log4j + - name: Run enable-yarn-plugin.sh ansible.builtin.shell: | export JAVA_HOME={{ java_home }} ./enable-yarn-plugin.sh args: chdir: "{{ ranger_yarn_install_dir }}" + +- name: Reestablish symbolic link from etc/hadoop in {{ hadoop_install_dir }} to {{ hadoop_client_conf_dir }} + ansible.builtin.file: + src: "{{ hadoop_client_conf_dir }}" + dest: "{{ hadoop_install_dir }}/etc/hadoop" + state: link diff --git a/roles/yarn/resourcemanager/tasks/config.yml b/roles/yarn/resourcemanager/tasks/config.yml index 213ee6bc..46c4f7e6 100644 --- a/roles/yarn/resourcemanager/tasks/config.yml +++ b/roles/yarn/resourcemanager/tasks/config.yml @@ -30,6 +30,8 @@ owner: root group: root mode: "644" + vars: + ranger_audit_file: "{{ yarn_log_dir }}/{{ hadoop_yarn_ranger_audit_file }}" - name: Render core-site.xml diff --git a/tdp_vars_defaults/tdp-cluster/tdp-cluster.yml b/tdp_vars_defaults/tdp-cluster/tdp-cluster.yml index 5ab38514..8aaf4c7d 100644 --- a/tdp_vars_defaults/tdp-cluster/tdp-cluster.yml +++ b/tdp_vars_defaults/tdp-cluster/tdp-cluster.yml @@ -8,7 +8,7 @@ # Directory where TDP component releases are located locally binaries_local_dir: "{{ lookup('env', 'PWD') }}/files" -# Directory where TDP component releases are uploaded for each host +# Directory where TDP component releases are uploaded for each host binaries_upload_dir: /tmp # Name of the cluster. 
Used as HDFS nameservice and YARN cluster ID @@ -164,10 +164,12 @@ exporter_hdfs_httpfs_http_port: 18122 ################################# # Service & Component logs dirs # ################################# -# format : log4j 1.x "%d{ISO8601}" is not fully compliant with ISO8601 standard +# format : log4j 1.x "%d{ISO8601}" is not fully compliant with ISO8601 standard # (no 'T' as date/time separator) we specify it here. tdp_date_iso8601_with_tz: "%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}" tdp_log_layout_pattern: '{{ tdp_date_iso8601_with_tz }} - %-5p [%t:%C{1}@%L] - %m%n' +enable_ranger_audit_log4j: true +tdp_auditlog_layout_pattern: '%m%n' hadoop_log_dir: /var/log/hadoop @@ -177,11 +179,13 @@ hadoop_hdfs_journalnode_log_file: "hdfs-journalnode_{{ ansible_fqdn }}.log" hadoop_hdfs_zkfc_log_file: "hdfs-zkfc_{{ ansible_fqdn }}.log" hadoop_hdfs_datanode_log_file: "hdfs-datanode_{{ ansible_fqdn }}.log" hadoop_hdfs_httpfs_log_file: "hdfs-httpfs_{{ ansible_fqdn }}.log" +hadoop_hdfs_ranger_audit_file: "hdfs-rangeraudit_{{ ansible_fqdn }}.log" yarn_log_dir: /var/log/yarn hadoop_yarn_resourcemanager_log_file: "yarn-resourcemanager_{{ ansible_fqdn }}.log" hadoop_yarn_nodemanager_log_file: "yarn-nodemanager_{{ ansible_fqdn }}.log" hadoop_yarn_timelineserver_log_file: "yarn-timelineserver_{{ ansible_fqdn }}.log" +hadoop_yarn_ranger_audit_file: "yarn-rangeraudit_{{ ansible_fqdn }}.log" mapred_log_dir: /var/log/mapred hadoop_mapred_historyserver_log_file: "mapred-historyserver_{{ ansible_fqdn }}.log" @@ -190,6 +194,8 @@ hbase_log_dir: /var/log/hbase hbase_hm_log_file: "hbase-master_{{ ansible_fqdn }}.log" hbase_hrs_log_file: "hbase-regionserver_{{ ansible_fqdn }}.log" hbase_hr_log_file: "hbase-rest_{{ ansible_fqdn }}.log" +hbase_master_ranger_audit_file: "hbase-master-rangeraudit_{{ ansible_fqdn }}.log" +hbase_rs_ranger_audit_file: "hbase-region-server-rangeraudit_{{ ansible_fqdn }}.log" phoenix_log_dir: /var/log/phoenix phoenix_queryserver_log_file: "phoenix-queryserver_{{ ansible_fqdn 
}}.log" @@ -197,9 +203,11 @@ phoenix_queryserver_log_file: "phoenix-queryserver_{{ ansible_fqdn }}.log" hive_log_dir: /var/log/hive hive_s2_log_file: "hive-hiveserver2_{{ ansible_fqdn }}.log" hive_ms_log_file: "hive-metastore_{{ ansible_fqdn }}.log" +hive_ranger_audit_file: "hive-rangeraudit_{{ ansible_fqdn }}.log" knox_log_dir: /var/log/knox knox_gateway_log_file: "knox-gateway_{{ ansible_fqdn }}.log" +knox_ranger_audit_file: "knox-rangeraudit_{{ ansible_fqdn }}.log" ranger_log_dir: /var/log/ranger ranger_admin_log_file: "ranger-admin_{{ ansible_fqdn }}.log"