• Hive logging configuration: hive-log4j2.properties


    Configure logs of different levels to go to separate files:

    # Licensed to the Apache Software Foundation (ASF) under one
    # or more contributor license agreements.  See the NOTICE file
    # distributed with this work for additional information
    # regarding copyright ownership.  The ASF licenses this file
    # to you under the Apache License, Version 2.0 (the
    # "License"); you may not use this file except in compliance
    # with the License.  You may obtain a copy of the License at
    #
    #     http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    
    status = INFO
    name = HiveLog4j2
    packages = org.apache.hadoop.hive.ql.log
    
    # list of properties
    property.hive.log.level = INFO
    property.hive.root.logger = DRFA
    property.hive.log.dir = /home/hive/logs
    property.hive.log.file = hive.log
    property.hive.perflogger.log.level = INFO
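    
    # The ${sys:...} references below resolve from Java system properties (set by
    # Hive's launch scripts, or e.g. `hive --hiveconf hive.root.logger=DEBUG,console`);
    # when a system property is unset, Log4j2 falls back to the matching
    # property.* default declared above.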
    
    
    # list of all appenders
    appenders = console, DRFA, DRFB, warn
    
    # console appender
    appender.console.type = Console
    appender.console.name = console
    appender.console.target = SYSTEM_ERR
    appender.console.layout.type = PatternLayout
    appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
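    # %d{ISO8601} = timestamp, %5p = level padded to width 5, [%t] = thread name,
    # %c{2} = last two segments of the logger name, %m%n = message plus newline
    # (the same pattern is reused by the file appenders below)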
    
    # daily rolling file appender
    appender.DRFA.type = RollingRandomAccessFile
    appender.DRFA.name = DRFA
    appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
    # Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions
    appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
    appender.DRFA.layout.type = PatternLayout
    appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
    appender.DRFA.policies.type = Policies
    appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
    appender.DRFA.policies.time.interval = 1
    appender.DRFA.policies.time.modulate = true
    appender.DRFA.strategy.type = DefaultRolloverStrategy
    appender.DRFA.strategy.max = 30
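    # Note: DefaultRolloverStrategy.max only caps the %i counter in a filePattern;
    # with the date-only patterns used here, old files are never deleted automatically
    # (age-based cleanup needs a Delete action inside the strategy). This applies to
    # all three rolling appenders in this file.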
    
    # ERROR log
    appender.DRFB.type = RollingRandomAccessFile
    appender.DRFB.name = RollingFileError
    appender.DRFB.fileName = ${sys:hive.log.dir}/hive-error.log
    # Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions
    appender.DRFB.filePattern = ${sys:hive.log.dir}/hive-error.%d{yyyy-MM-dd}.log
    appender.DRFB.layout.type = PatternLayout
    appender.DRFB.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
    appender.DRFB.policies.type = Policies
    appender.DRFB.policies.time.type = TimeBasedTriggeringPolicy
    appender.DRFB.policies.time.interval = 1
    appender.DRFB.policies.time.modulate = true
    appender.DRFB.strategy.type = DefaultRolloverStrategy
    appender.DRFB.strategy.max = 30
    
    
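    # WARN log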
    appender.warn.type = RollingRandomAccessFile
    appender.warn.name = RollingFileWARN
    appender.warn.fileName = ${sys:hive.log.dir}/hive-warn.log
    # Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI sessions
    appender.warn.filePattern = ${sys:hive.log.dir}/hive-warn.%d{yyyy-MM-dd}.log
    appender.warn.layout.type = PatternLayout
    appender.warn.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
    appender.warn.policies.type = Policies
    appender.warn.policies.time.type = TimeBasedTriggeringPolicy
    appender.warn.policies.time.interval = 1
    appender.warn.policies.time.modulate = true
    appender.warn.strategy.type = DefaultRolloverStrategy
    appender.warn.strategy.max = 30
    
    
    # list of all loggers
    loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, PerfLogger, AmazonAws, ApacheHttp
    
    logger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn
    logger.NIOServerCnxn.level = WARN
    
    logger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO
    logger.ClientCnxnSocketNIO.level = WARN
    
    logger.DataNucleus.name = DataNucleus
    logger.DataNucleus.level = ERROR
    
    logger.Datastore.name = Datastore
    logger.Datastore.level = ERROR
    
    logger.JPOX.name = JPOX
    logger.JPOX.level = ERROR
    
    logger.AmazonAws.name = com.amazonaws
    logger.AmazonAws.level = INFO
    
    logger.ApacheHttp.name = org.apache.http
    logger.ApacheHttp.level = INFO
    
    logger.PerfLogger.name = org.apache.hadoop.hive.ql.log.PerfLogger
    logger.PerfLogger.level = ${sys:hive.perflogger.log.level}
    
    # root logger
    rootLogger.level = ${sys:hive.log.level}
    rootLogger.appenderRef.DRFA.ref = DRFA
    rootLogger.appenderRef.DRFA.level = INFO
    
    rootLogger.appenderRef.DRFB.ref = RollingFileError
    rootLogger.appenderRef.DRFB.level = ERROR
    
    rootLogger.appenderRef.warn.ref = RollingFileWARN
    rootLogger.appenderRef.warn.level = WARN
    #rootLogger.appenderRefs = root,warn
    #rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
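
    Note that rootLogger.appenderRef.<ref>.level is a threshold, not an exact match:
    hive-warn.log receives WARN and everything more severe, so ERROR messages appear
    in both hive-warn.log and hive-error.log. If hive-warn.log should hold WARN
    messages only, Log4j2's standard LevelRangeFilter can be attached to the warn
    appender. A minimal sketch (not part of the original file):

    # keep only WARN in hive-warn.log; WARN..WARN passes, everything else is dropped
    appender.warn.filter.range.type = LevelRangeFilter
    appender.warn.filter.range.minLevel = WARN
    appender.warn.filter.range.maxLevel = WARN
    appender.warn.filter.range.onMatch = ACCEPT
    appender.warn.filter.range.onMismatch = DENY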
• Original article: https://www.cnblogs.com/cangshublogs/p/12263705.html