• Log Configuration


    Log4j – Configuring Log4j 2 - Apache Log4j 2 https://logging.apache.org/log4j/2.x/manual/configuration.html

    log4j2 practical usage in detail (in Chinese) - CSDN Blog https://blog.csdn.net/vbirdbest/article/details/71751835

    Apache log4j 1.2 - Frequently Asked Technical Questions http://logging.apache.org/log4j/1.2/faq.html#noconfig

    log4j 1.x uses Thread.getContextClassLoader().getResource() to locate its default configuration files (log4j.xml, then log4j.properties) on the classpath; it does not check the file system directly.
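
    As a quick check (a sketch, not from the original post), the code below performs the same classpath lookup by hand, then falls back to configuring log4j from an explicit file path; /home/jLog/log4j.properties is only an illustrative location:

    package com.mycom;

    import java.net.URL;

    import org.apache.log4j.PropertyConfigurator;

    public class ConfigLookupCheck {
        public static void main(String[] args) {
            // The same classpath lookup log4j 1.x performs at startup.
            URL url = Thread.currentThread().getContextClassLoader()
                    .getResource("log4j.properties");
            System.out.println("log4j.properties found at: " + url);

            // If the file is not on the classpath, point log4j at it explicitly.
            PropertyConfigurator.configure("/home/jLog/log4j.properties");
        }
    }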

    [INFO] ------------------------------------------------------------------------
    target
    ├── archive-tmp
    ├── classes
    │   ├── com
    │   │   └── mycom
    │   │       ├── ArrayListExample.class
    │   │       ├── ArrayListLinkedListExample.class
    │   │       ├── LinkedListExample.class
    │   │       ├── log4jFlume.class
    │   │       ├── Log4jTest.class
    │   │       ├── MyMR.class
    │   │       ├── SparkWC.class
    │   │       ├── TestMy.class
    │   │       ├── TTSmy.class
    │   │       ├── WordCount.class
    │   │       ├── WordCountImprove.class
    │   │       ├── WordCountImprove$IntSumReducer.class
    │   │       ├── WordCountImprove$TokenizerMapper.class
    │   │       ├── WordCountImprove$TokenizerMapper$CountersEnum.class
    │   │       ├── WordCount$IntSumReducer.class
    │   │       └── WordCount$TokenizerMapper.class
    │   └── log4j.properties
    ├── generated-sources
    │   └── annotations
    ├── maven-archiver
    │   └── pom.properties
    ├── MyAid-1.0.0.jar
    ├── MyAid-1.0.0-jar-with-dependencies.jar
    └── surefire
    
    8 directories, 20 files
    [root@hadoop3 MyBgJavaLan]# java -jar target/MyAid-1.0.0-jar-with-dependencies.jar  com.mycom.Log4jTest
    123
    [INFO ] 2018-07-15 16:32:44,599 method:com.mycom.Log4jTest.main(Log4jTest.java:12)
    my-info
    [DEBUG] 2018-07-15 16:32:44,601 method:com.mycom.Log4jTest.main(Log4jTest.java:13)
    my-debug
    [ERROR] 2018-07-15 16:32:44,601 method:com.mycom.Log4jTest.main(Log4jTest.java:14)
    my-error
    [root@hadoop3 MyBgJavaLan]# ll -as
    total 40
     0 drwxr-xr-x   4 root root    87 Jul 15 16:31 .
     4 drwxr-xr-x. 12 root root  4096 Jul 15 15:40 ..
    12 -rw-r--r--   1 root root 10452 Jul 15 15:26 mynote.txt
    12 -rw-r--r--   1 root root 10445 Jul 15 15:49 pom.xml
    12 -rw-r--r--   1 root root  9025 Jul 10 19:58 pom.xml.BAK.txt
     0 drwxr-xr-x   3 root root    18 Jul 15 16:31 src
     0 drwxr-xr-x   7 root root   171 Jul 15 16:32 target
    [root@hadoop3 MyBgJavaLan]# ll -as /home/jLog/
    total 12
    0 drwxr-xr-x   2 root root   36 Jul 15 16:32 .
    4 drwxr-xr-x. 12 root root 4096 Jul 15 15:40 ..
    4 -rw-r--r--   1 root root  160 Jul 15 16:32 D.log
    4 -rw-r--r--   1 root root   54 Jul 15 16:32 error.log
    [root@hadoop3 MyBgJavaLan]# tree src
    src
    └── main
        ├── java
        │   └── com
        │       └── mycom
        │           ├── ArrayListExample.java
        │           ├── ArrayListLinkedListExample.java
        │           ├── LinkedListExample.java
        │           ├── log4jFlume.java
        │           ├── Log4jTest.java
        │           ├── MyMR.java
        │           ├── SparkWC.java
        │           ├── TestMy.java
        │           ├── TTSmy.java
        │           ├── WordCountImprove.java
        │           └── WordCount.java
        └── resources
            └── log4j.properties
    
    5 directories, 12 files
    [root@hadoop3 MyBgJavaLan]# 
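
    Note how the build lays things out: src/main/resources/log4j.properties is copied into target/classes/ and packaged into the jar, so it sits on the classpath where the classloader lookup described above finds it. The /home/jLog/D.log and error.log files are created by the file appenders configured in that log4j.properties (shown below).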
    

      

    package com.mycom;

    import org.apache.log4j.Logger;

    public class Log4jTest {

        // log4j 1.x picks up log4j.properties from the classpath automatically,
        // so no explicit configuration call is needed here.
        private static final Logger logger = Logger.getLogger(Log4jTest.class);

        public static void main(String[] args) {
            System.out.println("123");
            logger.info("my-info");
            logger.debug("my-debug");
            logger.error("my-error");
        }

    }
    

      

    #log4j.rootLogger=INFO
    #log4j.category.com.mycom=INFO,flume
    #log4j.appender.flume=org.apache.flume.clients.log4jappender.Log4jAppender
    #log4j.appender.flume.Hostname=localhost
    #log4j.appender.flume.Port=44444
    #log4j.appender.flume.UnsafeMode=true
    ### Settings ###
    log4j.rootLogger=debug,stdout,D,E
    ### Console output ###
    log4j.appender.stdout=org.apache.log4j.ConsoleAppender
    log4j.appender.stdout.Target=System.out
    log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
    log4j.appender.stdout.layout.ConversionPattern=[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} method:%l%n%m%n
    ### Log DEBUG level and above to /home/jLog/D.log ###
    log4j.appender.D=org.apache.log4j.DailyRollingFileAppender
    log4j.appender.D.File=/home/jLog/D.log
    log4j.appender.D.Append=true
    log4j.appender.D.Threshold=DEBUG
    log4j.appender.D.layout=org.apache.log4j.PatternLayout
    log4j.appender.D.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss}  [ %t:%r ] - [ %p ]  %m%n
    ### Log ERROR level and above to /home/jLog/error.log ###
    log4j.appender.E=org.apache.log4j.DailyRollingFileAppender
    log4j.appender.E.File=/home/jLog/error.log
    log4j.appender.E.Append=true
    log4j.appender.E.Threshold=ERROR
    log4j.appender.E.layout=org.apache.log4j.PatternLayout
    log4j.appender.E.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss}  [ %t:%r ] - [ %p ]  %m%n
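
    With the root logger set to debug, every log call reaches all three appenders; each appender's Threshold then filters independently. That is why D.log (Threshold=DEBUG) captures the info, debug, and error messages while error.log (Threshold=ERROR) captures only the error one, matching the file sizes seen above.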
    

      

            <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
            <dependency>
                <groupId>org.apache.logging.log4j</groupId>
                <artifactId>log4j-core</artifactId>
                <version>2.10.0</version>
            </dependency>
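
    Note: the test code above uses the log4j 1.x API (org.apache.log4j.*), which log4j-core 2.x does not provide; in this project the 1.x classes evidently arrive through another dependency bundled into the jar-with-dependencies. If the commented-out Flume appender (org.apache.flume.clients.log4jappender.Log4jAppender) is enabled, the flume-ng-log4jappender artifact (groupId org.apache.flume.flume-ng-clients) must also be on the classpath.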
    

      

    Sending log4j output directly to Flume
    ############
    [root@hadoop3 apache-flume-1.8.0-bin]# ll -as
    total 148
     0 drwxr-xr-x  7 root root   187 Jul 16 10:44 .
     0 drwxr-xr-x  6 root root   202 Jul 16 10:44 ..
     0 drwxr-xr-x  2 root root    62 Jul 16 10:44 bin
    80 -rw-r--r--  1 root root 81264 Sep 15  2017 CHANGELOG
     0 drwxr-xr-x  2 root root   127 Jul 16 10:44 conf
     8 -rw-r--r--  1 root root  5681 Sep 15  2017 DEVNOTES
     4 -rw-r--r--  1 root root  2873 Sep 15  2017 doap_Flume.rdf
     4 drwxr-xr-x 10 root root  4096 Sep 15  2017 docs
    12 drwxr-xr-x  2 root root  8192 Jul 16 10:44 lib
    28 -rw-r--r--  1 root root 27663 Sep 15  2017 LICENSE
     4 -rw-r--r--  1 root root   249 Sep 15  2017 NOTICE
     4 -rw-r--r--  1 root root  2483 Sep 15  2017 README.md
     4 -rw-r--r--  1 root root  1588 Sep 15  2017 RELEASE-NOTES
     0 drwxr-xr-x  2 root root    68 Jul 16 10:44 tools
    [root@hadoop3 apache-flume-1.8.0-bin]# tree conf/
    conf/
    ├── flume-conf.properties.template
    ├── flume-env.ps1.template
    ├── flume-env.sh.template
    └── log4j.properties
    
    0 directories, 4 files
    [root@hadoop3 apache-flume-1.8.0-bin]#
    
    ############
    [root@hadoop3 apache-flume-1.8.0-bin]# cat conf/log4j.properties 
    #
    # Licensed to the Apache Software Foundation (ASF) under one
    # or more contributor license agreements.  See the NOTICE file
    # distributed with this work for additional information
    # regarding copyright ownership.  The ASF licenses this file
    # to you under the Apache License, Version 2.0 (the
    # "License"); you may not use this file except in compliance
    # with the License.  You may obtain a copy of the License at
    #
    #  http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing,
    # software distributed under the License is distributed on an
    # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    # KIND, either express or implied.  See the License for the
    # specific language governing permissions and limitations
    # under the License.
    #
    
    # Define some default values that can be overridden by system properties.
    #
    # For testing, it may also be convenient to specify
    # -Dflume.root.logger=DEBUG,console when launching flume.
    
    #flume.root.logger=DEBUG,console
    flume.root.logger=INFO,LOGFILE
    flume.log.dir=./logs
    flume.log.file=flume.log
    
    log4j.logger.org.apache.flume.lifecycle = INFO
    log4j.logger.org.jboss = WARN
    log4j.logger.org.mortbay = INFO
    log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
    log4j.logger.org.apache.hadoop = INFO
    log4j.logger.org.apache.hadoop.hive = ERROR
    
    # Define the root logger to the system property "flume.root.logger".
    log4j.rootLogger=${flume.root.logger}
    
    
    # Stock log4j rolling file appender
    # Default log rotation configuration
    log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
    log4j.appender.LOGFILE.MaxFileSize=100MB
    log4j.appender.LOGFILE.MaxBackupIndex=10
    log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
    log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
    log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
    
    
    # Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
    # This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
    # See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
    # Add "DAILY" to flume.root.logger above if you want to use this
    log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
    log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
    log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
    log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
    log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
    log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
    
    
    # console
    # Add "console" to flume.root.logger above if you want to use this
    log4j.appender.console=org.apache.log4j.ConsoleAppender
    log4j.appender.console.target=System.err
    log4j.appender.console.layout=org.apache.log4j.PatternLayout
    log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
    [root@hadoop3 apache-flume-1.8.0-bin]# 

    Flume 1.8.0 Developer Guide — Apache Flume http://flume.apache.org/FlumeDeveloperGuide.html

      bin/flume-ng agent --conf ./conf/ -f conf/flume.conf -Dflume.root.logger=DEBUG,console -n agent1
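
    In this command, --conf ./conf/ points at the directory holding flume-env.sh and the agent's own log4j.properties, -f conf/flume.conf names the agent configuration file, -Dflume.root.logger=DEBUG,console overrides flume.root.logger from the default log4j.properties shown above, and -n agent1 must match the agent name used inside flume.conf.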

    package com.mycom;
    
    import org.apache.flume.Event;
    import org.apache.flume.EventDeliveryException;
    import org.apache.flume.api.RpcClient;
    import org.apache.flume.api.RpcClientFactory;
    import org.apache.flume.event.EventBuilder;
    
    import java.nio.charset.Charset;
    
    //http://flume.apache.org/FlumeDeveloperGuide.html
    
    public class MyAppFlume {
        public static void main(String[] args) {
            MyRpcClientFacade client = new MyRpcClientFacade();
            // Initialize client with the remote Flume agent's host and port
            client.init("hadoop3", 41414);
    
            // Send 20 events to the remote Flume agent. That agent should be configured to listen with an AvroSource.
            String sampleData = "Hello Flume!";
            for (int i = 0; i < 20; i++) {
                client.sendDataToFlume(sampleData);
            }
            client.cleanUp();
        }
    }
    
    class MyRpcClientFacade {
        private RpcClient client;
        private String hostname;
        private int port;
    
        public void init(String hostname, int port) {
            // Setup the RPC connection
            this.hostname = hostname;
            this.port = port;
            this.client = RpcClientFactory.getDefaultInstance(hostname, port);
            // To create a Thrift client instead, replace the line above with:
            // this.client = RpcClientFactory.getThriftInstance(hostname, port);
    
        }
    
        public void sendDataToFlume(String data) {
            // Create a Flume Event object that encapsulates the sample data
            Event event = EventBuilder.withBody(data, Charset.forName("UTF-8"));
            // Send the event
            try {
                client.append(event);
            } catch (EventDeliveryException e) {
                // On delivery failure, clean up and recreate the client.
                // Note: the event that failed is dropped here, not retried.
                client.close();
                client = null;
                client = RpcClientFactory.getDefaultInstance(hostname, port);
            }
        }
    
        public void cleanUp() {
            // Close the RPC connection
            client.close();
        }
    }
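
    As a variation (a sketch, not part of the original post), the same 20 events could be delivered in a single round-trip using RpcClient.appendBatch; the class name MyAppFlumeBatch is invented here, while the host and port mirror the example above:

    package com.mycom;

    import java.nio.charset.Charset;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.flume.Event;
    import org.apache.flume.EventDeliveryException;
    import org.apache.flume.api.RpcClient;
    import org.apache.flume.api.RpcClientFactory;
    import org.apache.flume.event.EventBuilder;

    public class MyAppFlumeBatch {
        public static void main(String[] args) throws EventDeliveryException {
            RpcClient client = RpcClientFactory.getDefaultInstance("hadoop3", 41414);
            try {
                // Build all 20 events up front, then send them as one batch.
                List<Event> batch = new ArrayList<Event>();
                for (int i = 0; i < 20; i++) {
                    batch.add(EventBuilder.withBody("Hello Flume!", Charset.forName("UTF-8")));
                }
                client.appendBatch(batch);
            } finally {
                client.close();
            }
        }
    }

    The agent-side configuration used for this test (conf/flume.conf):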
    # The configuration file needs to define the sources,
    # the channels and the sinks.
    # Sources, channels and sinks are defined per agent,
    # in this case called 'agent1'.
    
    
    agent1.channels.ch1.type = memory
    
    agent1.sources.avro-source1.channels = ch1
    agent1.sources.avro-source1.type = avro
    agent1.sources.avro-source1.bind = 0.0.0.0
    agent1.sources.avro-source1.port = 41414
    
    
    agent1.sinks.log-sink1.channel = ch1
    agent1.sinks.log-sink1.type = logger
    
    
    agent1.channels = ch1
    agent1.sources = avro-source1
    agent1.sinks = log-sink1
    
    
    
    
    
    [root@hadoop3 myBg]# cat apache-flume-1.8.0-bin/conf/flume.conf 
    [root@hadoop3 MyBgJavaLan]# java -classpath target/MyAid-1.0.0-jar-with-dependencies.jar  com.mycom.MyAppFlume 
    [DEBUG] 2018-07-16 17:09:00,072 method:org.apache.flume.api.NettyAvroRpcClient.configure(NettyAvroRpcClient.java:498)
    Batch size string = 0
    [WARN ] 2018-07-16 17:09:00,076 method:org.apache.flume.api.NettyAvroRpcClient.configure(NettyAvroRpcClient.java:504)
    Invalid value for batchSize: 0; Using default value.
    [WARN ] 2018-07-16 17:09:00,083 method:org.apache.flume.api.NettyAvroRpcClient.configure(NettyAvroRpcClient.java:634)
    Using default maxIOWorkers
    [DEBUG] 2018-07-16 17:09:00,129 method:org.apache.avro.ipc.NettyTransceiver.<init>(NettyTransceiver.java:195)
    Using Netty bootstrap options: {connectTimeoutMillis=20000, tcpNoDelay=true}
    [DEBUG] 2018-07-16 17:09:00,130 method:org.apache.avro.ipc.NettyTransceiver.getChannel(NettyTransceiver.java:252)
    Connecting to hadoop3/192.168.3.103:41414
    [DEBUG] 2018-07-16 17:09:00,148 method:org.apache.avro.ipc.NettyTransceiver$NettyClientAvroHandler.handleUpstream(NettyTransceiver.java:491)
    [id: 0x5aeca4d0] OPEN
    [DEBUG] 2018-07-16 17:09:00,206 method:org.apache.avro.ipc.NettyTransceiver$NettyClientAvroHandler.handleUpstream(NettyTransceiver.java:491)
    [id: 0x5aeca4d0, /192.168.3.103:36724 => hadoop3/192.168.3.103:41414] BOUND: /192.168.3.103:36724
    [DEBUG] 2018-07-16 17:09:00,206 method:org.apache.avro.ipc.NettyTransceiver$NettyClientAvroHandler.handleUpstream(NettyTransceiver.java:491)
    [id: 0x5aeca4d0, /192.168.3.103:36724 => hadoop3/192.168.3.103:41414] CONNECTED: hadoop3/192.168.3.103:41414
    [DEBUG] 2018-07-16 17:09:00,435 method:org.apache.avro.ipc.NettyTransceiver.disconnect(NettyTransceiver.java:314)
    Disconnecting from hadoop3/192.168.3.103:41414
    [DEBUG] 2018-07-16 17:09:00,436 method:org.apache.avro.ipc.NettyTransceiver.disconnect(NettyTransceiver.java:336)
    Removing 1 pending request(s).
    [DEBUG] 2018-07-16 17:09:00,438 method:org.apache.avro.ipc.NettyTransceiver$NettyClientAvroHandler.handleUpstream(NettyTransceiver.java:491)
    [id: 0x5aeca4d0, /192.168.3.103:36724 :> hadoop3/192.168.3.103:41414] DISCONNECTED
    [DEBUG] 2018-07-16 17:09:00,439 method:org.apache.avro.ipc.NettyTransceiver$NettyClientAvroHandler.handleUpstream(NettyTransceiver.java:491)
    [id: 0x5aeca4d0, /192.168.3.103:36724 :> hadoop3/192.168.3.103:41414] UNBOUND
    [DEBUG] 2018-07-16 17:09:00,440 method:org.apache.avro.ipc.NettyTransceiver$NettyClientAvroHandler.handleUpstream(NettyTransceiver.java:491)
    [id: 0x5aeca4d0, /192.168.3.103:36724 :> hadoop3/192.168.3.103:41414] CLOSED
    [DEBUG] 2018-07-16 17:09:00,440 method:org.apache.avro.ipc.NettyTransceiver$NettyClientAvroHandler.handleUpstream(NettyTransceiver.java:495)
    Remote peer hadoop3/192.168.3.103:41414 closed connection.
    [root@hadoop3 MyBgJavaLan]# 
    

      

    2018-07-16 17:08:21,448 (conf-file-poller-0) [DEBUG - org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:127)] Checking file:conf/flume.conf for changes
    2018-07-16 17:08:51,448 (conf-file-poller-0) [DEBUG - org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:127)] Checking file:conf/flume.conf for changes
    2018-07-16 17:09:00,202 (New I/O server boss #5) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)] [id: 0x6e7b6074, /192.168.3.103:36724 => /192.168.3.103:41414] OPEN
    2018-07-16 17:09:00,203 (New I/O worker #2) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)] [id: 0x6e7b6074, /192.168.3.103:36724 => /192.168.3.103:41414] BOUND: /192.168.3.103:41414
    2018-07-16 17:09:00,203 (New I/O worker #2) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)] [id: 0x6e7b6074, /192.168.3.103:36724 => /192.168.3.103:41414] CONNECTED: /192.168.3.103:36724
    2018-07-16 17:09:00,391 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,391 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,405 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,406 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,407 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,407 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,408 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,409 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,410 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,410 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,411 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,412 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,413 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,413 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,415 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,415 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,417 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,417 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,418 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,419 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,420 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,420 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,422 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,422 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,424 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,424 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,425 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,425 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,426 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,427 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,428 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,428 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,429 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,430 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,431 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,431 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,432 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,432 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,434 (New I/O worker #2) [DEBUG - org.apache.flume.source.AvroSource.append(AvroSource.java:351)] Avro source avro-source1: Received avro event
    2018-07-16 17:09:00,434 (SinkRunner-PollingRunner-DefaultSinkProcessor) [INFO - org.apache.flume.sink.LoggerSink.process(LoggerSink.java:95)] Event: { headers:{} body: 48 65 6C 6C 6F 20 46 6C 75 6D 65 21             Hello Flume! }
    2018-07-16 17:09:00,437 (New I/O worker #2) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)] [id: 0x6e7b6074, /192.168.3.103:36724 :> /192.168.3.103:41414] DISCONNECTED
    2018-07-16 17:09:00,439 (New I/O worker #2) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)] [id: 0x6e7b6074, /192.168.3.103:36724 :> /192.168.3.103:41414] UNBOUND
    2018-07-16 17:09:00,439 (New I/O worker #2) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.handleUpstream(NettyServer.java:171)] [id: 0x6e7b6074, /192.168.3.103:36724 :> /192.168.3.103:41414] CLOSED
    2018-07-16 17:09:00,439 (New I/O worker #2) [INFO - org.apache.avro.ipc.NettyServer$NettyServerAvroHandler.channelClosed(NettyServer.java:209)] Connection to /192.168.3.103:36724 disconnected.
    2018-07-16 17:09:21,448 (conf-file-poller-0) [DEBUG - org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:127)] Checking file:conf/flume.conf for changes
    2018-07-16 17:09:51,449 (conf-file-poller-0) [DEBUG - org.apache.flume.node.PollingPropertiesFileConfigurationProvider$FileWatcherRunnable.run(PollingPropertiesFileConfigurationProvider.java:127)] Checking file:conf/flume.conf for changes
    

      

    [root@hadoop3 apache-flume-1.8.0-bin]# ll -as  /home/jLog/
    total 8
    0 drwxr-xr-x   2 root root   36 Jul 16 17:09 .
    4 drwxr-xr-x. 12 root root 4096 Jul 15 15:40 ..
    4 -rw-r--r--   1 root root 1551 Jul 16 17:09 D.log
    0 -rw-r--r--   1 root root    0 Jul 16 17:09 error.log
    [root@hadoop3 apache-flume-1.8.0-bin]# cat /home/jLog/error.log 
    [root@hadoop3 apache-flume-1.8.0-bin]# cat /home/jLog/D.log 
    2018-07-16 17:09:00  [ main:0 ] - [ DEBUG ]  Batch size string = 0
    2018-07-16 17:09:00  [ main:4 ] - [ WARN ]  Invalid value for batchSize: 0; Using default value.
    2018-07-16 17:09:00  [ main:11 ] - [ WARN ]  Using default maxIOWorkers
    2018-07-16 17:09:00  [ main:57 ] - [ DEBUG ]  Using Netty bootstrap options: {connectTimeoutMillis=20000, tcpNoDelay=true}
    2018-07-16 17:09:00  [ main:58 ] - [ DEBUG ]  Connecting to hadoop3/192.168.3.103:41414
    2018-07-16 17:09:00  [ main:76 ] - [ DEBUG ]  [id: 0x5aeca4d0] OPEN
    2018-07-16 17:09:00  [ New I/O worker #1:134 ] - [ DEBUG ]  [id: 0x5aeca4d0, /192.168.3.103:36724 => hadoop3/192.168.3.103:41414] BOUND: /192.168.3.103:36724
    2018-07-16 17:09:00  [ New I/O worker #1:134 ] - [ DEBUG ]  [id: 0x5aeca4d0, /192.168.3.103:36724 => hadoop3/192.168.3.103:41414] CONNECTED: hadoop3/192.168.3.103:41414
    2018-07-16 17:09:00  [ main:363 ] - [ DEBUG ]  Disconnecting from hadoop3/192.168.3.103:41414
    2018-07-16 17:09:00  [ main:364 ] - [ DEBUG ]  Removing 1 pending request(s).
    2018-07-16 17:09:00  [ New I/O worker #1:366 ] - [ DEBUG ]  [id: 0x5aeca4d0, /192.168.3.103:36724 :> hadoop3/192.168.3.103:41414] DISCONNECTED
    2018-07-16 17:09:00  [ New I/O worker #1:367 ] - [ DEBUG ]  [id: 0x5aeca4d0, /192.168.3.103:36724 :> hadoop3/192.168.3.103:41414] UNBOUND
    2018-07-16 17:09:00  [ New I/O worker #1:368 ] - [ DEBUG ]  [id: 0x5aeca4d0, /192.168.3.103:36724 :> hadoop3/192.168.3.103:41414] CLOSED
    2018-07-16 17:09:00  [ New I/O worker #1:368 ] - [ DEBUG ]  Remote peer hadoop3/192.168.3.103:41414 closed connection.
    [root@hadoop3 apache-flume-1.8.0-bin]# 
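
    This run confirms the appender thresholds from the client's log4j.properties: all DEBUG-and-above output from the Flume SDK went to D.log, while error.log stayed empty because nothing was logged at ERROR level.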
    

      

  • Original article: https://www.cnblogs.com/rsapaper/p/9311857.html