• Hadoop Source Code Analysis 31: TaskTracker Members


    1. Start the HttpServer

    this.server = new HttpServer("task", httpBindAddress, httpPort,
        httpPort == 0, conf, aclsManager.getAdminsAcl());
    server.start();
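
    httpBindAddress and httpPort are not derived in the snippet above; a minimal sketch of where they typically come from in Hadoop 1.x follows (the property name and default port are assumptions, not part of the quoted source). This HTTP server is, among other things, what serves map output to reducers during the shuffle.

       // Sketch, assuming the Hadoop 1.x property name and default port:
       String addrStr = fConf.get("mapred.task.tracker.http.address", "0.0.0.0:50060");
       InetSocketAddress httpAddr = NetUtils.createSocketAddr(addrStr);
       String httpBindAddress = httpAddr.getHostName();
       int httpPort = httpAddr.getPort();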


    2. Create the DefaultTaskController

    taskController = (TaskController) ReflectionUtils.newInstance(taskControllerClass, fConf);
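
    taskControllerClass is resolved from the TaskTracker configuration before this call; a hedged sketch of that lookup follows (the property name is an assumption based on Hadoop 1.x and is not shown in the quoted source):

       // Assumed Hadoop 1.x lookup: which TaskController implementation to use.
       // DefaultTaskController runs tasks as the TaskTracker's own user, while
       // LinuxTaskController runs them as the submitting user.
       Class<? extends TaskController> taskControllerClass =
           fConf.getClass("mapred.task.tracker.task-controller",
                          DefaultTaskController.class, TaskController.class);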

     

    3. JvmManager

    this.jvmManager = new JvmManager(this);
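
    JvmManager keeps track of the child JVMs spawned for map and reduce tasks and decides when a JVM may be reused for another task of the same job. A one-line sketch of the knob that governs reuse (the property name is an assumption from Hadoop 1.x; jobConf here stands for the per-job configuration):

       // Assumed Hadoop 1.x property: how many tasks of the same job one child JVM
       // may run before being torn down; -1 means unlimited reuse.
       int tasksPerJvm = jobConf.getInt("mapred.job.reuse.jvm.num.tasks", 1);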

     

    4. Implementation of TaskUmbilicalProtocol

       // set the number of handlers to max*2 since canCommit may wait for the
       // duration of a heartbeat RPC
       this.taskReportServer = RPC.getServer(this, bindAddress,
           tmpPort, 2 * max, false, this.fConf, this.jobTokenSecretManager);
       this.taskReportServer.start();
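
    taskReportServer is the "umbilical" RPC endpoint that the child JVM (org.apache.hadoop.mapred.Child, launched in section 9) connects back to in order to report progress and coordinate output commits. A hedged sketch of the child side follows; the method names mirror the Hadoop 1.x TaskUmbilicalProtocol, but the exact signatures should be treated as assumptions:

       // Child-side sketch: get a proxy to the TaskTracker's umbilical server and
       // report status / ask for commit permission through it.
       TaskUmbilicalProtocol umbilical = (TaskUmbilicalProtocol) RPC.getProxy(
           TaskUmbilicalProtocol.class, TaskUmbilicalProtocol.versionID,
           taskTrackerAddr, conf);
       umbilical.statusUpdate(taskId, taskStatus); // periodic progress updates
       if (umbilical.canCommit(taskId)) {          // may block up to one heartbeat,
         // commit the task's output               // hence the 2 * max handlers above
       }
       umbilical.done(taskId);                     // final completion report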

     

    5. TrackerDistributedCacheManager

       // Initialize DistributedCache
       this.distributedCacheManager = new TrackerDistributedCacheManager(
           this.fConf, taskController);
       this.distributedCacheManager.startCleanupThread();
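
    The cleanup thread started here periodically evicts localized distributed-cache files once the local cache grows too large. The size limit it enforces is sketched below (the property name and default are Hadoop 1.x assumptions, not taken from the quoted source):

       // Assumed Hadoop 1.x property: total size allowed for localized cache files
       // on this node before the cleanup thread starts deleting old entries.
       long allowedCacheSize = fConf.getLong("local.cache.size", 10L * 1024 * 1024 * 1024);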

     

    6. Client side of InterTrackerProtocol

       this.jobClient = (InterTrackerProtocol)
         UserGroupInformation.getLoginUser().doAs(
             new PrivilegedExceptionAction<Object>() {
               public Object run() throws IOException {
                 return RPC.waitForProxy(InterTrackerProtocol.class,
                     InterTrackerProtocol.versionID,
                     jobTrackAddr, fConf);
               }
             });
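
    This jobClient proxy is the handle later used by transmitHeartBeat() in section 9 to call InterTrackerProtocol.heartbeat(); wrapping the proxy creation in getLoginUser().doAs() makes the RPC connection under the TaskTracker's login user, which matters once security is enabled.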

     

    7. MapEventsFetcherThread

       // start the thread that will fetch map task completion events
       this.mapEventsFetcher = new MapEventsFetcherThread();
       ...
       mapEventsFetcher.start();
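
    This thread exists so reduce tasks on the node can learn where completed map outputs live: it periodically asks the JobTracker for new task-completion events and caches them per job. The loop below is a simplified conceptual sketch; getTaskCompletionEvents and the helper names are assumptions, not verbatim Hadoop source:

       // Simplified fetcher loop (conceptual sketch, helper names are hypothetical):
       while (running) {
         for (RunningJob rjob : jobsWithRunningReduces()) {        // hypothetical helper
           TaskCompletionEvent[] events = jobClient.getTaskCompletionEvents(
               rjob.getJobID(), rjob.fromEventId, MAX_EVENTS);     // assumed RPC method
           rjob.cacheMapCompletionEvents(events);                  // hypothetical helper
           rjob.fromEventId += events.length;                      // advance the cursor
         }
         waitForNextPollOrNotification();                          // hypothetical helper
       }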

     

    8. TaskLauncher

       mapLauncher = new TaskLauncher(TaskType.MAP, maxMapSlots);
       reduceLauncher = new TaskLauncher(TaskType.REDUCE, maxReduceSlots);
       mapLauncher.start();
       reduceLauncher.start();
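
    A TaskLauncher is essentially the consumer side of a producer-consumer pattern: heartbeat handling enqueues tasks to launch, and the launcher thread takes one off the queue only when a free slot is available. The class below is a self-contained conceptual model of that pattern in plain Java, not the Hadoop class itself:

       // Conceptual model of TaskLauncher (plain Java, not the Hadoop source).
       class SlotAwareLauncher extends Thread {
         private final java.util.Deque<Runnable> tasksToLaunch = new java.util.ArrayDeque<>();
         private int freeSlots;

         SlotAwareLauncher(int numSlots) { this.freeSlots = numSlots; }

         // Called from the heartbeat path when the JobTracker assigns a task.
         public synchronized void addToTaskQueue(Runnable task) {
           tasksToLaunch.addLast(task);
           notifyAll();
         }

         // Called when a running task finishes and releases its slot.
         public synchronized void addFreeSlot() {
           freeSlots++;
           notifyAll();
         }

         @Override
         public void run() {
           while (true) {
             Runnable task;
             synchronized (this) {
               // Block until there is both a queued task and a free slot for it.
               while (tasksToLaunch.isEmpty() || freeSlots == 0) {
                 try { wait(); } catch (InterruptedException e) { return; }
               }
               task = tasksToLaunch.removeFirst();
               freeSlots--;
             }
             task.run(); // launch outside the lock
           }
         }
       }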


    9. Heartbeat

    // Send the heartbeat and process the jobtracker's directives
    HeartbeatResponse heartbeatResponse = transmitHeartBeat(now);

    Free disk space is obtained with the df command, and CPU and memory figures are read from /proc/stat, /proc/meminfo and /proc/cpuinfo. This information is sent to the JobTracker by calling InterTrackerProtocol.heartbeat(...).
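
    transmitHeartBeat() packs this node's state into a TaskTrackerStatus and sends it over the jobClient proxy created in section 6. The outline below is an approximation; the parameter list follows the Hadoop 1.x InterTrackerProtocol.heartbeat() as recalled here and should not be read as the verbatim signature:

       // Simplified outline of transmitHeartBeat (approximation, not verbatim source):
       // gather free disk space (df) and CPU/memory figures (/proc/*), wrap them,
       // together with the status of all running tasks, into a TaskTrackerStatus,
       // then send it:
       HeartbeatResponse response = jobClient.heartbeat(
           status,               // the TaskTrackerStatus built above
           justStarted,          // did the TaskTracker restart since the last contact?
           justInited,           // first heartbeat after (re)initialization?
           askForNewTask,        // does this node have free slots for new tasks?
           heartbeatResponseId); // sequence number echoed back by the JobTracker
       // The response carries TaskTrackerActions such as LaunchTaskAction or
       // KillTaskAction, which the main loop then dispatches (e.g. via startNewTask).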

      

    RPC request from the TaskTracker: heartbeat from 10.1.1.103
    Response: LaunchTaskAction Set[0], attempt_201404230054_0001_m_000004_0
    Handling: startNewTask(tip)
    The child JVM is started with the command:
    bash -c /tmp/hadoop-admin/mapred/local/ttprivate/taskTracker/admin/jobcache/job_201404230054_0001/attempt_201404230054_0001_m_000004_0/taskjvm.sh

     

    Contents of taskjvm.sh (the full java command line, one argument per element):

    [/opt/jdk1.7.0_07/jre/bin/java,
     -Djava.library.path=/opt/hadoop-1.0.0/libexec/../lib/native/Linux-amd64-64:/tmp/hadoop-admin/mapred/local/taskTracker/admin/jobcache/job_201404230054_0001/attempt_201404230054_0001_r_000003_0/work,
     -Xmx200m,
     -Djava.net.preferIPv4Stack=true,
     -Dhadoop.metrics.log.level=WARN,
     -Djava.io.tmpdir=/tmp/hadoop-admin/mapred/local/taskTracker/admin/jobcache/job_201404230054_0001/attempt_201404230054_0001_r_000003_0/work/tmp,
     -classpath,
     /opt/hadoop-1.0.0/libexec/../conf:/opt/jdk1.7.0_07/lib/tools.jar:/opt/hadoop-1.0.0/libexec/..:/opt/hadoop-1.0.0/libexec/../hadoop-core-1.0.0.jar:/opt/hadoop-1.0.0/libexec/../lib/asm-3.2.jar:/opt/hadoop-1.0.0/libexec/../lib/aspectjrt-1.6.5.jar:/opt/hadoop-1.0.0/libexec/../lib/aspectjtools-1.6.5.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-beanutils-1.7.0.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-beanutils-core-1.8.0.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-cli-1.2.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-codec-1.4.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-collections-3.2.1.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-configuration-1.6.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-daemon-1.0.1.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-digester-1.8.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-el-1.0.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-httpclient-3.0.1.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-lang-2.4.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-logging-1.1.1.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-logging-api-1.0.4.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-math-2.1.jar:/opt/hadoop-1.0.0/libexec/../lib/commons-net-1.4.1.jar:/opt/hadoop-1.0.0/libexec/../lib/core-3.1.1.jar:/opt/hadoop-1.0.0/libexec/../lib/hadoop-capacity-scheduler-1.0.0.jar:/opt/hadoop-1.0.0/libexec/../lib/hadoop-fairscheduler-1.0.0.jar:/opt/hadoop-1.0.0/libexec/../lib/hadoop-thriftfs-1.0.0.jar:/opt/hadoop-1.0.0/libexec/../lib/hsqldb-1.8.0.10.jar:/opt/hadoop-1.0.0/libexec/../lib/jackson-core-asl-1.0.1.jar:/opt/hadoop-1.0.0/libexec/../lib/jackson-mapper-asl-1.0.1.jar:/opt/hadoop-1.0.0/libexec/../lib/jasper-compiler-5.5.12.jar:/opt/hadoop-1.0.0/libexec/../lib/jasper-runtime-5.5.12.jar:/opt/hadoop-1.0.0/libexec/../lib/jdeb-0.8.jar:/opt/hadoop-1.0.0/libexec/../lib/jersey-core-1.8.jar:/opt/hadoop-1.0.0/libexec/../lib/jersey-json-1.8.jar:/opt/hadoop-1.0.0/libexec/../lib/jersey-server-1.8.jar:/opt/hadoop-1.0.0/libexec/../lib/jets3t-0.6.1.jar:/opt/hadoop-1.0.0/libexec/../lib/jetty-6.1.26.jar:/opt/hadoop-1.0.0/libexec/../lib/jetty-util-6.1.26.jar:/opt/hadoop-1.0.0/libexec/../lib/jsch-0.1.42.jar:/opt/hadoop-1.0.0/libexec/../lib/junit-4.5.jar:/opt/hadoop-1.0.0/libexec/../lib/kfs-0.2.2.jar:/opt/hadoop-1.0.0/libexec/../lib/log4j-1.2.15.jar:/opt/hadoop-1.0.0/libexec/../lib/mockito-all-1.8.5.jar:/opt/hadoop-1.0.0/libexec/../lib/oro-2.0.8.jar:/opt/hadoop-1.0.0/libexec/../lib/servlet-api-2.5-20081211.jar:/opt/hadoop-1.0.0/libexec/../lib/slf4j-api-1.4.3.jar:/opt/hadoop-1.0.0/libexec/../lib/slf4j-log4j12-1.4.3.jar:/opt/hadoop-1.0.0/libexec/../lib/xmlenc-0.52.jar:/opt/hadoop-1.0.0/libexec/../lib/jsp-2.1/jsp-2.1.jar:/opt/hadoop-1.0.0/libexec/../lib/jsp-2.1/jsp-api-2.1.jar:/tmp/hadoop-admin/mapred/local/taskTracker/admin/jobcache/job_201404230054_0001/jars/classes:/tmp/hadoop-admin/mapred/local/taskTracker/admin/jobcache/job_201404230054_0001/jars:/tmp/hadoop-admin/mapred/local/taskTracker/admin/jobcache/job_201404230054_0001/attempt_201404230054_0001_r_000003_0/work,
     -Dhadoop.log.dir=/opt/hadoop-1.0.0/logs,
     -Dhadoop.root.logger=INFO,TLA,
     -Dhadoop.tasklog.taskid=attempt_201404230054_0001_r_000003_0,
     -Dhadoop.tasklog.iscleanup=false,
     -Dhadoop.tasklog.totalLogFileSize=0,
     org.apache.hadoop.mapred.Child,
     127.0.0.1, 48838,
     attempt_201404230054_0001_r_000003_0,
     /opt/hadoop-1.0.0/logs/userlogs/job_201404230054_0001/attempt_201404230054_0001_r_000003_0]

