• CDHkafka脚本


    启动客户端的命令

    /opt/cloudera/parcels/KAFKA-4.0.0-1.4.0.0.p0.1/bin/kafka-console-producer --broker-list hadoop102:9092 --topic topic_start

    去上面目录下找到 kafka-console-producer(下面这个包装脚本最终 exec 的正是 kafka-console-producer.sh)

    #!/bin/bash
    # CDH wrapper: resolve the physical location of this script (following any
    # chain of symlinks) so LIB_DIR can be computed relative to the real path,
    # then delegate to the bundled kafka-console-producer.sh.
    # Reference: http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in
    SOURCE="${BASH_SOURCE[0]}"
    BIN_DIR="$( dirname "$SOURCE" )"
    while [ -h "$SOURCE" ]
    do
      SOURCE="$(readlink "$SOURCE")"
      # BUGFIX: the original referenced undefined $DIR here (upstream calls
      # this variable DIR; CDH renamed it BIN_DIR but missed this line).
      # A relative symlink target must be resolved against the directory of
      # the link itself, otherwise it resolves against the empty string.
      [[ $SOURCE != /* ]] && SOURCE="$BIN_DIR/$SOURCE"
      BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
    done
    BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
    LIB_DIR=$BIN_DIR/../lib

    # Autodetect JAVA_HOME if not defined
    if [ -e $LIB_DIR/../../CDH/lib/bigtop-utils/bigtop-detect-javahome ] ; then
      . $LIB_DIR/../../CDH/lib/bigtop-utils/bigtop-detect-javahome
    fi

    exec $LIB_DIR/kafka/bin/kafka-console-producer.sh "$@"

    BASH_SOURCE[0] - 等价于 BASH_SOURCE,取得当前正在执行的 shell 脚本的路径及文件名。与 $0 不同,即使脚本是被 source 引入执行的,${BASH_SOURCE[0]} 仍然指向脚本文件本身,例如:

    bash modules/tools/planning_traj_plot/run.sh,${BASH_SOURCE[0]}代表的是modules/tools/planning_traj_plot/run.sh。

    dirname - 去除文件名中的非目录部分,仅显示与目录有关的部分,即提取文件的目录,例如 "modules/tools/planning_traj_plot/run.sh" 的目录为 "modules/tools/planning_traj_plot"。
    $() - 相当于 `command`, 即获取command命令的结果
    && - 逻辑运算符号,只有当&&左边运行成功时才会运行&&右边的命令

    cd -P 表示切换到真正的物理目录,而不是软链接目录

     综上,BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )",表示当前脚本所在目录的绝对路径
    LIB_DIR=$BIN_DIR/../lib,表示当前脚本上一层目录下的lib目录
    -e filename 如果 filename存在,则为真
    -d filename 如果 filename为目录,则为真 
    -f filename 如果 filename为常规文件,则为真
    -L filename 如果 filename为符号链接,则为真
    -r filename 如果 filename可读,则为真 
    -w filename 如果 filename可写,则为真 
    -x filename 如果 filename可执行,则为真
    -s filename 如果文件长度不为0,则为真
    -h filename 如果文件是软链接,则为真
    filename1 -nt filename2 如果 filename1比 filename2新,则为真。
    filename1 -ot filename2 如果 filename1比 filename2旧,则为真。
    -eq 等于
    -ne 不等于
    -gt 大于
    -ge 大于等于
    -lt 小于
    -le 小于等于

    于是在lib的/kafka/bin下,找到脚本kafka-console-producer.sh

    #!/bin/bash
    # Licensed to the Apache Software Foundation (ASF) under one or more
    # contributor license agreements.  See the NOTICE file distributed with
    # this work for additional information regarding copyright ownership.
    # The ASF licenses this file to You under the Apache License, Version 2.0
    # (the "License"); you may not use this file except in compliance with
    # the License.  You may obtain a copy of the License at
    #
    #    http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.

    # Give the console producer a 512MB heap unless the caller already
    # provided KAFKA_HEAP_OPTS in the environment.
    if [ -z "$KAFKA_HEAP_OPTS" ]; then
        export KAFKA_HEAP_OPTS="-Xmx512M"
    fi
    # Delegate to kafka-run-class.sh in the same directory, forwarding args.
    exec $(dirname $0)/kafka-run-class.sh kafka.tools.ConsoleProducer "$@"
    dirname $0,取得当前执行的脚本文件的目录
    找到了和当前脚本同一目录的kafka-run-class.sh文件
    #!/bin/bash
    # Licensed to the Apache Software Foundation (ASF) under one or more
    # contributor license agreements.  See the NOTICE file distributed with
    # this work for additional information regarding copyright ownership.
    # The ASF licenses this file to You under the Apache License, Version 2.0
    # (the "License"); you may not use this file except in compliance with
    # the License.  You may obtain a copy of the License at
    #
    #    http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    
    # Require at least the class name to run.
    if [ $# -lt 1 ];
    then
      echo "USAGE: $0 [-daemon] [-name servicename] [-loggc] classname [opts]"
      exit 1
    fi
    
    # CYGWIN == 1 if Cygwin is detected, else 0.
    if [[ $(uname -a) =~ "CYGWIN" ]]; then
      CYGWIN=1
    else
      CYGWIN=0
    fi
    
    # By default, test/source/doc jars are excluded from the classpath.
    if [ -z "$INCLUDE_TEST_JARS" ]; then
      INCLUDE_TEST_JARS=false
    fi
    
    # Exclude jars not necessary for running commands.
    # ERE matching *-test.jar, *-test-sources.jar, *-src.jar, *-scaladoc.jar,
    # *-javadoc.jar, and *jar.asc signature files (consumed by
    # should_include_file below).
    regex="(-(test|test-sources|src|scaladoc|javadoc).jar|jar.asc)$"
    # Decide whether a jar should go on the classpath.
    # $1 - jar file name/path. Returns 0 (include) unless the name matches the
    # global $regex (test/source/doc jars, .asc signatures); when
    # INCLUDE_TEST_JARS=true everything is included.
    should_include_file() {
      if [ "$INCLUDE_TEST_JARS" = true ]; then
        return 0
      fi
      local file=$1
      # Use bash's built-in ERE match instead of `echo | egrep`: egrep is a
      # deprecated alias for grep -E, and the pipeline forked two processes
      # for every jar examined.
      if [[ "$file" =~ $regex ]]; then
        return 1
      else
        return 0
      fi
    }
    
    # Root of the Kafka installation, one level above this script.
    base_dir=$(dirname $0)/..

    # Default the Scala version unless the caller provided one.
    [ -n "$SCALA_VERSION" ] || SCALA_VERSION=2.11.12

    # Binary version is the first two dot-separated components (e.g. 2.11).
    [ -n "$SCALA_BINARY_VERSION" ] || SCALA_BINARY_VERSION=$(echo $SCALA_VERSION | cut -f 1-2 -d '.')

    # run ./gradlew copyDependantLibs to get all dependant jars in a local dir
    # nullglob makes unmatched globs expand to nothing instead of themselves;
    # it stays on for all the jar loops below.
    shopt -s nullglob
    for dep_dir in "$base_dir"/core/build/dependant-libs-${SCALA_VERSION}*; do
      CLASSPATH="$CLASSPATH:$dep_dir/*"
    done
    
    # Example jars: included unless filtered out by should_include_file.
    for file in "$base_dir"/examples/build/libs/kafka-examples*.jar;
    do
      if should_include_file "$file"; then
        CLASSPATH="$CLASSPATH":"$file"
      fi
    done
    
    # When UPGRADE_KAFKA_STREAMS_TEST_VERSION is set, jars are taken from an
    # installed /opt/kafka-<version> tree (streams upgrade system tests);
    # otherwise they come from the local Gradle build output.
    if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
      clients_lib_dir=$(dirname $0)/../clients/build/libs
      streams_lib_dir=$(dirname $0)/../streams/build/libs
      rocksdb_lib_dir=$(dirname $0)/../streams/build/dependant-libs-${SCALA_VERSION}
    else
      clients_lib_dir=/opt/kafka-$UPGRADE_KAFKA_STREAMS_TEST_VERSION/libs
      streams_lib_dir=$clients_lib_dir
      rocksdb_lib_dir=$streams_lib_dir
    fi
    
    
    for file in "$clients_lib_dir"/kafka-clients*.jar;
    do
      if should_include_file "$file"; then
        CLASSPATH="$CLASSPATH":"$file"
      fi
    done
    
    for file in "$streams_lib_dir"/kafka-streams*.jar;
    do
      if should_include_file "$file"; then
        CLASSPATH="$CLASSPATH":"$file"
      fi
    done
    
    if [ -z "$UPGRADE_KAFKA_STREAMS_TEST_VERSION" ]; then
      # Normal case: pick up the locally built streams example jars.
      for file in "$base_dir"/streams/examples/build/libs/kafka-streams-examples*.jar;
      do
        if should_include_file "$file"; then
          CLASSPATH="$CLASSPATH":"$file"
        fi
      done
    else
      # Upgrade-system-test case: derive e.g. "20" from version "2.0.1".
      # BUGFIX: the dot must be escaped ('s/\.//g'). The unescaped 's/.//g'
      # deletes EVERY character ('.' matches anything in a regex), leaving
      # VERSION_NO_DOTS empty and the upgrade-test glob below broken.
      VERSION_NO_DOTS=`echo $UPGRADE_KAFKA_STREAMS_TEST_VERSION | sed 's/\.//g'`
      SHORT_VERSION_NO_DOTS=${VERSION_NO_DOTS:0:((${#VERSION_NO_DOTS} - 1))} # remove last char, ie, bug-fix number
      for file in "$base_dir"/streams/upgrade-system-tests-$SHORT_VERSION_NO_DOTS/build/libs/kafka-streams-upgrade-system-tests*.jar;
      do
        if should_include_file "$file"; then
          # Upgrade jars are prepended so they take precedence.
          CLASSPATH="$file":"$CLASSPATH"
        fi
      done
    fi
    
    # RocksDB jars (required by Streams) are always included — no filter.
    for file in "$rocksdb_lib_dir"/rocksdb*.jar;
    do
      CLASSPATH="$CLASSPATH":"$file"
    done
    
    for file in "$base_dir"/tools/build/libs/kafka-tools*.jar;
    do
      if should_include_file "$file"; then
        CLASSPATH="$CLASSPATH":"$file"
      fi
    done
    
    # Directory wildcards ("$dir/*") are left literal for the JVM to expand.
    for dir in "$base_dir"/tools/build/dependant-libs-${SCALA_VERSION}*;
    do
      CLASSPATH="$CLASSPATH:$dir/*"
    done
    
    # Kafka Connect: per-subproject jars plus their dependant-libs directories.
    for cc_pkg in "api" "transforms" "runtime" "file" "json" "tools" "basic-auth-extension"
    do
      for file in "$base_dir"/connect/${cc_pkg}/build/libs/connect-${cc_pkg}*.jar;
      do
        if should_include_file "$file"; then
          CLASSPATH="$CLASSPATH":"$file"
        fi
      done
      if [ -d "$base_dir/connect/${cc_pkg}/build/dependant-libs" ] ; then
        CLASSPATH="$CLASSPATH:$base_dir/connect/${cc_pkg}/build/dependant-libs/*"
      fi
    done
    
    # classpath addition for release
    for file in "$base_dir"/libs/*;
    do
      if should_include_file "$file"; then
        CLASSPATH="$CLASSPATH":"$file"
      fi
    done
    
    for file in "$base_dir"/core/build/libs/kafka_${SCALA_BINARY_VERSION}*.jar;
    do
      if should_include_file "$file"; then
        CLASSPATH="$CLASSPATH":"$file"
      fi
    done
    
    # Set SENTRY_HOME if possible and add Sentry jars to classpath
    # (CDH-specific addition; Apache Sentry provides authorization for Kafka).
    if [[ -z "$SENTRY_HOME" ]]; then
      if [[ -d ${base_dir}/../sentry ]]; then
        export SENTRY_HOME=`readlink -m ${base_dir}/../sentry`
      fi
    fi
    if [[ -n "$SENTRY_HOME" ]]; then
      for f in ${SENTRY_HOME}/lib/*.jar ${SENTRY_HOME}/lib/plugins/*.jar; do
        export CLASSPATH=${CLASSPATH}:${f}
      done
    fi
    
    # Include the sentry configuration on the classpath
    if [ -z "$SENTRY_CONF_DIR" ]; then
     SENTRY_CONF_DIR="/etc/kafka/conf/sentry-conf"
    fi
    export CLASSPATH=${CLASSPATH}:${SENTRY_CONF_DIR}
    
    # nullglob was only needed while expanding the jar globs above.
    shopt -u nullglob
    
    # Fail fast if no jars were found at all.
    if [ -z "$CLASSPATH" ] ; then
      echo "Classpath is empty. Please build the project first e.g. by running './gradlew jar -PscalaVersion=$SCALA_VERSION'"
      exit 1
    fi
    
    # JMX settings (remote JMX without auth/SSL; secure this in production).
    if [ -z "$KAFKA_JMX_OPTS" ]; then
      KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false  -Dcom.sun.management.jmxremote.ssl=false "
    fi
    
    # JMX port to use
    # BUGFIX: use an explicit, quoted -n test. The original bare `[ $JMX_PORT ]`
    # relies on unquoted expansion and misbehaves (syntax error or wrong
    # result) if the variable ever contains whitespace or test operators.
    if [ -n "$JMX_PORT" ]; then
      KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT "
    fi
    
    # Log directory to use
    if [ "x$LOG_DIR" = "x" ]; then
      LOG_DIR="$base_dir/logs"
    fi
    
    # Log4j settings
    if [ -z "$KAFKA_LOG4J_OPTS" ]; then
      # Log to console. This is a tool.
      LOG4J_DIR="$base_dir/config/tools-log4j.properties"
      # If Cygwin is detected, LOG4J_DIR is converted to Windows format.
      (( CYGWIN )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}")
      KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}"
    else
      # create logs directory (only when the caller supplied log4j opts)
      if [ ! -d "$LOG_DIR" ]; then
        mkdir -p "$LOG_DIR"
      fi
    fi
    
    # If Cygwin is detected, LOG_DIR is converted to Windows format.
    (( CYGWIN )) && LOG_DIR=$(cygpath --path --mixed "${LOG_DIR}")
    KAFKA_LOG4J_OPTS="-Dkafka.logs.dir=$LOG_DIR $KAFKA_LOG4J_OPTS"
    
    # Generic jvm settings you want to add
    if [ -z "$KAFKA_OPTS" ]; then
      KAFKA_OPTS=""
    fi
    
    # Attach a JDWP debug agent when KAFKA_DEBUG is set in the environment.
    if [ -n "$KAFKA_DEBUG" ]; then

        # Debugger listens on 5005 unless JAVA_DEBUG_PORT overrides it.
        DEFAULT_JAVA_DEBUG_PORT="5005"
        JAVA_DEBUG_PORT="${JAVA_DEBUG_PORT:-$DEFAULT_JAVA_DEBUG_PORT}"

        # Suspend on startup only when DEBUG_SUSPEND_FLAG=y; callers may
        # replace the whole agent string via JAVA_DEBUG_OPTS.
        DEFAULT_JAVA_DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=${DEBUG_SUSPEND_FLAG:-n},address=$JAVA_DEBUG_PORT"
        JAVA_DEBUG_OPTS="${JAVA_DEBUG_OPTS:-$DEFAULT_JAVA_DEBUG_OPTS}"

        echo "Enabling Java debug options: $JAVA_DEBUG_OPTS"
        KAFKA_OPTS="$JAVA_DEBUG_OPTS $KAFKA_OPTS"
    fi

    # Prefer $JAVA_HOME/bin/java when JAVA_HOME is set, else rely on PATH.
    if [ -n "$JAVA_HOME" ]; then
      JAVA="$JAVA_HOME/bin/java"
    else
      JAVA="java"
    fi

    # Memory options: modest default heap for CLI tools.
    KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:--Xmx256M}"

    # JVM performance options: G1 with low pause targets, headless mode.
    KAFKA_JVM_PERFORMANCE_OPTS="${KAFKA_JVM_PERFORMANCE_OPTS:--server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -Djava.awt.headless=true}"

    # Print version info and exit if --version appears anywhere in the args.
    for arg in "$@" ; do
      if [ "$arg" = "--version" ]; then
        exec $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "kafka.utils.VersionInfo"
      fi
    done
    
    # Consume leading mode flags; everything from the first non-flag argument
    # onward (the class name and its options) is left in "$@".
    while [ $# -gt 0 ]; do
      COMMAND=$1
      case $COMMAND in
        -name)   # Daemon name also determines the console output file.
                 DAEMON_NAME=$2
                 CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out
                 shift 2 ;;
        -loggc)  # Enable GC logging only if the caller supplied no GC opts.
                 [ -n "$KAFKA_GC_LOG_OPTS" ] || GC_LOG_ENABLED="true"
                 shift ;;
        -daemon) DAEMON_MODE="true"
                 shift ;;
        *)       break ;;
      esac
    done
    
    # GC options
    GC_FILE_SUFFIX='-gc.log'
    GC_LOG_FILE_NAME=''
    if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then
      GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX
    
      # The first segment of the version number, which is '1' for releases before Java 9
      # it then becomes '9', '10', ...
      # Some examples of the first line of `java --version`:
      # 8 -> java version "1.8.0_152"
      # 9.0.4 -> java version "9.0.4"
      # 10 -> java version "10" 2018-03-20
      # 10.0.1 -> java version "10.0.1" 2018-04-17
      # We need to match to the end of the line to prevent sed from printing the characters that do not match
      # BUGFIX: the sed replacement must be the back-reference '\1' (the
      # captured major version). A literal '1' made every JVM report major
      # version 1, selecting pre-Java-9 GC flags that JDK 9+ rejects.
      JAVA_MAJOR_VERSION=$($JAVA -version 2>&1 | sed -E -n 's/.* version "([0-9]*).*$/\1/p')
      if [[ "$JAVA_MAJOR_VERSION" -ge "9" ]] ; then
        # Java 9+ unified logging syntax (-Xlog:gc*).
        KAFKA_GC_LOG_OPTS="-Xlog:gc*:file=$LOG_DIR/$GC_LOG_FILE_NAME:time,tags:filecount=10,filesize=102400"
      else
        KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=100M"
      fi
    fi
    
    # Remove a possible colon prefix from the classpath (happens at lines like `CLASSPATH="$CLASSPATH:$file"` when CLASSPATH is blank)
    # Syntax used on the right side is native Bash string manipulation; for more details see
    # http://tldp.org/LDP/abs/html/string-manipulation.html, specifically the section titled "Substring Removal"
    CLASSPATH=${CLASSPATH#:}
    
    # If Cygwin is detected, classpath is converted to Windows format.
    (( CYGWIN )) && CLASSPATH=$(cygpath --path --mixed "${CLASSPATH}")
    
    # Launch mode
    # -daemon: detach with nohup, redirect stdout/stderr to the console file.
    # Foreground: replace this shell with the JVM via exec.
    if [ "x$DAEMON_MODE" = "xtrue" ]; then
      nohup $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null &
    else
      exec $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@"
    fi
  • 相关阅读:
    linux网络流控htb算法简析
    iptables 使用 转载
    tc分析转载
    Linux流量控制(TC)之表面 转载
    linux TC打标和限速
    海外华人分布总数突破6000万(转载)
    异步消息处理中Timestamp类型字段值为0转换json问题
    IDEA debug启动项目断点调试时依赖模块版本号不一致问题
    访问页面空白js文件状态failed(ERR_CONTENT_LENGTH_MISMATCH)问题
    我的LMDE系统udev规则配置
  • 原文地址:https://www.cnblogs.com/aidata/p/11893339.html
Copyright © 2020-2023  润新知