• spark deploy上传到私服


    一 parent下pom.xml新增

    1.私服地址
     <distributionManagement>
        <repository>
          <id>bilibili-nexus-releases</id>
          <name>Nexus Release Repository</name>
          <url>http://xxx/content/repositories/releases/</url>
        </repository>
        <snapshotRepository>
          <id>bilibili-nexus-snapshots</id>
          <name>Nexus Snapshot Repository</name>
          <url>http://xxx/content/repositories/snapshots/</url>
        </snapshotRepository>
      </distributionManagement>
    
    2. scala语法检查,若不修改编译不能通过
    <failOnViolation>true</failOnViolation> 
    修改为 
    <failOnViolation>false</failOnViolation>
    
    3.maven插件版本修改成与本地一致(这个不一定要修改)
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-shade-plugin</artifactId>
      <version>3.2.0</version>
    </plugin>
    
    4.dependencies依赖需加上
    
          <dependency>
            <groupId>com.cenqua.clover</groupId>
            <artifactId>clover</artifactId>
            <!-- Use the version needed by maven-clover-plugin -->
            <version>3.0.2</version>
          </dependency>
    5.maven-surefire-plugin插件里面添加这个
    <artifactId>maven-surefire-plugin</artifactId>
    <configuration>
      <testFailureIgnore>true</testFailureIgnore>
    </configuration>
    
    


    二 修改各依赖的pom.xml 

    spark-avro_2.11

    <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-core_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-sql_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>

    spark-catalyst_2.11

     <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->

    spark-graphx_2.11

     <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-core_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>

    spark-hive_2.11

     <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->
        <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-sql_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->
        <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-catalyst_${scala.binary.version}</artifactId>-->
          <!--<type>test-jar</type>-->
          <!--<version>${project.version}</version>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->

    spark-mllib_2.11

     <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->
       
        <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-catalyst_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->
        <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-sql_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->

    spark-streaming-kafka-0-8_2.11

    <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-core_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>

    spark-repl_2.11

     <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->

    spark-sql-kafka-0-10_2.11

    <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-core_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-sql_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>

    spark-sql_2.11

    <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->
        <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-catalyst_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->

    spark-streaming-kafka-0-10_2.11

     <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-core_${scala.binary.version}</artifactId>
          <version>${project.version}</version>
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        </dependency>

    spark-streaming_2.11

     <!--<dependency>-->
          <!--<groupId>org.apache.spark</groupId>-->
          <!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
          <!--<version>${project.version}</version>-->
          <!--<type>test-jar</type>-->
          <!--<scope>test</scope>-->
        <!--</dependency>-->

    三 编译命令

    mvn clean deploy -Dmaven.test.skip=true -U
  • 相关阅读:
    springboot入门系列(一):简单搭建springboot项目
    springboot入门系列(二):SpringBoot整合Swagger
    springboot入门系列(三):SpringBoot教程之RabbitMQ示例
    springboot入门系列(四):SpringBoot和Mybatis配置多数据源连接多个数据库
    Linux下安装RabbitMQ
    Mybatis原理之数据源和连接池
    springboot入门系列(五):SpringBoot连接多RabbitMQ源
    jsp中<c:foreach>分页标签的序号问题
    Java中删除一个文件夹下的所有文件(包括子目录内的文件)
    接口的幂等性
  • 原文地址:https://www.cnblogs.com/songchaolin/p/13606466.html
Copyright © 2020-2023  润新知