• Deploying Elasticsearch 7.14.1 + Kibana 7.14.1 + elasticsearch-head with docker-compose and setting an account password


    Learning resources

    https://www.bilibili.com/video/BV1eY411w7Lx/

    https://www.bilibili.com/video/BV1SQ4y1m7Ds?p=13

    Repository:

    https://gitee.com/haima1004/elasticsearch7.14.1_kabana.git

    Environment:
    Linux
    Docker version 19.03.15, build 99e3ed8919
    docker-compose version 1.25.5, build 8a1c60f6

    Create the project directory

    mkdir ./es-kabana

    Grant read/write/execute permission to everything under the new directory

    chmod -R 777 ./es-kabana
    cd es-kabana
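    The config files below are mounted from sub-directories of ./es-kabana, so it helps to create that scaffolding first. A minimal sketch matching the target structure shown further down (Docker would also create missing bind-mount directories on first start, but they would then be owned by root):

    mkdir -p es/config es/data es/logs es/plugins
    mkdir -p kibana/config kibana/logs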

    docker-compose.yml

    vim docker-compose.yml

    version: '3'
    
    # bridge network 'es' so the containers can reach each other by name
    networks:
      es:
    
    services:
      elasticsearch:
        image: registry.cn-hangzhou.aliyuncs.com/zhengqing/elasticsearch:7.14.1      # mirror of the original `elasticsearch:7.14.1` image
        container_name: elasticsearch             # container name 'elasticsearch'
        restart: unless-stopped                   # always restart the container, except when it was explicitly stopped (a stopped container stays stopped after the Docker daemon restarts)
        volumes:                                  # volume mounts: map host directories into the container
          - "./es/data:/usr/share/elasticsearch/data"
          - "./es/logs:/usr/share/elasticsearch/logs"
          - "./es/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml"
          - "./es/plugins/:/usr/share/elasticsearch/plugins"
          #- "./es/config/jvm.options:/usr/share/elasticsearch/config/jvm.options"
        environment:                              # environment variables, equivalent to -e in docker run
          TZ: Asia/Shanghai
          LANG: en_US.UTF-8
          discovery.type: single-node
          ES_JAVA_OPTS: "-Xmx512m -Xms512m"
          #ELASTIC_PASSWORD: "123456" # password for the built-in elastic user
        ports:
          - "9200:9200"
          - "9300:9300"
        networks:
          - es
    
      kibana:
        image: registry.cn-hangzhou.aliyuncs.com/zhengqing/kibana:7.14.1       # mirror of the original `kibana:7.14.1` image
        container_name: kibana
        restart: unless-stopped
        volumes:
          - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml
          - ./kibana/logs:/usr/share/kibana/logs
        environment:
          TZ: Asia/Shanghai    # set the container timezone to CST (default is UTC)
          LANG: en_US.UTF-8  
        ports:
          - "5601:5601"
        depends_on:
          - elasticsearch
        links:
          - elasticsearch
        networks:
          - es
      elasticsearch-head:
        image: wallbase/elasticsearch-head:6-alpine
        container_name: elasticsearch-head
        restart: unless-stopped
        environment:
          TZ: 'Asia/Shanghai'
        ports:
          - '9100:9100'
        networks:
          - es      
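    Before starting anything, the compose file can be sanity-checked; this prints the resolved configuration or reports YAML errors:

    docker-compose config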
     
    

    kibana.yml

    vim kibana/config/kibana.yml

    #
    # ** THIS IS AN AUTO-GENERATED FILE **
    #
    
    # Default Kibana configuration for docker target
    
    server.name: kibana
    server.host: "0.0.0.0"
    elasticsearch.hosts: [ "http://elasticsearch:9200" ] # e.g. http://www.zhengqingya.com:9200 TODO change to your own IP
    xpack.monitoring.ui.container.elasticsearch.enabled: true
    #elasticsearch.username: "elastic"  # ES username
    #elasticsearch.password: "123456"   # ES password
    i18n.locale: zh-CN # Chinese UI
    
    

    elasticsearch.yml

    vim ./es/config/elasticsearch.yml

    cluster.name: "docker-cluster"
    network.host: 0.0.0.0
    http.port: 9200
    # enable cross-origin requests (needed by elasticsearch-head)
    http.cors.enabled: true
    http.cors.allow-origin: "*"
    http.cors.allow-headers: Authorization,Content-Type
    # enable security controls (authentication)
    #xpack.security.enabled: true
    #xpack.security.transport.ssl.enabled: true
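    To actually set the account password from the title: uncomment xpack.security.enabled above, uncomment ELASTIC_PASSWORD in docker-compose.yml and elasticsearch.username/password in kibana.yml, then recreate the containers. A sketch of the steps (the interactive tool is an alternative that sets passwords for all built-in users):

    # after uncommenting the security settings in the three files above
    docker-compose down && docker-compose up -d

    # alternative: set passwords for all built-in users inside the container
    docker exec -it elasticsearch bin/elasticsearch-setup-passwords interactive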
    
    

    IK analyzer (Chinese tokenizer) download:
    https://github.com/medcl/elasticsearch-analysis-ik/releases/tag/v7.14.1
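    A sketch of installing it into the mounted plugins directory (the zip name is assumed from the release page above; the plugin is loaded when the container restarts):

    wget https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v7.14.1/elasticsearch-analysis-ik-7.14.1.zip
    mkdir -p ./es/plugins/analysis-ik-7.14.1
    unzip elasticsearch-analysis-ik-7.14.1.zip -d ./es/plugins/analysis-ik-7.14.1
    docker restart elasticsearch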

    Target directory structure

    [root@HmEduCentos01 elasticsearch]# tree
    .
    ├── docker-compose.yml
    ├── es
    │   ├── config
    │   │   └── elasticsearch.yml
    │   ├── data
    │   ├── plugins
    │   │   └── analysis-ik-7.14.1
    │   └── logs
    └── kibana
        ├── config
        │   └── kibana.yml
        └── logs
    
    

    Note:
    If the first run fails, it is because the directories created at startup do not have enough permissions.
    Grant 777 again on the generated es/data, es/logs, etc. directories:
    chmod -R 777 ./es-kabana
    Then remove the containers and start them again.
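    In shell form the recovery sequence is:

    docker-compose down
    chmod -R 777 ./es-kabana
    docker-compose up -d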

    [root@HmEduCentos01 elasticsearch]# ll
    total 12
    -rwxrwxrwx. 1 root root 1567 Apr 23 01:57 docker-compose.yml
    drwxrwxrwx. 5 root root 4096 Apr 23 01:55 es
    -rwxrwxrwx. 1 root root  430 Apr 23 01:46 kibana.yml
    

    Common commands:

    docker-compose up -d # start in the background
    docker-compose down  # stop and remove the containers
    
    [root@HmEduCentos01 docker]# docker ps -a
    CONTAINER ID   IMAGE                                                              COMMAND                  CREATED          STATUS          PORTS                                                                                  NAMES
    1c5b971d99c3   registry.cn-hangzhou.aliyuncs.com/zhengqing/kibana:7.14.1          "/bin/tini -- /usr/l…"   41 minutes ago   Up 41 minutes   0.0.0.0:5601->5601/tcp, :::5601->5601/tcp                                              kibana
    3ca20dcf4bd4   registry.cn-hangzhou.aliyuncs.com/zhengqing/elasticsearch:7.14.1   "/bin/tini -- /usr/l…"   41 minutes ago   Up 41 minutes   0.0.0.0:9200->9200/tcp, :::9200->9200/tcp, 0.0.0.0:9300->9300/tcp, :::9300->9300/tcp   elasticsearch
    1af7cf5fd1ad   wallbase/elasticsearch-head:6-alpine                               "/bin/sh -c 'node_mo…"   41 minutes ago   Up 41 minutes   0.0.0.0:9100->9100/tcp, :::9100->9100/tcp                                              elasticsearch-head
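    A few other commands that are handy while debugging the stack:

    docker-compose ps                      # status of the services in this compose file
    docker-compose logs -f elasticsearch   # follow the elasticsearch logs
    docker-compose restart kibana          # restart a single service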
    
    

    Access

    Elasticsearch:        http://<ip>:9200
    default credentials:  elastic/123456  # empty if security has not been enabled
    Kibana:               http://<ip>:5601/app/dev_tools#/console
    default credentials:  elastic/123456  # empty if security has not been enabled
    elasticsearch-head:   http://<ip>:9100
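    A quick way to confirm Elasticsearch is reachable is to curl it from the host; once authentication is enabled, pass the credentials with -u:

    curl http://localhost:9200                    # cluster name and version info
    curl "http://localhost:9200/_cat/health?v"    # cluster health
    curl -u elastic:123456 http://localhost:9200  # with security enabled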

    GET _search
    {
      "query":{
        "match_all":{}
      }
    }
    

    Reference:

    https://blog.csdn.net/qq_38225558/article/details/120580394?utm_medium=distribute.pc_aggpage_search_result.none-task-blog-2~aggregatepage~first_rank_ecpm_v1~rank_v31_ecpm-3-120580394.pc_agg_new_rank&utm_term=ES%E8%AE%BE%E7%BD%AE%E5%AF%86%E7%A0%81&spm=1000.2123.3001.4430
    

    Logstash download

    Huawei mirror
    I downloaded logstash-7.14.1-linux-x86_64.tar.gz here; download the version that matches your own system:
    https://mirrors.huaweicloud.com/logstash/7.14.1/

    Official site:
    https://www.elastic.co/cn/downloads/past-releases/logstash-7-14-1
    I downloaded the Linux x86_64 build here; download the version that matches your own system.
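    A sketch of downloading and unpacking it into the project directory (file name and mirror URL as given above):

    cd ./es-kabana
    wget https://mirrors.huaweicloud.com/logstash/7.14.1/logstash-7.14.1-linux-x86_64.tar.gz
    tar -zxvf logstash-7.14.1-linux-x86_64.tar.gz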

    Importing movies.csv data with Logstash

    1. Enter the /elasticsearch7.14.1_kabana/logstash-7.14.1 directory.
    2. Create a file named logstash.conf:
    input {
      file {
        # the quoted value is the actual path of movies.csv; adjust to your environment
        path => "/home/haima/local/docker/es-kabana/logstash-7.14.1/movies.csv"
        start_position => "beginning"
        # sincedb file that records how far the input has been read
        sincedb_path => "/home/haima/local/docker/es-kabana/logstash-7.14.1/db_path.log"
      }
    }
    filter {
      csv {
        separator => ","
        columns => ["id","content","genre"]
      }
    
      mutate {
        split => { "genre" => "|" }
        remove_field => ["path", "host","@timestamp","message"]
      }
    
      mutate {
    
        split => ["content", "("]
        add_field => { "title" => "%{[content][0]}"}
        add_field => { "year" => "%{[content][1]}"}
      }
    
      mutate {
        convert => {
          "year" => "integer"
        }
        strip => ["title"]
        remove_field => ["path", "host","@timestamp","message","content"]
      }
    
    }
    output {
       elasticsearch {
         # ES address in the double quotes; change it to match your environment
         hosts => "http://localhost:9200"
         index => "movies"
         document_id => "%{id}"
       }
      stdout {}
    }
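    For reference, the filter above assumes a MovieLens-style movies.csv, where the second column carries the year in parentheses and genres are separated by "|", for example:

    1,Toy Story (1995),Adventure|Animation|Children|Comedy|Fantasy
    2,Jumanji (1995),Adventure|Children|Fantasy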
    
    
    
    3. Run the import command:
      Open a terminal, change into the logstash bin directory, and run the following command to import the movies data.

    Linux command

    logstash -f ../config/logstash.conf

    Windows command

    logstash.bat -f D:\logstash-datas\config\logstash.conf

    4. Verify: open the Kibana Dev Tools console and run GET _cat/indices to check that the movies index was created and the data was imported.
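    The same check, plus a sample query against the new index, can also be done with curl from the host (assuming the default port and no authentication):

    curl "http://localhost:9200/_cat/indices?v"        # the movies index should be listed
    curl "http://localhost:9200/movies/_search?size=1" # fetch one imported document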

  • Original article: https://www.cnblogs.com/haima/p/15817428.html