I. Installation and Startup
1. Download: https://elasticsearch.cn/download
2. Extract: tar -zxvf elasticsearch...
IK analyzer repository: https://github.com/medcl/elasticsearch-analysis-ik
Download the IK analyzer: https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v7.6.1/elasticsearch-analysis-ik-7.6.1.zip
Unzip it into plugins/ik/
3. Edit the Elasticsearch config file (vim config/elasticsearch.yml) and append at the bottom:
http.cors.enabled: true
http.cors.allow-origin: "*"
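Once ES is running (step 4 below), the IK plugin can be checked with an _analyze request. A minimal sketch, assuming ES answers on localhost:9200; ik_max_word and ik_smart are the analyzers the plugin provides:
curl -X POST "http://localhost:9200/_analyze" -H 'Content-Type: application/json' -d '
{
  "analyzer": "ik_max_word",
  "text": "中华人民共和国"
}'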
Running ES with Docker
Start a throwaway ES container first, then copy the config directory out of it (see below).
docker network create webservice_web-service # the network must be created first
#containers on this network (e.g. the Java service) can then reach ES by name
/usr/local/docker/elasticsearch/data # this directory must be writable by the container: chmod 777 ...
docker run -d --restart unless-stopped --name elasticsearch \
  --net webservice_web-service \
  -p 9200:9200 -p 9300:9300 \
  -v /usr/local/docker/elasticsearch/config:/usr/share/elasticsearch/config \
  -v /usr/local/docker/elasticsearch/data:/usr/share/elasticsearch/data \
  -v /usr/local/docker/elasticsearch/plugins:/usr/share/elasticsearch/plugins \
  -e "discovery.type=single-node" \
  elasticsearch:6.4.2
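Quick check that the container came up (assuming port 9200 is published on the host as above):
curl http://localhost:9200      # should return node name, cluster name and version
docker logs -f elasticsearch    # watch the startup log if the curl fails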
Edit elasticsearch.yml:
network.host: 192.168.10.10 # 0.0.0.0 also works here
http.port: 9200 # optional, 9200 is the default
http.cors.enabled: true
http.cors.allow-origin: "*" # allow cross-origin access
http.cors.allow-headers: Authorization
4. Start in the background
./elasticsearch -d # from the bin directory
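With CORS enabled and ES restarted, the headers can be spot-checked before wiring up elasticsearch-head. A sketch; the Origin value is just an example, any origin is accepted with allow-origin "*":
curl -i -H "Origin: http://localhost:9100" http://192.168.10.10:9200/
# the response headers should include access-control-allow-origin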
5. Install the web UI (elasticsearch-head)
#download the elasticsearch-head plugin from GitHub
#install a Node.js runtime
#download Node.js: https://nodejs.org/dist/v9.9.0/node-v9.9.0-linux-x64.tar.xz
#extract: tar -xvf node-v9.9.0-linux-x64.tar.xz
mv node-v9.9.0-linux-x64 ~/
ln -s ~/node-v9.9.0-linux-x64/bin/node /usr/bin/node
ln -s ~/node-v9.9.0-linux-x64/bin/npm /usr/bin/npm
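Sanity-check the symlinks:
node -v   # should print v9.9.0
npm -v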
Install Kibana
Edit kibana.yml:
server.host: "192.168.10.10"
elasticsearch.url: "http://192.168.10.10:9200"
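Kibana is then started from its own directory; a sketch for the tarball install, default port 5601:
./bin/kibana            # foreground
nohup ./bin/kibana &    # or keep it running in the background
# then open http://192.168.10.10:5601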
6. Start the web UI
#inside the elasticsearch-head directory
npm install
npm run start
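elasticsearch-head listens on port 9100 by default; open it in a browser and point it at the ES address configured above (this is what the CORS settings are for):
# UI:   http://192.168.10.10:9100
# then enter http://192.168.10.10:9200 in the connect box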
7. Cluster
#master node config
#append the following to elasticsearch.yml
http.cors.enabled: true
http.cors.allow-origin: "*"
cluster.name: kexianjun
node.name: master
node.master: true
network.host: 127.0.0.1
#slave node config
#append the following to elasticsearch.yml; cluster.name must match the master, only node.name and http.port change per node
cluster.name: kexianjun
node.name: slave1
network.host: 127.0.0.1
http.port: 8200
discovery.zen.ping.unicast.hosts: ["127.0.0.1"]
#then just start each node
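Once both nodes are up, membership can be verified over HTTP (assuming the ports above; the slave also answers on 8200):
curl "http://127.0.0.1:9200/_cat/nodes?v"
curl "http://127.0.0.1:9200/_cluster/health?pretty"   # number_of_nodes should be 2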
Start a container and copy the config files out:
docker run --name elasticsearch -d elasticsearch:7.6.1
docker cp elasticsearch:/usr/share/elasticsearch/config ./
docker rm -f elasticsearch
docker-compose.yml
version: '3.1'
services:
  elasticsearch:
    image: elasticsearch:7.6.1
    restart: always
    container_name: elasticsearch
    privileged: true
    ports:
      - 9200:9200
      - 9300:9300
    environment:
      - discovery.type=single-node
      - cluster.name=elasticsearch
      - bootstrap.memory_lock=true
      - http.cors.enabled=true
      - http.cors.allow-origin=*
      - ES_JAVA_OPTS=-Xms1g -Xmx1g
      - network.host=0.0.0.0
      - node.name=elasticsearch
      - TZ=Asia/Shanghai
    ulimits:            # needed for bootstrap.memory_lock=true to actually lock memory
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./data:/usr/share/elasticsearch/data:rw
      - ./config:/usr/share/elasticsearch/config
      - ./logs:/usr/share/elasticsearch/logs:rw
      - ./plugins:/usr/share/elasticsearch/plugins
      - /etc/localtime:/etc/localtime
  kibana:
    image: kibana:7.6.1
    restart: always
    container_name: kibana
    ports:
      - 5601:5601
    environment:
      - SERVER_NAME=kibana
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200   # Kibana 7.x name; ELASTICSEARCH_URL is the 6.x setting
      - XPACK_MONITORING_ENABLED=true
      - TZ=Asia/Shanghai
    depends_on:
      - elasticsearch
    external_links:
      - elasticsearch
networks:
  default:
    external:
      name: kexianjun_network
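To bring this stack up, a sketch; the external network referenced at the bottom of the file must already exist, and ES needs the usual vm.max_map_count kernel setting:
docker network create kexianjun_network   # the compose file expects this external network
sysctl -w vm.max_map_count=262144         # required by Elasticsearch
docker-compose up -d
curl http://localhost:9200                # Elasticsearch
# Kibana: http://localhost:5601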
If startup fails, grant write permission on the mounted directories: chmod 777 data logs
Keep the ES, IK plugin and Kibana versions aligned, otherwise it is very hard to start and you get all kinds of errors. But the Java code still fails when connecting to 9300, why? (9300 is the binary transport port used by the legacy TransportClient, which is deprecated in 7.x; the Java REST clients talk HTTP on 9200, so a client/server version mismatch on 9300 is a likely cause.)
II. Basic Usage
match_all: matches every document
match: full-text query on a single field
multi_match: the same query text across several fields
match_phrase: phrase query, roughly the equivalent of a MySQL fuzzy/LIKE query; the whole phrase must be present
match_phrase_prefix: phrase query whose last term is matched as a prefix
range: range query, from ... to ...
include_lower: true # whether the lower bound is inclusive
fuzzy: fuzzy query that tolerates small spelling errors
highlight: highlighting, the fields to highlight must be specified
includes: return only the listed fields in _source
excludes: drop the listed fields from _source
match analyzes the query text; term/terms do not (they match exact, unanalyzed values); see the example requests after this list
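A few example requests for the query types above. A sketch only: the index user and the fields name/age/address are made up, and the same bodies can be run from the Kibana Dev Tools console instead of curl.
# match on one field
curl -X GET "http://localhost:9200/user/_search" -H 'Content-Type: application/json' -d '
{ "query": { "match": { "name": "张三" } } }'

# phrase match with highlighting
curl -X GET "http://localhost:9200/user/_search" -H 'Content-Type: application/json' -d '
{
  "query": { "match_phrase": { "name": "张三" } },
  "highlight": { "fields": { "name": {} } }
}'

# range query plus _source includes/excludes
curl -X GET "http://localhost:9200/user/_search" -H 'Content-Type: application/json' -d '
{
  "_source": { "includes": ["name", "age"], "excludes": ["address"] },
  "query": { "range": { "age": { "gte": 18, "lte": 30 } } }
}'

# exact value match, no analysis (keyword sub-field assumed from dynamic mapping)
curl -X GET "http://localhost:9200/user/_search" -H 'Content-Type: application/json' -d '
{ "query": { "term": { "name.keyword": "张三" } } }'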
Aggregation queries (see the aggregation sketch below)
aggs (sum, avg, min, max)
aggs (cardinality: number of distinct values; terms: group by a field)
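A matching aggregation sketch under the same assumptions (made-up user index, age and city fields; size 0 returns only the aggregation results):
curl -X GET "http://localhost:9200/user/_search" -H 'Content-Type: application/json' -d '
{
  "size": 0,
  "aggs": {
    "avg_age":       { "avg":         { "field": "age" } },
    "max_age":       { "max":         { "field": "age" } },
    "by_city":       { "terms":       { "field": "city.keyword" } },
    "distinct_city": { "cardinality": { "field": "city.keyword" } }
  }
}'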
Cluster docker-compose
version: '3.1'
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.1
    container_name: elasticsearch
    restart: always
    privileged: true
    environment:
      - node.name=es01
      - cluster.name=kexianjun-cluster
      - discovery.seed_hosts=es02,es03
      - cluster.initial_master_nodes=es01,es02,es03   # must list node.name values, not container names
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./es01/data:/usr/share/elasticsearch/data:rw
      - ./es01/logs:/usr/share/elasticsearch/logs:rw
      - ./es01/config:/usr/share/elasticsearch/config
      - ./es01/plugins:/usr/share/elasticsearch/plugins
      - /etc/localtime:/etc/localtime
    ports:
      - 9200:9200
      - 9300:9300
  es02:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.1
    container_name: es02
    restart: always
    privileged: true
    environment:
      - node.name=es02
      - cluster.name=kexianjun-cluster
      - discovery.seed_hosts=elasticsearch,es03
      - cluster.initial_master_nodes=es01,es02,es03
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./es02/data:/usr/share/elasticsearch/data:rw
      - ./es02/logs:/usr/share/elasticsearch/logs:rw
      - ./es02/config:/usr/share/elasticsearch/config
      - ./es02/plugins:/usr/share/elasticsearch/plugins
      - /etc/localtime:/etc/localtime
  es03:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.1
    container_name: es03
    restart: always
    privileged: true
    environment:
      - node.name=es03
      - cluster.name=kexianjun-cluster
      - discovery.seed_hosts=elasticsearch,es02
      - cluster.initial_master_nodes=es01,es02,es03
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./es03/data:/usr/share/elasticsearch/data:rw
      - ./es03/logs:/usr/share/elasticsearch/logs:rw
      - ./es03/config:/usr/share/elasticsearch/config
      - ./es03/plugins:/usr/share/elasticsearch/plugins
      - /etc/localtime:/etc/localtime
  logstash:
    image: logstash:7.6.1
    container_name: logstash
    restart: always
    ports:
      - 5044:5044
    environment:
      discovery.type: single-node
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
    volumes:
      - ./logstash/config/:/usr/share/logstash/config/
      - ./logstash/pipeline/:/usr/share/logstash/pipeline/
    depends_on:
      - elasticsearch
  kibana:
    image: kibana:7.6.1
    restart: always
    container_name: kibana
    ports:
      - 5601:5601
    environment:
      - SERVER_NAME=kibana
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200   # Kibana 7.x name; ELASTICSEARCH_URL is the 6.x setting
      - XPACK_MONITORING_ENABLED=true
    volumes:
      - /etc/localtime:/etc/localtime   # was listed under environment by mistake
    depends_on:
      - elasticsearch
    external_links:
      - elasticsearch
networks:
  default:
    external:
      name: kexianjun_network
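Bringing the cluster up and checking that all three nodes joined (same assumptions as the single-node stack: the external network must exist and vm.max_map_count must be raised):
docker network create kexianjun_network
sysctl -w vm.max_map_count=262144
docker-compose up -d
curl "http://localhost:9200/_cat/nodes?v"             # should list es01, es02 and es03
curl "http://localhost:9200/_cluster/health?pretty"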