########################ElasticSearch#######################
Environment:
192.168.125.200 elasticsearch+logstash+kibana node-1
192.168.125.201 elasticsearch+logstash node-2
192.168.125.202 nginx+filebeat node-3
192.168.125.203 nginx+filebeat node-4
Configuration:
All hosts can resolve each other's hostnames (e.g. via /etc/hosts).
1. Install elasticsearch. In a lab environment, give each virtual machine at least 1 GB of memory.
(1) Configure the yum repository for the 6.x ELK packages.
[elk-6]
name=elk-6
baseurl=https://artifacts.elastic.co/packages/6.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
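A minimal sketch of putting that stanza in place (the file name elk.repo is an assumption; any name under /etc/yum.repos.d/ works):
# Save the repo definition and import the Elastic signing key
cat > /etc/yum.repos.d/elk.repo <<'EOF'
[elk-6]
name=elk-6
baseurl=https://artifacts.elastic.co/packages/6.x/yum
gpgcheck=1
gpgkey=https://artifacts.elastic.co/GPG-KEY-elasticsearch
enabled=1
EOF
rpm --import https://artifacts.elastic.co/GPG-KEY-elasticsearch
yum makecache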
(2) Raise the limits on open file handles and processes.
[root@localhost ~]# cat /etc/security/limits.conf
* soft nofile 65536
* hard nofile 131072
* soft nproc 2048
* hard nproc 4096
[root@localhost ~]# cat /etc/security/limits.d/90-nproc.conf
* soft nproc 4096 // raise this above 2048
root soft nproc unlimited
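A quick way to confirm the new limits, assuming you open a fresh login shell after editing the files (PAM applies limits at login):
ulimit -n    # soft open-files limit, should report 65536
ulimit -u    # soft max-user-processes limit, should report 4096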
(3) Install elasticsearch with yum. Java 8 is required; install it yourself, it is not covered in detail here.
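A minimal install sketch under those assumptions (OpenJDK 8 from the distribution repos, elasticsearch from the elk-6 repo configured above):
yum install -y java-1.8.0-openjdk    # Java 8 runtime required by elasticsearch
yum install -y elasticsearch         # 6.x package from the elk-6 repo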
(4) Configure elasticsearch.
Adjust the JVM memory:
[root@localhost ~]# cat /etc/elasticsearch/jvm.options
## GC configuration
-XX:+UseConcMarkSweepGC
-XX:CMSInitiatingOccupancyFraction=75
-XX:+UseCMSInitiatingOccupancyOnly
-XX:ParallelGCThreads=1 // number of parallel GC threads
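The heap size lives in the same jvm.options file; a sketch sized for the 1 GB lab VMs assumed above (keep -Xms and -Xmx equal):
-Xms512m    # initial heap; 512m on a 1 GB VM is an assumption, tune for your host
-Xmx512m    # maximum heap, kept equal to the initial heap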
Configure the elasticsearch file. 200 and 201 use the same configuration, except for node.name and network.host, which are set per node.
[root@localhost ~]# grep -v ^#.* /etc/elasticsearch/elasticsearch.yml
cluster.name: ELK
node.name: node-1
path.data: /var/lib/elasticsearch
path.logs: /var/log/elasticsearch
bootstrap.memory_lock: false
bootstrap.system_call_filter: false
network.host: 192.168.125.200
discovery.zen.ping.unicast.hosts: ["192.168.125.200", "192.168.125.201"]
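For reference, a sketch of the two lines that differ on 201 (node-2), under that per-node assumption:
node.name: node-2
network.host: 192.168.125.201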
Start elasticsearch and check that it is listening:
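A minimal start sequence, assuming the RPM install registered a systemd unit named elasticsearch:
systemctl daemon-reload
systemctl enable elasticsearch    # start on boot
systemctl start elasticsearch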
[root@localhost ~]# ss -tunlp
9200/9300
9300 is the TCP transport port: inter-node traffic and TransportClient connections go through it. 9200 is the HTTP RESTful API port.
Ways to check the cluster status:
1. [root@node-1 ~]# curl -XGET 'http://192.168.125.200:9200/_cat/nodes?v'
ip heap.percent ram.percent cpu load_1m load_5m load_15m node.role master name
192.168.125.201 16 93 93 2.55 1.54 0.64 mdi - node-2
192.168.125.200 17 77 90 2.85 1.64 0.67 mdi * node-1
2. [root@node-1 ~]# curl -XGET 'http://192.168.125.200:9200/_cluster/state/nodes?pretty'
{
  "cluster_name" : "ELK",
  "compressed_size_in_bytes" : 287,
  "nodes" : {
    "22ItZADpTjym_G0aQeahqg" : {
      "name" : "node-2",
      "ephemeral_id" : "RSPI-3_gQnKhN0OH-FslHw",
      "transport_address" : "192.168.125.201:9300",
      "attributes" : { }
    },
    "G63XiHorRr2PHDAWUaOmuQ" : {
      "name" : "node-1",
      "ephemeral_id" : "KeuW_0oEQBOaQjn5CU4jzw",
      "transport_address" : "192.168.125.200:9300",
      "attributes" : { }
    }
  }
}
3. Query the master node:
[root@node-1 ~]# curl -XGET 'http://192.168.125.200:9200/_cluster/state/master_node?pretty'
{
  "cluster_name" : "ELK",
  "compressed_size_in_bytes" : 287,
  "master_node" : "G63XiHorRr2PHDAWUaOmuQ"
}
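An alternative that resolves the master id to a name in one call is the _cat/master endpoint (its columns are id, host, ip and node):
[root@node-1 ~]# curl -XGET 'http://192.168.125.200:9200/_cat/master?v'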
4. Check the cluster health:
[root@node-1 ~]# curl -XGET 'http://192.168.125.200:9200/_cluster/health?pretty'
{
  "cluster_name" : "ELK",
  "status" : "green",
  "timed_out" : false,
  "number_of_nodes" : 2,
  "number_of_data_nodes" : 2,
  "active_primary_shards" : 0,
  "active_shards" : 0,
  "relocating_shards" : 0,
  "initializing_shards" : 0,
  "unassigned_shards" : 0,
  "delayed_unassigned_shards" : 0,
  "number_of_pending_tasks" : 0,
  "number_of_in_flight_fetch" : 0,
  "task_max_waiting_in_queue_millis" : 0,
  "active_shards_percent_as_number" : 100.0
}
[root@node-1 ~]# curl -XGET 'http://192.168.125.200:9200/_cat/health?v'
epoch timestamp cluster status node.total node.data shards pri relo init unassign pending_tasks max_task_wait_time active_shards_percent
1522313584 16:53:04 ELK green 2 2 0 0 0 0 0 0 - 100.0%
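Once Filebeat and Logstash start shipping data, the indices and their per-index health can be checked the same way (an extra check, not part of the original run):
[root@node-1 ~]# curl -XGET 'http://192.168.125.200:9200/_cat/indices?v'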