Enable Stack Features (X-Pack) and trial by default (#387)

parent 270933fd
@@ -4,6 +4,10 @@ services: docker
env:
  - DOCKER_COMPOSE_VERSION=1.14.0

before_install:
  - sudo apt-get update
  - sudo apt-get install -y expect jq

install:
  # Install Docker Compose
  - curl -L "https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-$(uname -s)-$(uname -m)" -o "$HOME/bin/docker-compose"
@@ -16,38 +20,38 @@ before_script:
  # Pull buildpack image (curl 7.52.0+)
  - docker pull buildpack-deps:artful-curl

  # Use built-in users
  - sed -i 's/\(elasticsearch.username:\) elastic/\1 kibana/g' kibana/config/kibana.yml
  - sed -i 's/\(xpack.monitoring.elasticsearch.username:\) elastic/\1 logstash_system/g' logstash/config/logstash.yml

script:
  # Compose
  - docker-compose up -d elasticsearch
  - sleep 20
  - .travis/elasticsearch-setup-passwords.exp
  - docker-compose up -d
  - sleep 90
  - .travis/run-tests.sh
  - docker-compose ps
  - docker-compose logs elasticsearch
  - docker-compose logs kibana
  - docker-compose logs logstash
  - docker-compose down -v

  # Swarm
  - docker swarm init
  - docker stack deploy -c ./docker-stack.yml elk
  - docker service scale elk_kibana=0 --detach=false
  - docker service scale elk_logstash=0 --detach=false
  - sleep 60
  - .travis/elasticsearch-setup-passwords.exp swarm
  - docker service scale elk_kibana=1 --detach=false
  - docker service scale elk_logstash=1 --detach=false
  - sleep 90
  - .travis/run-tests.sh
  - docker stack services elk
  - docker service logs elk_elasticsearch
  - docker service logs elk_kibana
  - docker service logs elk_logstash
  - docker stack rm elk

#!/usr/bin/expect -f
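# Initialize passwords for the Elasticsearch built-in users by driving
# 'bin/elasticsearch-setup-passwords interactive' inside the running
# Elasticsearch container (Compose by default, or Swarm when invoked with
# the 'swarm' argument -- see .travis.yml).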
# List of expected users with dummy password
set user "(elastic|apm_system|kibana|logstash_system|beats_system|remote_monitoring_user)"
set password "changeme"
# Find elasticsearch container id
set MODE [lindex $argv 0]
if { [string match "swarm" $MODE] } {
    set cid [exec docker ps -q -f label=com.docker.swarm.service.name=elk_elasticsearch]
} else {
    set cid [exec docker ps -q -f label=com.docker.compose.service=elasticsearch]
}
set cmd "docker exec -it $cid bin/elasticsearch-setup-passwords interactive -s -b"
spawn {*}$cmd
expect {
    -re "(E|Ree)nter password for \\\[$user\\\]: " {
        send "$password\r"
        exp_continue
    }
    eof
}

#!/usr/bin/env bash
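# Smoke tests for an already-running stack (Compose or Swarm): check
# Elasticsearch/Kibana/Logstash readiness, create a Kibana index pattern,
# then push one log line through Logstash and verify it was indexed.
# Invoked from .travis.yml once the services are up.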
set -eu
set -o pipefail
shopt -s expand_aliases
alias curl="docker run --rm --net=host buildpack-deps:artful-curl curl -s -w '\n'"
function log {
    echo -e "\n[+] $1\n"
}
log 'Waiting for Elasticsearch readiness'
curl -D- 'http://localhost:9200/' \
--retry 10 \
--retry-delay 5 \
--retry-connrefused \
-u elastic:changeme
log 'Waiting for Kibana readiness'
curl -D- 'http://localhost:5601/api/status' \
--retry 10 \
--retry-delay 5 \
--retry-connrefused \
-u kibana:changeme
log 'Waiting for Logstash readiness'
curl -D- 'http://localhost:9600/_node/pipelines/main?pretty' \
--retry 10 \
--retry-delay 5 \
--retry-connrefused
log 'Creating Logstash index pattern in Kibana'
source .env
curl -X POST -D- 'http://localhost:5601/api/saved_objects/index-pattern' \
-H 'Content-Type: application/json' \
-H "kbn-version: ${ELK_VERSION}" \
-u kibana:changeme \
-d '{"attributes":{"title":"logstash-*","timeFieldName":"@timestamp"}}'
log 'Searching index pattern via Kibana API'
response="$(curl 'http://localhost:5601/api/saved_objects/_find?type=index-pattern' -u elastic:changeme)"
echo "$response"
count="$(jq -rn --argjson data "${response}" '$data.total')"
if [[ $count -ne 1 ]]; then
    echo "Expected 1 index pattern, got ${count}"
    exit 1
fi
log 'Sending message to Logstash TCP input'
echo 'dockerelk' | nc localhost 5000
sleep 1
curl -X POST 'http://localhost:9200/_refresh' -u elastic:changeme
log 'Searching message in Elasticsearch'
response="$(curl 'http://localhost:9200/_count?q=message:dockerelk&pretty' -u elastic:changeme)"
echo "$response"
count="$(jq -rn --argjson data "${response}" '$data.count')"
if [[ $count -ne 1 ]]; then
    echo "Expected 1 document, got ${count}"
    exit 1
fi

@@ -4,10 +4,14 @@
[![Elastic Stack version](https://img.shields.io/badge/ELK-6.7.0-blue.svg?style=flat)](https://github.com/deviantony/docker-elk/issues/376)
[![Build Status](https://api.travis-ci.org/deviantony/docker-elk.svg?branch=master)](https://travis-ci.org/deviantony/docker-elk)

Run the latest version of the [Elastic stack][elk-stack] with Docker and Docker Compose.

It gives you the ability to analyze any data set by using the searching/aggregation capabilities of Elasticsearch and
the visualization power of Kibana.

> :information_source: The Docker images backing this stack include [Stack Features][stack-features] (formerly X-Pack)
with [paid features][paid-features] enabled by default (see [How to disable paid
features](#how-to-disable-paid-features) to disable them). The [trial license][trial-license] is valid for 30 days.

Based on the official Docker images from Elastic:

@@ -15,38 +19,41 @@ Based on the official Docker images from Elastic:
* [logstash](https://github.com/elastic/logstash-docker)
* [kibana](https://github.com/elastic/kibana-docker)

Other available stack variants:

* [`searchguard`](https://github.com/deviantony/docker-elk/tree/searchguard): Search Guard support
* [`vagrant`](https://github.com/deviantony/docker-elk/tree/vagrant): run Docker inside Vagrant
## Contents

1. [Requirements](#requirements)
   * [Host setup](#host-setup)
   * [SELinux](#selinux)
   * [Docker for Desktop](#docker-for-desktop)
     * [Windows](#windows)
     * [macOS](#macos)
2. [Usage](#usage)
   * [Bringing up the stack](#bringing-up-the-stack)
   * [Initial setup](#initial-setup)
     * [Setting up user authentication](#setting-up-user-authentication)
     * [Default Kibana index pattern creation](#default-kibana-index-pattern-creation)
3. [Configuration](#configuration)
   * [How to configure Elasticsearch](#how-to-configure-elasticsearch)
   * [How to configure Kibana](#how-to-configure-kibana)
   * [How to configure Logstash](#how-to-configure-logstash)
   * [How to disable paid features](#how-to-disable-paid-features)
   * [How to scale out the Elasticsearch cluster](#how-to-scale-out-the-elasticsearch-cluster)
4. [Storage](#storage)
   * [How to persist Elasticsearch data](#how-to-persist-elasticsearch-data)
5. [Extensibility](#extensibility)
   * [How to add plugins](#how-to-add-plugins)
   * [How to enable the provided extensions](#how-to-enable-the-provided-extensions)
6. [JVM tuning](#jvm-tuning)
   * [How to specify the amount of memory used by a service](#how-to-specify-the-amount-of-memory-used-by-a-service)
   * [How to enable a remote JMX connection to a service](#how-to-enable-a-remote-jmx-connection-to-a-service)
7. [Going further](#going-further)
   * [Using a newer stack version](#using-a-newer-stack-version)
   * [Plugins and integrations](#plugins-and-integrations)
   * [Swarm mode](#swarm-mode)

## Requirements

@@ -56,6 +63,12 @@ Based on the official Docker images from Elastic:
2. Install [Docker Compose](https://docs.docker.com/compose/install/) version **1.6.0+**
3. Clone this repository

By default, the stack exposes the following ports:
* 5000: Logstash TCP input
* 9200: Elasticsearch HTTP
* 9300: Elasticsearch TCP transport
* 5601: Kibana

### SELinux

On distributions which have SELinux enabled out-of-the-box you will need to either re-context the files or set SELinux
@@ -66,17 +79,23 @@ apply the proper context:
$ chcon -R system_u:object_r:admin_home_t:s0 docker-elk/
```

### Docker for Desktop

#### Windows

Ensure the [Shared Drives][win-shareddrives] feature is enabled for the `C:` drive.

#### macOS

The default Docker for Mac configuration allows mounting files from `/Users/`, `/Volumes/`, `/private/`, and `/tmp`
exclusively. Make sure the repository is cloned in one of those locations or follow the instructions from the
[documentation][mac-mounts] to add more locations.

## Usage

### Bringing up the stack

Start the stack using Docker Compose:

```console
$ docker-compose up
@@ -84,28 +103,50 @@ $ docker-compose up

You can also run all services in the background (detached mode) by adding the `-d` flag to the above command.

> :information_source: You must run `docker-compose build` first whenever you switch branch or update a base image.

If you are starting the stack for the very first time, please read the section below attentively.

## Initial setup

### Setting up user authentication

> :information_source: Refer to [How to disable paid features](#how-to-disable-paid-features) to disable authentication.

The stack is pre-configured with the following **privileged** bootstrap user:
* user: *elastic*
* password: *changeme*
Although all stack components work out-of-the-box with this user, we strongly recommend using the unprivileged [built-in
users][builtin-users] instead for increased security. Passwords for these users must be initialized:
```console
$ docker-compose exec -T elasticsearch 'bin/elasticsearch-setup-passwords' auto --batch
```

Passwords for all 6 built-in users will be randomly generated. Take note of them and replace the `elastic` username with
`kibana` and `logstash_system` inside the Kibana and Logstash *pipeline* configuration files respectively. See the
[Configuration](#configuration) section below.
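
As an illustration, the CI configuration in this repository applies the username substitutions with `sed`; a sketch along
the same lines, assuming you also paste in the generated passwords in place of the placeholders:

```console
$ sed -i 's/\(elasticsearch.username:\) elastic/\1 kibana/g' kibana/config/kibana.yml
$ sed -i 's/\(elasticsearch.password:\) changeme/\1 <your generated kibana password>/g' kibana/config/kibana.yml
$ sed -i 's/\(xpack.monitoring.elasticsearch.username:\) elastic/\1 logstash_system/g' logstash/config/logstash.yml
$ sed -i 's/\(xpack.monitoring.elasticsearch.password:\) changeme/\1 <your generated logstash_system password>/g' logstash/config/logstash.yml
```

The `elastic` credentials used by the Logstash pipeline output (`user`/`password` in the pipeline configuration shown at
the bottom of this change) need the generated `elastic` password as well.
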
Restart Kibana and Logstash to apply the passwords you just wrote to the configuration files.
```console
$ docker-compose restart kibana logstash
```
Give Kibana a few seconds to initialize, then access the Kibana web UI by hitting
[http://localhost:5601](http://localhost:5601) with a web browser and use the following default credentials to login:
* user: *elastic*
* password: *\<your generated elastic password>*
Now that the stack is running, you can go ahead and inject some log entries. The shipped Logstash configuration allows
you to send content via TCP:
```console
$ nc localhost 5000 < /path/to/logfile.log
```

### Default Kibana index pattern creation

@@ -113,12 +154,11 @@ When Kibana launches for the first time, it is not configured with any index pat

#### Via the Kibana web UI

> :information_source: You need to inject data into Logstash before being able to configure a Logstash index pattern via
the Kibana web UI. Then all you have to do is hit the *Create* button.

Refer to [Connect Kibana with Elasticsearch][connect-kibana] for detailed instructions about the index pattern
configuration.

#### On the command line

@@ -128,6 +168,7 @@ Create an index pattern via the Kibana API:
$ curl -XPOST -D- 'http://localhost:5601/api/saved_objects/index-pattern' \
    -H 'Content-Type: application/json' \
    -H 'kbn-version: 6.7.0' \
    -u kibana:<your generated kibana password> \
    -d '{"attributes":{"title":"logstash-*","timeFieldName":"@timestamp"}}'
```

@@ -135,29 +176,14 @@ The created pattern will automatically be marked as the default index pattern as

## Configuration

> :information_source: Configuration is not dynamically reloaded, you will need to restart individual components after
any configuration change.
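
For example, to apply a change to the Logstash configuration:

```console
$ docker-compose restart logstash
```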

### How to configure Elasticsearch

The Elasticsearch configuration is stored in [`elasticsearch/config/elasticsearch.yml`][config-es].

You can also specify the options you want to override by setting environment variables inside the Compose file:

```yml
elasticsearch:
@@ -167,14 +193,31 @@ elasticsearch:
cluster.name: "my-cluster"
```

### How to configure Kibana

The Kibana default configuration is stored in [`kibana/config/kibana.yml`][config-kbn].
It is also possible to map the entire `config` directory instead of a single file.
### How to configure Logstash
The Logstash configuration is stored in [`logstash/config/logstash.yml`][config-ls].
It is also possible to map the entire `config` directory instead of a single file, however you must be aware that
Logstash will be expecting a [`log4j2.properties`][log4j-props] file for its own logging.

### How to disable paid features
Switch the value of Elasticsearch's `xpack.license.self_generated.type` option from `trial` to `basic` (see [License
settings][trial-license]).
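
As a sketch, this can be done with the same kind of `sed` one-liner the CI setup uses for other settings (or simply edit
the file by hand), followed by a restart of Elasticsearch:

```console
$ sed -i 's/\(xpack.license.self_generated.type:\) trial/\1 basic/' elasticsearch/config/elasticsearch.yml
$ docker-compose restart elasticsearch
```
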
### How to scale out the Elasticsearch cluster
Follow the instructions from the Wiki: [Scaling out Elasticsearch](https://github.com/deviantony/docker-elk/wiki/Elasticsearch-cluster)

## Storage

### How to persist Elasticsearch data

The data stored in Elasticsearch will be persisted after container reboot but not after container removal.

@@ -190,18 +233,13 @@ elasticsearch:

This will store Elasticsearch data inside `/path/to/storage`.

> :information_source: (Linux users) Beware that the unprivileged [`elasticsearch` user][esuser] is used within the
Elasticsearch image, therefore the mounted data directory must be writable by the uid `1000`.
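
For example, if the Elasticsearch data is bind-mounted from `/path/to/storage` on the host, handing ownership of the
directory to uid `1000` is enough (a sketch; adjust the path to your setup):

```console
$ sudo chown -R 1000 /path/to/storage
```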

## Extensibility

### How to add plugins

To add plugins to any ELK component you have to:

@@ -209,7 +247,7 @@ To add plugins to any ELK component you have to:
2. Add the associated plugin code configuration to the service configuration (e.g. Logstash input/output)
3. Rebuild the images using the `docker-compose build` command (see the example below)
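
For example, after adding `RUN logstash-plugin install logstash-filter-json` to the Logstash Dockerfile (the sample
plugin already suggested in the Dockerfile comments further down in this change), rebuild and restart only that service:

```console
$ docker-compose build logstash
$ docker-compose up -d logstash
```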

### How to enable the provided extensions

A few extensions are available inside the [`extensions`](extensions) directory. These extensions provide features which
are not part of the standard Elastic stack, but can be used to enrich it with extra integrations.

@@ -219,7 +257,7 @@ of them require manual changes to the default ELK configuration.

## JVM tuning

### How to specify the amount of memory used by a service

By default, both Elasticsearch and Logstash start with [1/4 of the total host
memory](https://docs.oracle.com/javase/8/docs/technotes/guides/vm/gctuning/parallel.html#default_heap_size) allocated to
@@ -246,7 +284,7 @@ logstash:
LS_JAVA_OPTS: "-Xmx1g -Xms1g"
```

### How to enable a remote JMX connection to a service

As for the Java Heap memory (see above), you can specify JVM options to enable JMX and map the JMX port on the Docker
host.

@@ -274,8 +312,8 @@ $ docker-compose build
$ docker-compose up
```

> :information_source: Always pay attention to the [upgrade instructions][upgrade] for each individual component before
performing a stack upgrade.

### Plugins and integrations

@@ -284,10 +322,10 @@ See the following Wiki pages:
* [External applications](https://github.com/deviantony/docker-elk/wiki/External-applications)
* [Popular integrations](https://github.com/deviantony/docker-elk/wiki/Popular-integrations)

### Swarm mode

Experimental support for Docker [Swarm mode][swarm-mode] is provided in the form of a `docker-stack.yml` file, which can
be deployed in an existing Swarm cluster using the following command:

```console
$ docker stack deploy -c docker-stack.yml elk
@@ -299,5 +337,29 @@ If all components get deployed without any error, the following command will sho
$ docker stack services elk
```

> :information_source: To scale Elasticsearch in Swarm mode, configure *zen* to use the DNS name `tasks.elasticsearch`
instead of `elasticsearch`.
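
A minimal sketch of that change, assuming the 6.x zen discovery host list (`discovery.zen.ping.unicast.hosts`) is the
setting being pointed at the Swarm service DNS name, and that `discovery.type: single-node` is removed when forming a
multi-node cluster:

```console
$ echo 'discovery.zen.ping.unicast.hosts: tasks.elasticsearch' >> elasticsearch/config/elasticsearch.yml
```
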
[elk-stack]: https://www.elastic.co/elk-stack
[stack-features]: https://www.elastic.co/products/stack
[paid-features]: https://www.elastic.co/subscriptions
[trial-license]: https://www.elastic.co/guide/en/elasticsearch/reference/current/license-settings.html
[win-shareddrives]: https://docs.docker.com/docker-for-windows/#shared-drives
[mac-mounts]: https://docs.docker.com/docker-for-mac/osxfs/
[builtin-users]: https://www.elastic.co/guide/en/x-pack/current/setting-up-authentication.html#built-in-users
[connect-kibana]: https://www.elastic.co/guide/en/kibana/current/connect-to-elasticsearch.html
[config-es]: ./elasticsearch/config/elasticsearch.yml
[config-kbn]: ./kibana/config/kibana.yml
[config-ls]: ./logstash/config/logstash.yml
[log4j-props]: https://github.com/elastic/logstash-docker/tree/master/build/logstash/config
[esuser]: https://github.com/elastic/elasticsearch-docker/blob/c2877ef/.tedi/template/bin/docker-entrypoint.sh#L9-L10
[upgrade]: https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html
[swarm-mode]: https://docs.docker.com/engine/swarm/

@@ -14,6 +14,7 @@ services:
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: changeme
    networks:
      - elk
@@ -41,7 +42,7 @@ services:
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro
    ports:
      - "5601:5601"
    networks:
...

@@ -3,7 +3,7 @@ version: '3.3'
services:

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:6.7.0
    ports:
      - "9200:9200"
      - "9300:9300"
@@ -12,6 +12,7 @@ services:
        target: /usr/share/elasticsearch/config/elasticsearch.yml
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: changeme
    networks:
      - elk
    deploy:
@@ -19,7 +20,7 @@ services:
      replicas: 1

  logstash:
    image: docker.elastic.co/logstash/logstash:6.7.0
    ports:
      - "5000:5000"
      - "9600:9600"
@@ -37,7 +38,7 @@ services:
      replicas: 1

  kibana:
    image: docker.elastic.co/kibana/kibana:6.7.0
    ports:
      - "5601:5601"
    configs:
...

ARG ELK_VERSION

# https://github.com/elastic/elasticsearch-docker
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}

# Add your elasticsearch plugins setup here
# Example: RUN elasticsearch-plugin install analysis-icu

---
## Default Elasticsearch configuration from elasticsearch-docker.
## from https://github.com/elastic/elasticsearch-docker/blob/master/.tedi/template/elasticsearch.yml
#
cluster.name: "docker-cluster"
network.host: 0.0.0.0
@@ -14,3 +14,10 @@ discovery.zen.minimum_master_nodes: 1
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
#
discovery.type: single-node

## X-Pack settings
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html
#
xpack.license.self_generated.type: trial
xpack.security.enabled: true
xpack.monitoring.collection.enabled: true

ARG ELK_VERSION

# https://github.com/elastic/kibana-docker
FROM docker.elastic.co/kibana/kibana:${ELK_VERSION}

# Add your kibana plugins setup here
# Example: RUN kibana-plugin install <name|url>

---
## Default Kibana configuration from kibana-docker.
## https://github.com/elastic/kibana-docker/blob/master/.tedi/template/kibana.yml.j2
#
server.name: kibana
server.host: "0"
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
xpack.monitoring.ui.container.elasticsearch.enabled: true
## X-Pack security credentials
#
elasticsearch.username: elastic
elasticsearch.password: changeme

ARG ELK_VERSION

# https://github.com/elastic/logstash-docker
FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}

# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json

---
## Default Logstash configuration from logstash-docker.
## from https://github.com/elastic/logstash-docker/blob/master/build/logstash/config/logstash-full.yml
#
http.host: "0.0.0.0"
xpack.monitoring.elasticsearch.url: http://elasticsearch:9200
## X-Pack security credentials
#
xpack.monitoring.elasticsearch.username: elastic
xpack.monitoring.elasticsearch.password: changeme

@@ -9,5 +9,7 @@ input {
output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    user => elastic
    password => changeme
  }
}