# NixOS integration test for the ELK stack (Elasticsearch, Logstash, Kibana)
# plus journalbeat and elasticsearch-curator, built once per ELK major version.
{ system ? builtins.currentSystem,
  config ? {},
  pkgs ? import ../.. { inherit system config; },
  enableUnfree ? false
  # To run the test on the unfree ELK use the following command:
  # NIXPKGS_ALLOW_UNFREE=1 nix-build nixos/tests/elk.nix -A ELK-6 --arg enableUnfree true
}:

let
  # All test queries go against the single-node elasticsearch instance.
  esUrl = "http://localhost:9200";

  # Build one NixOS VM test for a given attribute set of ELK packages
  # (elasticsearch, logstash, kibana, journalbeat).
  mkElkTest = name: elk:
    import ./make-test-python.nix ({
      inherit name;
      meta = with pkgs.stdenv.lib.maintainers; {
        maintainers = [ eelco offline basvandijk ];
      };
      nodes = {
        one =
          { pkgs, lib, ... }: {
              # Not giving the machine at least 2060MB results in elasticsearch failing with the following error:
              #
              # OpenJDK 64-Bit Server VM warning:
              # INFO: os::commit_memory(0x0000000085330000, 2060255232, 0)
              # failed; error='Cannot allocate memory' (errno=12)
              #
              # There is insufficient memory for the Java Runtime Environment to continue.
              # Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory.
              #
              # When setting this to 2500 I got "Kernel panic - not syncing: Out of
              # memory: compulsory panic_on_oom is enabled" so let's give it even a
              # bit more room:
              virtualisation.memorySize = 3000;

              # For querying JSON objects returned from elasticsearch and kibana.
              environment.systemPackages = [ pkgs.jq ];

              services = {

                # journalbeat < 6 needs "template.enabled: false"; >= 6 needs
                # explicit journalbeat.inputs instead.
                journalbeat = let lt6 = builtins.compareVersions
                                          elk.journalbeat.version "6" < 0; in {
                  enable = true;
                  package = elk.journalbeat;
                  extraConfig = pkgs.lib.mkOptionDefault (''
                    logging:
                      to_syslog: true
                      level: warning
                      metrics.enabled: false
                    output.elasticsearch:
                      hosts: [ "127.0.0.1:9200" ]
                    ${pkgs.lib.optionalString lt6 "template.enabled: false"}
                  '' + pkgs.lib.optionalString (!lt6) ''
                    journalbeat.inputs:
                    - paths: []
                      seek: cursor
                  '');
                };

                # Two exec inputs; the "dragons" messages are dropped by the
                # filter so only "flowers" should reach the outputs.
                logstash = {
                  enable = true;
                  package = elk.logstash;
                  inputConfig = ''
                    exec { command => "echo -n flowers" interval => 1 type => "test" }
                    exec { command => "echo -n dragons" interval => 1 type => "test" }
                  '';
                  filterConfig = ''
                    if [message] =~ /dragons/ {
                      drop {}
                    }
                  '';
                  outputConfig = ''
                    file {
                      path => "/tmp/logstash.out"
                      codec => line { format => "%{message}" }
                    }
                    elasticsearch {
                      hosts => [ "${esUrl}" ]
                    }
                  '';
                };

                elasticsearch = {
                  enable = true;
                  package = elk.elasticsearch;
                };

                kibana = {
                  enable = true;
                  package = elk.kibana;
                };

                # Curator deletes logstash-* indices older than 1 second, which
                # the test script uses to verify that curator actually runs.
                elasticsearch-curator = {
                  enable = true;
                  actionYAML = ''
                    ---
                    actions:
                      1:
                        action: delete_indices
                        description: >-
                          Delete indices older than 1 second (based on index name), for logstash-
                          prefixed indices. Ignore the error if the filter does not result in an
                          actionable list of indices (ignore_empty_list) and exit cleanly.
                        options:
                          allow_ilm_indices: true
                          ignore_empty_list: True
                          disable_action: False
                        filters:
                        - filtertype: pattern
                          kind: prefix
                          value: logstash-
                        - filtertype: age
                          source: name
                          direction: older
                          timestring: '%Y.%m.%d'
                          unit: seconds
                          unit_count: 1
                  '';
                };
              };
            };
      };

      testScript = ''
        import json


        def total_hits(message):
            dictionary = {"query": {"match": {"message": message}}}
            return (
                "curl --silent --show-error '${esUrl}/_search' "
                + "-H 'Content-Type: application/json' "
                + "-d '{}' ".format(json.dumps(dictionary))
                + "| jq .hits.total"
            )


        start_all()

        one.wait_for_unit("elasticsearch.service")
        one.wait_for_open_port(9200)

        # Continue as long as the status is not "red". The status is probably
        # "yellow" instead of "green" because we are using a single elasticsearch
        # node which elasticsearch considers risky.
        #
        # TODO: extend this test with multiple elasticsearch nodes
        # and see if the status turns "green".
        one.wait_until_succeeds(
            "curl --silent --show-error '${esUrl}/_cluster/health' | jq .status | grep -v red"
        )

        with subtest("Perform some simple logstash tests"):
            one.wait_for_unit("logstash.service")
            one.wait_until_succeeds("cat /tmp/logstash.out | grep flowers")
            one.wait_until_succeeds("cat /tmp/logstash.out | grep -v dragons")

        with subtest("Kibana is healthy"):
            one.wait_for_unit("kibana.service")
            one.wait_until_succeeds(
                "curl --silent --show-error 'http://localhost:5601/api/status' | jq .status.overall.state | grep green"
            )

        with subtest("Logstash messages arrive in elasticsearch"):
            one.wait_until_succeeds(total_hits("flowers") + " | grep -v 0")
            one.wait_until_succeeds(total_hits("dragons") + " | grep 0")

        with subtest(
            "A message logged to the journal is ingested by elasticsearch via journalbeat"
        ):
            one.wait_for_unit("journalbeat.service")
            one.execute("echo 'Supercalifragilisticexpialidocious' | systemd-cat")
            one.wait_until_succeeds(
                total_hits("Supercalifragilisticexpialidocious") + " | grep -v 0"
            )

        with subtest("Elasticsearch-curator works"):
            one.systemctl("stop logstash")
            one.systemctl("start elasticsearch-curator")
            one.wait_until_succeeds(
                '! curl --silent --show-error "${esUrl}/_cat/indices" | grep logstash | grep -q ^'
            )
      '';
  }) {};
in pkgs.lib.mapAttrs mkElkTest {
  ELK-6 =
    if enableUnfree
    then {
      elasticsearch = pkgs.elasticsearch6;
      logstash = pkgs.logstash6;
      kibana = pkgs.kibana6;
      journalbeat = pkgs.journalbeat6;
    }
    else {
      elasticsearch = pkgs.elasticsearch6-oss;
      logstash = pkgs.logstash6-oss;
      kibana = pkgs.kibana6-oss;
      journalbeat = pkgs.journalbeat6;
    };
  ELK-7 =
    if enableUnfree
    then {
      elasticsearch = pkgs.elasticsearch7;
      logstash = pkgs.logstash7;
      kibana = pkgs.kibana7;
      journalbeat = pkgs.journalbeat7;
    }
    else {
      elasticsearch = pkgs.elasticsearch7-oss;
      logstash = pkgs.logstash7-oss;
      kibana = pkgs.kibana7-oss;
      journalbeat = pkgs.journalbeat7;
    };
}