Dzahn has submitted this change and it was merged.

Change subject: logstash: Break logstash.pp up into individual classes
......................................................................


logstash: Break logstash.pp up into individual classes

Break the legacy all-in-one logstash.pp file up into a proper
one-class-per-file organization. The class formerly known as
::role::logstash is now named ::role::logstash::collector. site.pp has
been updated accordingly, but an associated manual change will be needed
for the deployment-logstash2.deployment-prep instance in Labs.
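For deployment-logstash2 the same key rename will have to be applied by
hand wherever that instance's Hiera data is maintained (e.g. a per-project
or per-prefix override in Labs). A minimal sketch of that edit, assuming
the value matches the deployment-prep setting in the diff below:

    # hypothetical per-instance Hiera override for deployment-logstash2
    # old key, no longer read after the class rename:
    #   role::logstash::statsd_host: labmon1001.eqiad.wmnet
    # replacement key consumed by role::logstash::collector:
    role::logstash::collector::statsd_host: labmon1001.eqiad.wmnet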

Bug: T93645
Change-Id: Ifc02cb52cc6a49611fbc6c1158f6fd87b74abb16
---
M hieradata/labs/deployment-prep/common.yaml
R hieradata/role/common/logstash/collector.yaml
M manifests/site.pp
D modules/role/manifests/logstash.pp
A modules/role/manifests/logstash/apifeatureusage.pp
A modules/role/manifests/logstash/collector.pp
A modules/role/manifests/logstash/elasticsearch.pp
A modules/role/manifests/logstash/eventlogging.pp
A modules/role/manifests/logstash/puppetreports.pp
9 files changed, 337 insertions(+), 336 deletions(-)

Approvals:
  Dzahn: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/hieradata/labs/deployment-prep/common.yaml b/hieradata/labs/deployment-prep/common.yaml
index 1bafd71..40c7717 100644
--- a/hieradata/labs/deployment-prep/common.yaml
+++ b/hieradata/labs/deployment-prep/common.yaml
@@ -197,7 +197,7 @@
 "elasticsearch::cluster_name": beta-search
 "elasticsearch::graylog_hosts":
   - deployment-logstash2.deployment-prep.eqiad.wmflabs
-role::logstash::statsd_host: labmon1001.eqiad.wmnet
+role::logstash::collector::statsd_host: labmon1001.eqiad.wmnet
 "mediawiki::redis_servers::eqiad":
   shard01:
     host: 10.68.16.177 # deployment-redis01
diff --git a/hieradata/role/common/logstash.yaml b/hieradata/role/common/logstash/collector.yaml
similarity index 96%
rename from hieradata/role/common/logstash.yaml
rename to hieradata/role/common/logstash/collector.yaml
index 51ace6a..c4ab218 100644
--- a/hieradata/role/common/logstash.yaml
+++ b/hieradata/role/common/logstash/collector.yaml
@@ -38,7 +38,7 @@
   - logstash1005.eqiad.wmnet
   - logstash1006.eqiad.wmnet
 
-role::logstash::statsd_host: statsd.eqiad.wmnet
+role::logstash::collector::statsd_host: statsd.eqiad.wmnet
 
 # Kibana
 role::kibana::vhost: logstash.wikimedia.org
diff --git a/manifests/site.pp b/manifests/site.pp
index 19a2ccc..d5a3f47 100644
--- a/manifests/site.pp
+++ b/manifests/site.pp
@@ -1514,12 +1514,12 @@
 }
 
 node /^logstash100[1-2]\.eqiad\.wmnet$/ {
-    role(logstash, kibana, logstash::apifeatureusage)
+    role(logstash::collector, kibana, logstash::apifeatureusage)
     include ::lvs::realserver
 }
 
 node /^logstash1003\.eqiad\.wmnet$/ {
-    role(logstash, kibana, logstash::apifeatureusage, logstash::eventlogging)
+    role(logstash::collector, kibana, logstash::apifeatureusage, logstash::eventlogging)
     include ::lvs::realserver
 }
 node /^logstash100[4-6]\.eqiad\.wmnet$/ {
diff --git a/modules/role/manifests/logstash.pp b/modules/role/manifests/logstash.pp
deleted file mode 100644
index 745ac6a..0000000
--- a/modules/role/manifests/logstash.pp
+++ /dev/null
@@ -1,332 +0,0 @@
-# vim:sw=4 ts=4 sts=4 et:
-# == Class: role::logstash
-#
-# Provisions Logstash and ElasticSearch.
-#
-# == Parameters:
-# - $statsd_host: Host to send statsd data to.
-#
-class role::logstash (
-    $statsd_host,
-) {
-    include ::role::logstash::elasticsearch
-    include ::logstash
-    include base::firewall
-
-    nrpe::monitor_service { 'logstash':
-        description  => 'logstash process',
-        nrpe_command => '/usr/lib/nagios/plugins/check_procs -c 1:1 -u logstash -C java -a logstash',
-    }
-
-    ## Inputs (10)
-
-    logstash::input::udp2log { 'mediawiki':
-        port => 8324,
-    }
-
-    ferm::service { 'logstash_udp2log':
-        proto   => 'udp',
-        port    => '8324',
-        notrack => true,
-        srange  => '$DOMAIN_NETWORKS',
-    }
-
-    logstash::input::syslog { 'syslog':
-        port => 10514,
-    }
-
-    ferm::service { 'logstash_syslog':
-        proto   => 'udp',
-        port    => '10514',
-        notrack => true,
-        srange  => '$DOMAIN_NETWORKS',
-    }
-
-    ferm::service { 'grafana_dashboard_definition_storage':
-        proto  => 'tcp',
-        port   => '9200',
-        srange => '@resolve(krypton.eqiad.wmnet)',
-    }
-
-    ferm::service { 'logstash_canary_checker_reporting':
-        proto  => 'tcp',
-        port   => '9200',
-        srange => '($DEPLOYMENT_HOSTS $MAINTENANCE_HOSTS)',
-    }
-
-    logstash::input::gelf { 'gelf':
-        port => 12201,
-    }
-
-    ferm::service { 'logstash_gelf':
-        proto   => 'udp',
-        port    => '12201',
-        notrack => true,
-        srange  => '$DOMAIN_NETWORKS',
-    }
-
-    logstash::input::log4j { 'log4j': }
-
-    ferm::service { 'logstash_log4j':
-        proto   => 'tcp',
-        port    => '4560',
-        notrack => true,
-        srange  => '$DOMAIN_NETWORKS',
-    }
-
-    # Also used for UDP JSON logging from python-logstash lib (e.g. Striker)
-    logstash::input::udp { 'logback':
-        port  => 11514,
-        codec => 'json',
-    }
-
-    ferm::service { 'logstash_udp':
-        proto   => 'udp',
-        port    => '11514',
-        notrack => true,
-        srange  => '$DOMAIN_NETWORKS',
-    }
-
-    ## Global pre-processing (15)
-
-    # move files into module?
-    # lint:ignore:puppet_url_without_modules
-    logstash::conf { 'filter_strip_ansi_color':
-        source   => 'puppet:///modules/role/logstash/filter-strip-ansi-color.conf',
-        priority => 15,
-    }
-
-    ## Input specific processing (20)
-
-    logstash::conf { 'filter_syslog':
-        source   => 'puppet:///modules/role/logstash/filter-syslog.conf',
-        priority => 20,
-    }
-
-    logstash::conf { 'filter_udp2log':
-        source   => 'puppet:///modules/role/logstash/filter-udp2log.conf',
-        priority => 20,
-    }
-
-    logstash::conf { 'filter_gelf':
-        source   => 'puppet:///modules/role/logstash/filter-gelf.conf',
-        priority => 20,
-    }
-
-    logstash::conf { 'filter_logback':
-        source   => 'puppet:///modules/role/logstash/filter-logback.conf',
-        priority => 20,
-    }
-
-    ## Application specific processing (50)
-
-    logstash::conf { 'filter_mediawiki':
-        source   => 'puppet:///modules/role/logstash/filter-mediawiki.conf',
-        priority => 50,
-    }
-
-    logstash::conf { 'filter_striker':
-        source   => 'puppet:///modules/role/logstash/filter-striker.conf',
-        priority => 50,
-    }
-
-    ## Global post-processing (70)
-
-    logstash::conf { 'filter_add_normalized_message':
-        source   => 'puppet:///modules/role/logstash/filter-add-normalized-message.conf',
-        priority => 70,
-    }
-
-    logstash::conf { 'filter_normalize_log_levels':
-        source   => 'puppet:///modules/role/logstash/filter-normalize-log-levels.conf',
-        priority => 70,
-    }
-
-    logstash::conf { 'filter_de_dot':
-        source   => 'puppet:///modules/role/logstash/filter-de_dot.conf',
-        priority => 70,
-    }
-
-    ## Outputs (90)
-    # Template for Elasticsearch index creation
-    file { '/etc/logstash/elasticsearch-template.json':
-        ensure => present,
-        source => 'puppet:///modules/role/logstash/elasticsearch-template.json',
-        owner  => 'root',
-        group  => 'root',
-        mode   => '0444',
-    }
-    # lint:endignore
-
-    logstash::output::elasticsearch { 'logstash':
-        host            => '127.0.0.1',
-        guard_condition => '"es" in [tags]',
-        manage_indices  => true,
-        priority        => 90,
-        template        => '/etc/logstash/elasticsearch-template.json',
-        require         => File['/etc/logstash/elasticsearch-template.json'],
-    }
-
-    logstash::output::statsd { 'MW_channel_rate':
-        host            => $statsd_host,
-        guard_condition => '[type] == "mediawiki" and "es" in [tags]',
-        namespace       => 'logstash.rate',
-        sender          => 'mediawiki',
-        increment       => [ '%{channel}.%{level}' ],
-    }
-
-    logstash::output::statsd { 'OOM_channel_rate':
-        host            => $statsd_host,
-        guard_condition => '[type] == "hhvm" and [message] =~ "request has exceeded memory limit"',
-        namespace       => 'logstash.rate',
-        sender          => 'oom',
-        increment       => [ '%{level}' ],
-    }
-
-    logstash::output::statsd { 'HHVM_channel_rate':
-        host            => $statsd_host,
-        guard_condition => '[type] == "hhvm" and [message] !~ "request has exceeded memory limit"',
-        namespace       => 'logstash.rate',
-        sender          => 'hhvm',
-        increment       => [ '%{level}' ],
-    }
-
-    logstash::output::statsd { 'Apache2_channel_rate':
-        host            => $statsd_host,
-        guard_condition => '[type] == "apache2" and "syslog" in [tags]',
-        namespace       => 'logstash.rate',
-        sender          => 'apache2',
-        increment       => [ '%{level}' ],
-    }
-}
-
-# == Class: role::logstash::elasticsearch
-#
-# Provisions Elasticsearch backend node for a Logstash cluster.
-#
-class role::logstash::elasticsearch {
-    include standard
-    include ::elasticsearch::nagios::check
-    include ::elasticsearch::monitor::diamond
-    include base::firewall
-
-    if $::standard::has_ganglia {
-        include ::elasticsearch::ganglia
-    }
-
-    package { 'elasticsearch/plugins':
-        provider => 'trebuchet',
-    }
-
-    class { '::elasticsearch':
-        require      => Package['elasticsearch/plugins'],
-        java_package => 'openjdk-8-jdk',
-    }
-
-    $logstash_nodes = hiera('logstash::cluster_hosts')
-    $logstash_nodes_ferm = join($logstash_nodes, ' ')
-
-    ferm::service { 'logstash_elastic_internode':
-        proto   => 'tcp',
-        port    => 9300,
-        notrack => true,
-        srange  => "@resolve((${logstash_nodes_ferm}))",
-    }
-}
-
-# == Class: role::logstash::puppetreports
-#
-# Set up a TCP listener to listen for puppet failure reports.
-class role::logstash::puppetreports {
-    require ::role::logstash
-
-    if $::realm != 'labs' {
-        # Constrain to only labs, security issues in prod have not been worked out yet
-        fail('role::logstash::puppetreports may only be deployed to Labs.')
-    }
-
-    logstash::input::tcp { 'tcp_json':
-        port  => 5229,
-        codec => 'json_lines',
-    }
-
-    ferm::service { 'logstash_tcp_json':
-        proto  => 'tcp',
-        port   => '5229',
-        srange => '$DOMAIN_NETWORKS',
-    }
-
-    # lint:ignore:puppet_url_without_modules
-    logstash::conf { 'filter_puppet':
-        source   => 'puppet:///modules/role/logstash/filter-puppet.conf',
-        priority => 50,
-    }
-    # lint:endignore
-}
-
-
-# == Class: role::logstash::apifeatureusage
-#
-# Builds on role::logstash to insert sanitized data for
-# Extension:ApiFeatureUsage into Elasticsearch.
-#
-class role::logstash::apifeatureusage {
-    include ::role::logstash
-
-    # FIXME: make this a param and use hiera to vary by realm
-    $host            = $::realm ? {
-        'production' => '10.2.2.30', # search.svc.eqiad.wmnet
-        'labs'       => 'deployment-elastic05', # Pick one at random
-    }
-
-    # Template for Elasticsearch index creation
-    # lint:ignore:puppet_url_without_modules
-    file { '/etc/logstash/apifeatureusage-template.json':
-        ensure => present,
-        source => 'puppet:///modules/role/logstash/apifeatureusage-template.json',
-        owner  => 'root',
-        group  => 'root',
-        mode   => '0444',
-    }
-
-    # Add configuration to logstash
-    # Needs to come after 'filter_mediawiki' (priority 50)
-    logstash::conf { 'filter_apifeatureusage':
-        source   => 'puppet:///modules/role/logstash/filter-apifeatureusage.conf',
-        priority => 55,
-    }
-    # lint:endignore
-
-    # Output destined for separate Elasticsearch cluster from Logstash cluster
-    logstash::output::elasticsearch { 'apifeatureusage':
-        host            => $host,
-        guard_condition => '[type] == "api-feature-usage-sanitized"',
-        manage_indices  => true,
-        priority        => 95,
-        template        => '/etc/logstash/apifeatureusage-template.json',
-        require         => File['/etc/logstash/apifeatureusage-template.json'],
-    }
-}
-
-# == Class: role::logstash::eventlogging
-#
-# Configure Logstash to consume validation logs from EventLogging.
-#
-class role::logstash::eventlogging {
-    include ::role::logstash
-
-    $topic = 'eventlogging_EventError'
-    $kafka_config = kafka_config('analytics')
-
-    logstash::input::kafka { $topic:
-        tags       => [$topic, 'kafka'],
-        type       => 'eventlogging',
-        zk_connect => $kafka_config['zookeeper']['url'],
-    }
-    # lint:ignore:puppet_url_without_modules
-    logstash::conf { 'filter_eventlogging':
-        source   => 'puppet:///modules/role/logstash/filter-eventlogging.conf',
-        priority => 50,
-    }
-    # lint:endignore
-}
diff --git a/modules/role/manifests/logstash/apifeatureusage.pp b/modules/role/manifests/logstash/apifeatureusage.pp
new file mode 100644
index 0000000..50f176d
--- /dev/null
+++ b/modules/role/manifests/logstash/apifeatureusage.pp
@@ -0,0 +1,43 @@
+# vim:sw=4 ts=4 sts=4 et:
+# == Class: role::logstash::apifeatureusage
+#
+# Builds on role::logstash to insert sanitized data for
+# Extension:ApiFeatureUsage into Elasticsearch.
+#
+class role::logstash::apifeatureusage {
+    include ::role::logstash::collector
+
+    # FIXME: make this a param and use hiera to vary by realm
+    $host            = $::realm ? {
+        'production' => '10.2.2.30', # search.svc.eqiad.wmnet
+        'labs'       => 'deployment-elastic05', # Pick one at random
+    }
+
+    # Template for Elasticsearch index creation
+    # lint:ignore:puppet_url_without_modules
+    file { '/etc/logstash/apifeatureusage-template.json':
+        ensure => present,
+        source => 'puppet:///modules/role/logstash/apifeatureusage-template.json',
+        owner  => 'root',
+        group  => 'root',
+        mode   => '0444',
+    }
+
+    # Add configuration to logstash
+    # Needs to come after 'filter_mediawiki' (priority 50)
+    logstash::conf { 'filter_apifeatureusage':
+        source   => 'puppet:///modules/role/logstash/filter-apifeatureusage.conf',
+        priority => 55,
+    }
+    # lint:endignore
+
+    # Output destined for separate Elasticsearch cluster from Logstash cluster
+    logstash::output::elasticsearch { 'apifeatureusage':
+        host            => $host,
+        guard_condition => '[type] == "api-feature-usage-sanitized"',
+        manage_indices  => true,
+        priority        => 95,
+        template        => '/etc/logstash/apifeatureusage-template.json',
+        require         => File['/etc/logstash/apifeatureusage-template.json'],
+    }
+}
diff --git a/modules/role/manifests/logstash/collector.pp b/modules/role/manifests/logstash/collector.pp
new file mode 100644
index 0000000..7497634
--- /dev/null
+++ b/modules/role/manifests/logstash/collector.pp
@@ -0,0 +1,203 @@
+# vim:sw=4 ts=4 sts=4 et:
+# == Class: role::logstash::collector
+#
+# Provisions Logstash and an Elasticsearch node to proxy requests to ELK stack
+# Elasticsearch cluster.
+#
+# == Parameters:
+# - $statsd_host: Host to send statsd data to.
+#
+class role::logstash::collector (
+    $statsd_host,
+) {
+    include ::role::logstash::elasticsearch
+    include ::logstash
+    include base::firewall
+
+    nrpe::monitor_service { 'logstash':
+        description  => 'logstash process',
+        nrpe_command => '/usr/lib/nagios/plugins/check_procs -c 1:1 -u logstash -C java -a logstash',
+    }
+
+    ## Inputs (10)
+
+    logstash::input::udp2log { 'mediawiki':
+        port => 8324,
+    }
+
+    ferm::service { 'logstash_udp2log':
+        proto   => 'udp',
+        port    => '8324',
+        notrack => true,
+        srange  => '$DOMAIN_NETWORKS',
+    }
+
+    logstash::input::syslog { 'syslog':
+        port => 10514,
+    }
+
+    ferm::service { 'logstash_syslog':
+        proto   => 'udp',
+        port    => '10514',
+        notrack => true,
+        srange  => '$DOMAIN_NETWORKS',
+    }
+
+    ferm::service { 'grafana_dashboard_definition_storage':
+        proto  => 'tcp',
+        port   => '9200',
+        srange => '@resolve(krypton.eqiad.wmnet)',
+    }
+
+    ferm::service { 'logstash_canary_checker_reporting':
+        proto  => 'tcp',
+        port   => '9200',
+        srange => '($DEPLOYMENT_HOSTS $MAINTENANCE_HOSTS)',
+    }
+
+    logstash::input::gelf { 'gelf':
+        port => 12201,
+    }
+
+    ferm::service { 'logstash_gelf':
+        proto   => 'udp',
+        port    => '12201',
+        notrack => true,
+        srange  => '$DOMAIN_NETWORKS',
+    }
+
+    logstash::input::log4j { 'log4j': }
+
+    ferm::service { 'logstash_log4j':
+        proto   => 'tcp',
+        port    => '4560',
+        notrack => true,
+        srange  => '$DOMAIN_NETWORKS',
+    }
+
+    # Also used for UDP JSON logging from python-logstash lib (e.g. Striker)
+    logstash::input::udp { 'logback':
+        port  => 11514,
+        codec => 'json',
+    }
+
+    ferm::service { 'logstash_udp':
+        proto   => 'udp',
+        port    => '11514',
+        notrack => true,
+        srange  => '$DOMAIN_NETWORKS',
+    }
+
+    ## Global pre-processing (15)
+
+    # move files into module?
+    # lint:ignore:puppet_url_without_modules
+    logstash::conf { 'filter_strip_ansi_color':
+        source   => 'puppet:///modules/role/logstash/filter-strip-ansi-color.conf',
+        priority => 15,
+    }
+
+    ## Input specific processing (20)
+
+    logstash::conf { 'filter_syslog':
+        source   => 'puppet:///modules/role/logstash/filter-syslog.conf',
+        priority => 20,
+    }
+
+    logstash::conf { 'filter_udp2log':
+        source   => 'puppet:///modules/role/logstash/filter-udp2log.conf',
+        priority => 20,
+    }
+
+    logstash::conf { 'filter_gelf':
+        source   => 'puppet:///modules/role/logstash/filter-gelf.conf',
+        priority => 20,
+    }
+
+    logstash::conf { 'filter_logback':
+        source   => 'puppet:///modules/role/logstash/filter-logback.conf',
+        priority => 20,
+    }
+
+    ## Application specific processing (50)
+
+    logstash::conf { 'filter_mediawiki':
+        source   => 'puppet:///modules/role/logstash/filter-mediawiki.conf',
+        priority => 50,
+    }
+
+    logstash::conf { 'filter_striker':
+        source   => 'puppet:///modules/role/logstash/filter-striker.conf',
+        priority => 50,
+    }
+
+    ## Global post-processing (70)
+
+    logstash::conf { 'filter_add_normalized_message':
+        source   => 'puppet:///modules/role/logstash/filter-add-normalized-message.conf',
+        priority => 70,
+    }
+
+    logstash::conf { 'filter_normalize_log_levels':
+        source   => 'puppet:///modules/role/logstash/filter-normalize-log-levels.conf',
+        priority => 70,
+    }
+
+    logstash::conf { 'filter_de_dot':
+        source   => 'puppet:///modules/role/logstash/filter-de_dot.conf',
+        priority => 70,
+    }
+
+    ## Outputs (90)
+    # Template for Elasticsearch index creation
+    file { '/etc/logstash/elasticsearch-template.json':
+        ensure => present,
+        source => 'puppet:///modules/role/logstash/elasticsearch-template.json',
+        owner  => 'root',
+        group  => 'root',
+        mode   => '0444',
+    }
+    # lint:endignore
+
+    logstash::output::elasticsearch { 'logstash':
+        host            => '127.0.0.1',
+        guard_condition => '"es" in [tags]',
+        manage_indices  => true,
+        priority        => 90,
+        template        => '/etc/logstash/elasticsearch-template.json',
+        require         => File['/etc/logstash/elasticsearch-template.json'],
+    }
+
+    logstash::output::statsd { 'MW_channel_rate':
+        host            => $statsd_host,
+        guard_condition => '[type] == "mediawiki" and "es" in [tags]',
+        namespace       => 'logstash.rate',
+        sender          => 'mediawiki',
+        increment       => [ '%{channel}.%{level}' ],
+    }
+
+    logstash::output::statsd { 'OOM_channel_rate':
+        host            => $statsd_host,
+        guard_condition => '[type] == "hhvm" and [message] =~ "request has exceeded memory limit"',
+        namespace       => 'logstash.rate',
+        sender          => 'oom',
+        increment       => [ '%{level}' ],
+    }
+
+    logstash::output::statsd { 'HHVM_channel_rate':
+        host            => $statsd_host,
+        guard_condition => '[type] == "hhvm" and [message] !~ "request has exceeded memory limit"',
+        namespace       => 'logstash.rate',
+        sender          => 'hhvm',
+        increment       => [ '%{level}' ],
+    }
+
+    logstash::output::statsd { 'Apache2_channel_rate':
+        host            => $statsd_host,
+        guard_condition => '[type] == "apache2" and "syslog" in [tags]',
+        namespace       => 'logstash.rate',
+        sender          => 'apache2',
+        increment       => [ '%{level}' ],
+    }
+}
+
diff --git a/modules/role/manifests/logstash/elasticsearch.pp b/modules/role/manifests/logstash/elasticsearch.pp
new file mode 100644
index 0000000..e1add0d
--- /dev/null
+++ b/modules/role/manifests/logstash/elasticsearch.pp
@@ -0,0 +1,34 @@
+# vim:sw=4 ts=4 sts=4 et:
+# == Class: role::logstash::elasticsearch
+#
+# Provisions Elasticsearch backend node for a Logstash cluster.
+#
+class role::logstash::elasticsearch {
+    include standard
+    include ::elasticsearch::nagios::check
+    include ::elasticsearch::monitor::diamond
+    include base::firewall
+
+    if $::standard::has_ganglia {
+        include ::elasticsearch::ganglia
+    }
+
+    package { 'elasticsearch/plugins':
+        provider => 'trebuchet',
+    }
+
+    class { '::elasticsearch':
+        require      => Package['elasticsearch/plugins'],
+        java_package => 'openjdk-8-jdk',
+    }
+
+    $logstash_nodes = hiera('logstash::cluster_hosts')
+    $logstash_nodes_ferm = join($logstash_nodes, ' ')
+
+    ferm::service { 'logstash_elastic_internode':
+        proto   => 'tcp',
+        port    => 9300,
+        notrack => true,
+        srange  => "@resolve((${logstash_nodes_ferm}))",
+    }
+}
diff --git a/modules/role/manifests/logstash/eventlogging.pp b/modules/role/manifests/logstash/eventlogging.pp
new file mode 100644
index 0000000..f7191cf
--- /dev/null
+++ b/modules/role/manifests/logstash/eventlogging.pp
@@ -0,0 +1,23 @@
+# vim:sw=4 ts=4 sts=4 et:
+# == Class: role::logstash::eventlogging
+#
+# Configure Logstash to consume validation logs from EventLogging.
+#
+class role::logstash::eventlogging {
+    include ::role::logstash::collector
+
+    $topic = 'eventlogging_EventError'
+    $kafka_config = kafka_config('analytics')
+
+    logstash::input::kafka { $topic:
+        tags       => [$topic, 'kafka'],
+        type       => 'eventlogging',
+        zk_connect => $kafka_config['zookeeper']['url'],
+    }
+    # lint:ignore:puppet_url_without_modules
+    logstash::conf { 'filter_eventlogging':
+        source   => 'puppet:///modules/role/logstash/filter-eventlogging.conf',
+        priority => 50,
+    }
+    # lint:endignore
+}
diff --git a/modules/role/manifests/logstash/puppetreports.pp b/modules/role/manifests/logstash/puppetreports.pp
new file mode 100644
index 0000000..3fee2b2
--- /dev/null
+++ b/modules/role/manifests/logstash/puppetreports.pp
@@ -0,0 +1,30 @@
+# vim:sw=4 ts=4 sts=4 et:
+# == Class: role::logstash::puppetreports
+#
+# Set up a TCP listener to listen for puppet failure reports.
+class role::logstash::puppetreports {
+    require ::role::logstash::collector
+
+    if $::realm != 'labs' {
+        # Constrain to only labs, security issues in prod have not been worked out yet
+        fail('role::logstash::puppetreports may only be deployed to Labs.')
+    }
+
+    logstash::input::tcp { 'tcp_json':
+        port  => 5229,
+        codec => 'json_lines',
+    }
+
+    ferm::service { 'logstash_tcp_json':
+        proto  => 'tcp',
+        port   => '5229',
+        srange => '$DOMAIN_NETWORKS',
+    }
+
+    # lint:ignore:puppet_url_without_modules
+    logstash::conf { 'filter_puppet':
+        source   => 'puppet:///modules/role/logstash/filter-puppet.conf',
+        priority => 50,
+    }
+    # lint:endignore
+}

-- 
To view, visit https://gerrit.wikimedia.org/r/323333
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: Ifc02cb52cc6a49611fbc6c1158f6fd87b74abb16
Gerrit-PatchSet: 10
Gerrit-Project: operations/puppet
Gerrit-Branch: production
Gerrit-Owner: BryanDavis <bda...@wikimedia.org>
Gerrit-Reviewer: Dzahn <dz...@wikimedia.org>
Gerrit-Reviewer: Gehel <gleder...@wikimedia.org>
Gerrit-Reviewer: jenkins-bot <>
