Giuseppe Lavagetto has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/398795 )

Change subject: puppet-compiler: extract facts from puppetDB
......................................................................


puppet-compiler: extract facts from puppetDB

This is desirable because it:
- Avoids downloading stringified facts from the report handler, and the
  heuristics needed to de-stringify them
- Gets the latest facts and is guaranteed to keep working in the future
- Simplifies the whole procedure considerably (a rough sketch of the new
  PuppetDB query flow follows below)
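
A minimal sketch of the query flow the new exporter follows. The PuppetDB
host/port below are placeholders, not from this change; the real script reads
them from /etc/puppet/puppetdb.conf, and the endpoints match the ones used in
puppet-facts-export.py:

    import requests

    PUPPETDB = 'https://puppetdb.example.org:8081'  # placeholder host:port

    def node_facts(certname):
        # PuppetDB returns a list of {'certname', 'name', 'value'} entries
        facts = requests.get(
            '{}/v4/nodes/{}/facts'.format(PUPPETDB, certname)).json()
        return {f['name']: f['value'] for f in facts}

    for node in requests.get('{}/v4/nodes/'.format(PUPPETDB)).json():
        if node.get('deactivated') is not None:
            continue  # a non-null 'deactivated' field marks a retired node
        print(node['certname'], len(node_facts(node['certname'])))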

Change-Id: Iec9d46d00e68eedf210c88ca1a1a810506f5bfd3
---
M modules/puppet_compiler/files/compiler-update-facts
D modules/puppetmaster/files/puppet-facts-export
A modules/puppetmaster/files/puppet-facts-export.py
M modules/puppetmaster/manifests/scripts.pp
4 files changed, 79 insertions(+), 59 deletions(-)

Approvals:
  Giuseppe Lavagetto: Looks good to me, approved
  jenkins-bot: Verified
  Volans: Looks good to me, but someone else must approve



diff --git a/modules/puppet_compiler/files/compiler-update-facts b/modules/puppet_compiler/files/compiler-update-facts
index c0d70b4..215e92c 100755
--- a/modules/puppet_compiler/files/compiler-update-facts
+++ b/modules/puppet_compiler/files/compiler-update-facts
@@ -19,23 +19,14 @@
 
 MASTERS=${PUPPET_MASTERS:-$(ruby -e "${RUBY_SCRIPT}" < hieradata/common/puppetmaster.yaml)}
 
-ssh "$COMPILER" 'sudo rm -rf /tmp/catalogs; sudo mkdir -p /tmp/catalogs'
-# Gather all the facts from all the masters. This can't really be atomic and We
-# want to minimize race conditions in the generation without using background
-# processes and multiconcurrency to avoid complexity so do this in 2 for loops
-for master in $MASTERS; do
-    echo -e "\n### Syncing facts from $master"
-    ssh "$master" 'sudo /usr/local/bin/puppet-facts-export 1>/dev/null'
-done
-for master in $MASTERS; do
-    # tunnel via your localhost without ever the file touching the disk
-    ssh "$master" cat /tmp/puppet-facts-export.tar.xz | ssh "$COMPILER" 'cat > 
puppet-facts-export.tar.xz'
-    ssh "$master" 'sudo rm /tmp/puppet-facts-export.tar.xz'
-    # Note: The $$ trick works because this is all a single command, it should not be split into multiple ones
-    ssh "$COMPILER" 'mkdir -p /tmp/catalogs.$$ && tar Jxf 
puppet-facts-export.tar.xz --directory /tmp/catalogs.$$ \
-&& sudo rsync -au /tmp/catalogs.$$/ /tmp/catalogs/ && rm -rf /tmp/catalogs.$$ 
&& rm puppet-facts-export.tar.xz'
-    # Do not modify the files in /tmp/catalogs, the modified date counts for the rsync
-    ssh "$COMPILER" 'sudo chown -R jenkins-deploy:wikidev /tmp/catalogs/yaml'
-done
+ssh "$COMPILER" 'sudo rm -rf /tmp/facts; sudo mkdir -p /tmp/facts'
+# Gather all the facts from one of the masters.
+read -r master _ <<< "$MASTERS"  # take the first master in the list
+echo -e "\n### Syncing facts from $master"
+ssh "$master" 'sudo /usr/local/bin/puppet-facts-export 1>/dev/null'
+# tunnel via your localhost without the file ever touching the disk
+ssh "$master" cat /tmp/puppet-facts-export.tar.xz | ssh "$COMPILER" 'cat > puppet-facts-export.tar.xz'
+ssh "$master" 'sudo rm /tmp/puppet-facts-export.tar.xz'
+ssh "$COMPILER" 'tar Jxf puppet-facts-export.tar.xz --directory /tmp/facts && 
sudo chown -R jenkins-deploy:wikidev /tmp/facts/yaml'
 # Finally, copy all the facts to destination and cleanup
-ssh "$COMPILER" 'sudo rsync --delete -au /tmp/catalogs/yaml/ 
/var/lib/catalog-differ/puppet/yaml/ && sudo rm -rf /tmp/catalogs'
+ssh "$COMPILER" 'sudo rsync --delete -au /tmp/facts/yaml/ 
/var/lib/catalog-differ/puppet/yaml/ && sudo rm -rf /tmp/facts'
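
For reference, the "tunnel via your localhost" step above is just two ssh
processes connected by a pipe, so the tarball never touches the local disk. A
rough Python equivalent (purely illustrative, not part of this change) would
look like this:

    import subprocess

    def stream_between_hosts(src_host, src_path, dst_host, dst_path):
        # ssh src 'cat file' | ssh dst 'cat > file', relayed through this host
        reader = subprocess.Popen(['ssh', src_host, 'cat', src_path],
                                  stdout=subprocess.PIPE)
        writer = subprocess.Popen(['ssh', dst_host, 'cat > {}'.format(dst_path)],
                                  stdin=reader.stdout)
        reader.stdout.close()  # let the writer see EOF when the reader exits
        writer.communicate()
        if reader.wait() != 0 or writer.returncode != 0:
            raise RuntimeError('transfer failed')
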
diff --git a/modules/puppetmaster/files/puppet-facts-export b/modules/puppetmaster/files/puppet-facts-export
deleted file mode 100755
index 5ffc99a..0000000
--- a/modules/puppetmaster/files/puppet-facts-export
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-set -e
-set -u
-
-if [ `whoami` != "root" ]; then
-    echo "Needs to be run as root"
-    exit
-fi
-
-tmpdir=$(mktemp -d)
-mkdir -p $tmpdir/yaml/facts
-outfile=/tmp/puppet-facts-export.tar.xz
-factsdir=/var/lib/puppet/yaml/facts
-
-function cleanup() {
-    rm -rf "$tmpdir"
-}
-
-trap cleanup EXIT
-
-rsync -a0 \
-    --files-from=<(find $factsdir -type f -mtime -7 -printf "%f\0") $factsdir \
-    "$tmpdir/yaml/facts"
-chown -R "${USER}" "$tmpdir/yaml"
-
-for FILE in "${factsdir}"/*.yaml; do
-    TIME=$(stat -c "%y" "${FILE}")
-    sed -i -e 's@uniqueid:.*@uniqueid: "43434343"@' \
-        -e 's@boardserialnumber:.*@boardserialnumber: "4242"@' \
-        -e 's@boardproductname:.*@boardproductname: "424242"@' \
-        -e 's@serialnumber:.*@serialnumber: "42424242"@' \
-        -e '/^ *trusted\:/ d' "${FILE}"
-    touch -d "${TIME}" "${FILE}"
-done
-
-tar cJvf $outfile --directory "$tmpdir" yaml
-
-echo "puppet facts sanitized and exported at $outfile"
diff --git a/modules/puppetmaster/files/puppet-facts-export.py b/modules/puppetmaster/files/puppet-facts-export.py
new file mode 100755
index 0000000..b34509b
--- /dev/null
+++ b/modules/puppetmaster/files/puppet-facts-export.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python3
+import configparser
+import os
+import shutil
+import subprocess
+import tempfile
+
+import requests
+import yaml
+
+from datetime import datetime, timedelta
+
+
+class PuppetDBApi(object):
+    def __init__(self, puppetdb_config_file):
+        config = configparser.ConfigParser()
+        config.read(puppetdb_config_file)
+        self.host = config['main']['server']
+        self.port = config['main']['port']
+
+    def url_for(self, endpoint):
+        # TODO: use /pdb/query/v4 when we upgrade.
+        return 'https://{h}:{p}/v4/{ep}'.format(h=self.host, p=self.port, ep=endpoint)
+
+    def get(self, endpoint):
+        return requests.get(self.url_for(endpoint)).json()
+
+
+def main():
+    date_format = '%Y-%m-%d %H:%M:%S.%f +00:00'  # fractional seconds, as in Puppet's facts YAML
+    datetime_facts = datetime.utcnow()
+    ts = datetime_facts.strftime(date_format)
+    exp = (datetime_facts + timedelta(days=1)).strftime(date_format)
+
+    outfile = '/tmp/puppet-facts-export.tar.xz'
+    tmpdir = tempfile.mkdtemp(dir='/tmp', prefix='puppetdb-export')
+    factsdir = os.path.join(tmpdir, 'yaml', 'facts')
+    print("Saving facts to {}".format(factsdir))
+    os.makedirs(factsdir)
+    conf = os.environ.get('PUPPETDB_CONFIG_FILE', '/etc/puppet/puppetdb.conf')
+    pdb = PuppetDBApi(conf)
+    for node in pdb.get('nodes/'):
+        if node.get('deactivated', True) is not None:
+            continue  # 'deactivated' is null only for active nodes; skip the rest
+        nodename = node['certname']
+        yaml_data = {}
+        facts = pdb.get('nodes/{}/facts'.format(nodename))
+        for fact in facts:
+            yaml_data[fact['name']] = fact['value']
+        filename = os.path.join(factsdir, "{}.yaml".format(nodename))
+        # Anonymize potentially sensitive data
+        yaml_data['uniqueid'] = '43434343'
+        yaml_data['boardserialnumber'] = '4242'
+        yaml_data['boardproductname'] = '424242'
+        yaml_data['serialnumber'] = '42424242'
+        yaml_data.pop('trusted', None)  # may be absent from the PuppetDB facts
+        print('Writing {}'.format(filename))
+        with open(filename, 'w') as fh:
+            contents = yaml.dump({'name': nodename, 'values': yaml_data,
+                                  'timestamp': ts, 'expiration': exp})
+            fh.write('--- !ruby/object:Puppet::Node::Facts\n' + contents)
+    subprocess.check_call(['tar', 'cJvf', outfile, '--directory', tmpdir, 'yaml'])
+    shutil.rmtree(tmpdir)
+
+
+if __name__ == '__main__':
+    main()
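
A quick way to spot-check one of the exported files with PyYAML (a sketch; the
tag-constructor registration and the sample path/filename are assumptions, not
part of this change):

    import yaml

    # The exporter tags each document as a Ruby object, which PyYAML does not
    # know about; map the tag to a plain dict so safe_load can parse it.
    yaml.SafeLoader.add_constructor(
        '!ruby/object:Puppet::Node::Facts',
        lambda loader, node: loader.construct_mapping(node, deep=True))

    # hypothetical exported file on the compiler host
    with open('/tmp/facts/yaml/facts/somehost.eqiad.wmnet.yaml') as fh:
        doc = yaml.safe_load(fh)

    print(doc['name'], doc['timestamp'])
    print(doc['values']['serialnumber'])  # should print the anonymized '42424242'
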
diff --git a/modules/puppetmaster/manifests/scripts.pp b/modules/puppetmaster/manifests/scripts.pp
index 585072e..3decc1b 100644
--- a/modules/puppetmaster/manifests/scripts.pp
+++ b/modules/puppetmaster/manifests/scripts.pp
@@ -23,12 +23,13 @@
     }
 
     # export and sanitize facts for puppet compiler
+    require_package('python3-requests', 'python3-yaml')
     file {'/usr/local/bin/puppet-facts-export':
         ensure => present,
         owner  => 'root',
         group  => 'root',
         mode   => '0555',
-        source => 'puppet:///modules/puppetmaster/puppet-facts-export',
+        source => 'puppet:///modules/puppetmaster/puppet-facts-export.py',
     }
 
     # Clear out older reports

-- 
To view, visit https://gerrit.wikimedia.org/r/398795
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: Iec9d46d00e68eedf210c88ca1a1a810506f5bfd3
Gerrit-PatchSet: 6
Gerrit-Project: operations/puppet
Gerrit-Branch: production
Gerrit-Owner: Giuseppe Lavagetto <glavage...@wikimedia.org>
Gerrit-Reviewer: Alexandros Kosiaris <akosia...@wikimedia.org>
Gerrit-Reviewer: Elukey <ltosc...@wikimedia.org>
Gerrit-Reviewer: Giuseppe Lavagetto <glavage...@wikimedia.org>
Gerrit-Reviewer: Volans <rcocci...@wikimedia.org>
Gerrit-Reviewer: jenkins-bot <>

_______________________________________________
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits
