aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorHugo Hörnquist <hugo@lysator.liu.se>2022-06-18 03:15:19 +0200
committerHugo Hörnquist <hugo@lysator.liu.se>2022-06-18 03:15:19 +0200
commite1b9a007b2abaf6d6ec85986c0262a2abb86a889 (patch)
tree4971f5d643fc6c31c02bb2e8d6bac07db5784dfb
parentPydocstyle. (diff)
downloadpuppet-classifier-e1b9a007b2abaf6d6ec85986c0262a2abb86a889.tar.gz
puppet-classifier-e1b9a007b2abaf6d6ec85986c0262a2abb86a889.tar.xz
Add some python scripts.
-rwxr-xr-xcommit_classes.py39
-rwxr-xr-xenumerate_classes.py84
-rwxr-xr-ximport_yaml.py37
3 files changed, 160 insertions, 0 deletions
diff --git a/commit_classes.py b/commit_classes.py
new file mode 100755
index 0000000..c6c8bc7
--- /dev/null
+++ b/commit_classes.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+import json
+
+from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
+
+import pyenc
+from pyenc.db import db
+import pyenc.model as model
+
+app = pyenc.create_app()
+app.app_context().push()
+
+
+"""
+Read every parsed puppet file from the PuppetFile table, pull the
+class definitions out of its JSON parse tree, and commit PuppetClass rows.
+"""
+
+# TODO: this re-inserts classes that already exist in the table;
+# it should check for an existing row before adding.
+for puppet_file in model.PuppetFile.query.all():
+    data = json.loads(puppet_file.json)
+    top = data['^']
+    if top[0] == 'class':
+        tmp = top[1]['#']
+        idx = tmp.index('name')
+        db.session.add(model.PuppetClass(
+            class_name=tmp[idx + 1],
+            comes_from=puppet_file))
+        # the class name is the value following 'name' in the flat list
+    elif top[0] == 'block':
+        for element in top[1:]:
+            if element['^'][0] == 'class':
+                tmp = element['^'][1]['#']
+                idx = tmp.index('name')
+                db.session.add(model.PuppetClass(
+                    class_name=tmp[idx + 1],
+                    comes_from=puppet_file))
+db.session.commit()
diff --git a/enumerate_classes.py b/enumerate_classes.py
new file mode 100755
index 0000000..bfa4343
--- /dev/null
+++ b/enumerate_classes.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+
+"""
+Load all puppet files in the environment, parse them with
+`puppet parser dump`, and store the JSON parse trees on PuppetFile rows.
+Later run commit_classes to extract the class definitions.
+"""
+
+import subprocess
+import json
+import os
+import time
+
+import pyenc
+from pyenc.db import db
+import pyenc.model as model
+
+
+def find(path, **kvs):
+ """Wrapper around find(1)."""
+ cmdline = ['find', path]
+ for k, v in kvs.items():
+ cmdline.append(f'-{k}')
+ cmdline.append(v)
+ cmdline.append('-print0')
+
+ cmd = subprocess.run(cmdline, capture_output=True)
+ return (f for f in cmd.stdout.split(b'\0') if f)
+
+
+def parse_files(files):
+ for i, file in enumerate(files):
+ st = os.stat(file)
+
+ last_modify = st.st_mtime
+ old_object = model.PuppetFile.query \
+ .where(model.PuppetFile.path == file) \
+ .first()
+
+ if old_object and old_object.last_parse > last_modify:
+ # file unchanged since our last parse, skip
+ continue
+
+ print(f'{i}/{len(files)}: {file}')
+
+ cmd = subprocess.Popen(
+ ['puppet', 'parser', 'dump', '--format', 'json', file],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ if cmd.returncode and cmd.returncode != 0:
+ print("Parsing failed")
+ print(cmd.returncode)
+ print(cmd.stderr.read())
+ else:
+ if old_object:
+ m = old_object
+ else:
+ m = model.PuppetFile(path=file)
+ m.last_parse = time.time()
+ m.json = cmd.stdout.read()
+
+ if cmd.wait() != 0:
+ print("Parsing failed (late version)")
+ print(cmd.stderr.read().decode('UTF-8'))
+ continue
+
+ db.session.add(m)
+ db.session.commit()
+
+
+def main():
+ app = pyenc.create_app()
+ app.app_context().push()
+
+ path = '/var/lib/machines/busting/etc/puppetlabs/code/environments/production'
+
+ files_gen = find(path, type='f', name='*.pp')
+ files = [f for f in files_gen]
+
+ parse_files(files)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/import_yaml.py b/import_yaml.py
new file mode 100755
index 0000000..35558c8
--- /dev/null
+++ b/import_yaml.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+
+"""Import extisting nodes.yaml into database"""
+
+import json
+import yaml
+
+import pyenc
+from pyenc.db import db
+import pyenc.model as model
+
+app = pyenc.create_app()
+app.app_context().push()
+
+
+with open('/usr/local/puppet/nodes.yaml') as f:
+ data = yaml.full_load(f)
+
+
+for fqdn, val in data.items():
+ h = model.Host.query.where(model.Host.fqdn == fqdn).first()
+ if not h:
+ h = model.Host(fqdn=fqdn)
+ h.environment = val.get('environment')
+ print(h)
+
+ classes = val['classes']
+ if type(classes) == dict:
+ classes = classes.keys()
+ cls = model.PuppetClass.query \
+ .where(model.PuppetClass.class_name.in_(classes)).all()
+ print(cls)
+
+ h.classes.extend(cls)
+
+
+db.session.commit()