about summary refs log tree commit diff
diff options
context:
space:
mode:
authorHugo Hörnquist <hugo@lysator.liu.se>2022-06-19 00:22:38 +0200
committerHugo Hörnquist <hugo@lysator.liu.se>2022-06-19 00:22:38 +0200
commit966495ab84cc36b7c8bfbd9e1861d564fdc2573d (patch)
treeebd829c7c7b0bfb7cf37f913fae4af3b8188d1df
parentAdd some python scripts. (diff)
downloadpuppet-classifier-966495ab84cc36b7c8bfbd9e1861d564fdc2573d.tar.gz
puppet-classifier-966495ab84cc36b7c8bfbd9e1861d564fdc2573d.tar.xz
Merge commit_classes into enumerate_classes.
-rwxr-xr-xcommit_classes.py39
-rwxr-xr-xenumerate_classes.py118
2 files changed, 88 insertions, 69 deletions
diff --git a/commit_classes.py b/commit_classes.py
deleted file mode 100755
index c6c8bc7..0000000
--- a/commit_classes.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python3
-import json
-
-from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
-
-import pyenc
-from pyenc.db import db
-import pyenc.model as model
-
-app = pyenc.create_app()
-app.app_context().push()
-
-
-"""
-Fetch all found classes from redis, handle the data, and commit it to
-our true database.
-"""
-
-# TODO this inserts already existing classes
-# It shouldn't
-for puppet_file in model.PuppetFile.query.all():
- data = json.loads(puppet_file.json)
- top = data['^']
- if top[0] == 'class':
- tmp = top[1]['#']
- idx = tmp.index('name')
- db.session.add(model.PuppetClass(
- class_name=tmp[idx + 1],
- comes_from=puppet_file))
- # print(tmp[idx + 1])
- elif top[0] == 'block':
- for element in top[1:]:
- if element['^'][0] == 'class':
- tmp = element['^'][1]['#']
- idx = tmp.index('name')
- db.session.add(model.PuppetClass(
- class_name=tmp[idx + 1],
- comes_from=puppet_file))
-db.session.commit()
diff --git a/enumerate_classes.py b/enumerate_classes.py
index bfa4343..c9e1c4b 100755
--- a/enumerate_classes.py
+++ b/enumerate_classes.py
@@ -2,8 +2,7 @@
"""
Loads all puppet files in environment, parse them, and store the
-parsed data to redis.
-Later run commit_classes to save them permanently.
+parsed data in the database.
"""
import subprocess
@@ -28,44 +27,85 @@ def find(path, **kvs):
return (f for f in cmd.stdout.split(b'\0') if f)
+class PuppetParseError(Exception):
+ def __init__(self, code, msg):
+ self.code = code
+ self.msg = msg
+
+ def __repr__(self):
+        return f'PuppetParseError({self.code}, {self.msg})'
+
+ def __str__(self):
+ return repr(self)
+
+
+def puppet_parse(file):
+ cmd = subprocess.Popen(
+ ['puppet', 'parser', 'dump', '--format', 'json', file],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ if cmd.returncode and cmd.returncode != 0:
+ raise PuppetParseError(cmd.returncode, cmd.stderr.read().decode('UTF-8'))
+ else:
+ json = cmd.stdout.read()
+
+ if (value := cmd.wait()) != 0:
+ raise PuppetParseError(value, cmd.stderr.read().decode('UTF-8'))
+
+ return json
+
+
def parse_files(files):
for i, file in enumerate(files):
- st = os.stat(file)
+ try:
+ st = os.stat(file)
- last_modify = st.st_mtime
- old_object = model.PuppetFile.query \
- .where(model.PuppetFile.path == file) \
- .first()
+ last_modify = st.st_mtime
+ old_object = model.PuppetFile.query \
+ .where(model.PuppetFile.path == file) \
+ .first()
- if old_object and old_object.last_parse > last_modify:
- # file unchanged since our last parse, skip
- continue
+ if old_object and old_object.last_parse > last_modify:
+ # file unchanged since our last parse, skip
+ continue
+
+ print(f'{i}/{len(files)}: {file}')
- print(f'{i}/{len(files)}: {file}')
-
- cmd = subprocess.Popen(
- ['puppet', 'parser', 'dump', '--format', 'json', file],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- if cmd.returncode and cmd.returncode != 0:
- print("Parsing failed")
- print(cmd.returncode)
- print(cmd.stderr.read())
- else:
if old_object:
m = old_object
else:
m = model.PuppetFile(path=file)
m.last_parse = time.time()
- m.json = cmd.stdout.read()
+ m.json = puppet_parse(file)
+
+ yield m
+
+ except PuppetParseError as e:
+ # TODO cache error
+ print('Error:', e)
+ continue
+
+
+def interpret_file(json_data):
+ """Find all classes in json-representation of file."""
+ top = json_data['^']
+ if top[0] == 'class':
+ tmp = top[1]['#']
+ idx = tmp.index('name')
+ return [tmp[idx + 1]]
+ # print(tmp[idx + 1])
+ elif top[0] == 'block':
+ ret_value = []
+ for element in top[1:]:
+ if element['^'][0] == 'class':
+ tmp = element['^'][1]['#']
+ idx = tmp.index('name')
+ ret_value.append(tmp[idx + 1])
+ return ret_value
+ else:
+ return []
- if cmd.wait() != 0:
- print("Parsing failed (late version)")
- print(cmd.stderr.read().decode('UTF-8'))
- continue
- db.session.add(m)
- db.session.commit()
def main():
@@ -77,8 +117,26 @@ def main():
files_gen = find(path, type='f', name='*.pp')
files = [f for f in files_gen]
- parse_files(files)
-
+ try:
+ for puppet_file in parse_files(files):
+ db.session.add(puppet_file)
+ finally:
+ db.session.commit()
+
+
+ try:
+ for puppet_file in model.PuppetFile.query.all():
+ try:
+ class_names = interpret_file(json.loads(puppet_file.json))
+ for class_name in class_names:
+ db.session.add(model.PuppetClass(
+ class_name=class_name,
+ comes_from=puppet_file))
+ except Exception as e:
+ print(e)
+ print(f'Failed: {puppet_file.path}')
+ finally:
+ db.session.commit()
if __name__ == '__main__':
main()