author     Hugo Hörnquist <hugo@lysator.liu.se>  2023-06-04 01:59:54 +0200
committer  Hugo Hörnquist <hugo@lysator.liu.se>  2023-06-04 01:59:56 +0200
commit     07ba1d8057e1bee6b7ed14a4d51815ef3161dd77 (patch)
tree       02689fba4776c3bc5c69ec6f06694e8389ae3152
parent     Add summary to main index page. (diff)
Split __main__ into multiple modules.
This also allows sphinx to actually find these declarations.
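
For reference, a rough sketch of how the split-out modules now fit together, based on the new imports in muppet/__main__.py below. The body of the final for-loop is not visible in this diff, so the setup_module call at the end is an assumption:

    from muppet.cache import Cache
    from muppet.gather import get_modules
    from muppet.output import setup_index, setup_module

    # Gather `puppet strings` output and metadata for every module in the
    # environment, reusing cached results where possible.
    cache = Cache('/home/hugo/.cache/puppet-doc')
    modules = get_modules(cache, '/etc/puppetlabs/code/modules')

    # Write the top-level index, then one output directory per module.
    setup_index('output', modules)
    for module in modules:
        setup_module('output', module)  # assumed; the loop body is cut off below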
-rw-r--r--  muppet/__init__.py          6
-rw-r--r--  muppet/__main__.py        400
-rw-r--r--  muppet/gather.py          103
-rw-r--r--  muppet/output.py          270
-rw-r--r--  muppet/puppet/strings.py   19
-rw-r--r--  muppet/util.py             28
-rw-r--r--  templates/index.html        1
7 files changed, 431 insertions, 396 deletions
diff --git a/muppet/__init__.py b/muppet/__init__.py
index a4e55ec..d48d2b3 100644
--- a/muppet/__init__.py
+++ b/muppet/__init__.py
@@ -1 +1,7 @@
+"""
+Muppet - Multiple docstrings for puppet.
+
+This entry point currently only contains the version...
+"""
+
VERSION = '0.0.1'
diff --git a/muppet/__main__.py b/muppet/__main__.py
index 88d6968..a71363c 100644
--- a/muppet/__main__.py
+++ b/muppet/__main__.py
@@ -1,42 +1,10 @@
"""New, better, entry point."""
import argparse
-import os
-import os.path
-from dataclasses import dataclass
-import hashlib
-from jinja2 import (
- Environment,
- # PackageLoader,
- FileSystemLoader,
-)
-import pathlib
-import json
-from typing import (
- Any,
- TypeVar,
- Callable,
- TypedDict,
- NotRequired,
-)
-from collections.abc import (
- Iterable,
- Sequence,
-)
from .cache import Cache
-from .puppet.strings import puppet_strings
-from .format import (
- format_class,
- format_type_alias,
-)
-from .lookup import lookup, Ref
-from commonmark import commonmark
-
-jinja = Environment(
- loader=FileSystemLoader('templates'),
- autoescape=False,
-)
+from .gather import get_modules
+from .output import setup_index, setup_module
parser = argparse.ArgumentParser(
prog='puppet-doc configure',
@@ -48,370 +16,10 @@ args = parser.parse_args()
env = args.env or '/etc/puppetlabs/code/modules'
-cache = Cache('/home/hugo/.cache/puppet-doc')
-
-
-@dataclass
-class ModuleEntry:
- """
- One entry in a module.
-
- Parameters:
- name - local name of the module, should always be the basename
- of path
- path - Absolute path in the filesystem where the module can be
- found.
- strings_output - output of `puppet strings`.
- """
-
- name: str
- path: str
- strings_output: bytes
- metadata: dict[str, Any]
-
- def file(self, path: str) -> str:
- """Return the absolute path of a path inside the module."""
- return os.path.join(self.path, path)
-
-
-def get_puppet_strings(path: str) -> bytes:
- """
- Run puppet string, but check cache first.
-
- The cache uses the contents of metadata.json as its key,
- so any updates without an updated metadata.json wont't be
- detected.
-
- Hashing the entire contents of the module was tested, but was to
- slow.
- """
- try:
- with open(os.path.join(path, 'metadata.json'), 'rb') as f:
- data = f.read()
- key = 'puppet-strings' + hashlib.sha1(data).hexdigest()
- if parsed := cache.get(key):
- result = parsed
- else:
- result = puppet_strings(path)
- cache.put(key, result)
- return result
- except FileNotFoundError:
- # TODO actually run puppet strings again.
- # This is just since without a metadata.json we always get a
- # cache miss, which is slow.
- # return puppet_strings(path)
- return b''
-
- # try:
- # with open(module.file('.git/FETCH_HEAD')) as f:
- # st = os.stat(f.fileno())
- # st.st_mtime
- # except FileNotFoundError:
- # pass
-
-
-def get_modules(dir: str) -> list[ModuleEntry]:
- """
- Enumerate modules in directory.
-
- The directory should be the modules subdirectory of an environment,
- e.g. /etc/puppetlabs/code/environments/production/modules.
- """
- modules: list[ModuleEntry] = []
-
- for entry in sorted(list(os.scandir(dir)), key=lambda d: d.name):
- # TODO Logging
- # print('- entry', entry, file=sys.stderr)
- name = entry.name
- path = os.path.join(env, entry)
- strings_data = get_puppet_strings(path)
-
- try:
- with open(os.path.join(path, 'metadata.json')) as f:
- metadata = json.load(f)
- except FileNotFoundError:
- metadata = {}
-
- modules.append(ModuleEntry(name, path, strings_data, metadata))
-
- return modules
-
-# --------------------------------------------------
-
-
-pathlib.Path('output').mkdir(exist_ok=True)
-
-
-T = TypeVar('T')
-U = TypeVar('U')
-
-
-def group_by(proc: Callable[[T], U], seq: Sequence[T]) -> dict[U, list[T]]:
- """
- Group elements in seq by proc.
-
- Return a dictionary mapping the result of proc onto lists of each
- element which evaluated to that key.
- """
- d: dict[U, list[T]] = {}
- for item in seq:
- key = proc(item)
- d[key] = (d.get(key) or []) + [item]
- return d
-
-
-def isprivate(entry: dict[str, Any]) -> bool:
- """
- Is the given puppet declaration marked private.
-
- Assumes input is a dictionary as returned by puppet strings, one
- of the entries in (for example) 'puppet_classes'.
-
- Currently only checks for an "@api private" tag.
- """
- if ds := entry.get('docstring'):
- if tags := ds.get('tags'):
- for tag in tags:
- if tag.get('tag_name') == 'api' and \
- tag.get('text') == 'private':
- return True
- return False
-
-
-def setup_index(base: str, modules: list[ModuleEntry]) -> None:
- """Create the main index.html file."""
- template = jinja.get_template('index.html')
- with open(os.path.join(base, 'index.html'), 'w') as f:
- f.write(template.render(modules=modules))
-
-
-class IndexItem(TypedDict):
- """A single list entry in a module index page."""
-
- name: str
- file: str
- summary: NotRequired[str]
-
-
-class IndexSubcategory(TypedDict):
- """A subheading on an index page."""
-
- title: str
- list: Iterable[IndexItem]
-
-
-class IndexCategory(TypedDict):
- """A top heading on an index page."""
-
- title: str
- list: Iterable[IndexSubcategory]
-
-
-def class_index(class_list: list) -> IndexCategory:
- """Prepage class index list."""
- groups = group_by(isprivate, class_list)
-
- lst: list[IndexSubcategory] = []
-
- if publics := groups.get(False):
- # print(publics[0]['docstring']['tags'])
- sublist: list[IndexItem] = []
- for i in publics:
- name = i['name']
- summary = lookup(i) \
- .ref('docstring') \
- .ref('tags') \
- .find(Ref('tag_name') == 'summary') \
- .ref('text') \
- .value()
-
- obj: IndexItem = {
- 'file': os.path.splitext(i['file'])[0],
- 'name': name,
- }
-
- if summary:
- obj['summary'] = commonmark(summary)
-
- sublist.append(obj)
-
- lst.append({
- 'title': 'Public Classes',
- 'list': sublist,
- })
-
- if privates := groups.get(True):
- lst.append({
- 'title': 'Private Classes',
- 'list': ({'name': i['name'],
- 'file': os.path.splitext(i['file'])[0]}
- for i in privates),
- })
-
- return {
- 'title': 'Classes',
- 'list': lst
- }
-
-
-def defined_types_index(defined_list: list) -> IndexCategory:
- """
- Prepare defined types index list.
-
- These are puppet types introduces by puppet code.
- Each only has one implemenattion.
- """
- groups = group_by(isprivate, defined_list)
-
- lst: list[IndexSubcategory] = []
-
- if publics := groups.get(False):
- lst.append({
- 'title': 'Public Defined Types',
- 'list': ({'name': i['name'],
- 'file': os.path.splitext(i['file'])[0]}
- for i in publics),
- })
-
- if privates := groups.get(True):
- lst.append({
- 'title': 'Private Defined Types',
- 'list': ({'name': i['name'],
- 'file': os.path.splitext(i['file'])[0]}
- for i in privates),
- })
-
- return {
- 'title': 'Defined Types',
- 'list': lst
- }
-
-
-def type_aliases_index(alias_list: list) -> IndexCategory:
- """Prepare type alias index list."""
- groups = group_by(isprivate, alias_list)
- lst: list[IndexSubcategory] = []
- if publics := groups.get(False):
- lst.append({
- 'title': 'Public Type Aliases',
- 'list': ({'name': i['name'],
- 'file': os.path.splitext(i['file'])[0]}
- for i in publics),
- })
-
- if privates := groups.get(True):
- lst.append({
- 'title': 'Private Type Aliases',
- 'list': ({'name': i['name'],
- 'file': os.path.splitext(i['file'])[0]}
- for i in privates),
- })
-
- return {
- 'title': 'Type Aliases',
- 'list': lst,
- }
-
-
-# def resource_types_index(resource_list: list) -> IndexCategory:
-# """
-# Prepare resource type index list.
-#
-# These are the resource types introduced through ruby. Each can
-# have multiple implementations.
-# """
-# return {}
-
-
-def setup_module_index(base: str, module: ModuleEntry, data: dict[str, Any]) -> None:
- """Create the index file for a specific module."""
- template = jinja.get_template('module_index.html')
-
- content = []
-
- content.append(class_index(data['puppet_classes']))
-
- data['data_types']
-
- content.append(type_aliases_index(data['data_type_aliases']))
-
- content.append(defined_types_index(data['defined_types']))
-
- data['resource_types']
- data['providers']
- data['puppet_functions']
- data['puppet_tasks']
- data['puppet_plans']
-
- with open(os.path.join(base, 'index.html'), 'w') as f:
- f.write(template.render(module_name=module.name,
- content=content))
-
-
-def setup_module(base: str, module: ModuleEntry) -> None:
- """
- Create all output files for a puppet module.
-
- Will generate a directory under base for the module.
- """
- path = os.path.join(base, module.name)
- pathlib.Path(path).mkdir(exist_ok=True)
- if not module.strings_output:
- return
- data = json.loads(module.strings_output)
-
- setup_module_index(path, module, data)
-
- for puppet_class in data['puppet_classes'] + data['defined_types']:
- # localpath = puppet_class['name'].split('::')
- localpath, _ = os.path.splitext(puppet_class['file'])
- dir = os.path.join(path, localpath)
- pathlib.Path(dir).mkdir(parents=True, exist_ok=True)
- # puppet_class['docstring']
- # puppet_class['defaults']
-
- # TODO option to add .txt extension (for web serverse which
- # treat .pp as application/binary)
- with open(os.path.join(dir, 'source.pp.txt'), 'w') as f:
- f.write(puppet_class['source'])
-
- with open(os.path.join(dir, 'source.json'), 'w') as f:
- json.dump(puppet_class, f, indent=2)
-
- # with open(os.path.join(dir, 'source.pp.html'), 'w') as f:
- # f.write(format_class(puppet_class))
-
- with open(os.path.join(dir, 'index.html'), 'w') as f:
- template = jinja.get_template('code_page.html')
- f.write(template.render(content=format_class(puppet_class)))
-
- # puppet_class['file']
- # puppet_class['line']
-
- for type_alias in data['data_type_aliases']:
- localpath, _ = os.path.splitext(type_alias['file'])
- dir = os.path.join(path, localpath)
- pathlib.Path(dir).mkdir(parents=True, exist_ok=True)
-
- with open(os.path.join(dir, 'source.pp.txt'), 'w') as f:
- f.write(type_alias['alias_of'])
-
- with open(os.path.join(dir, 'source.json'), 'w') as f:
- json.dump(type_alias, f, indent=2)
-
- template = jinja.get_template('code_page.html')
- with open(os.path.join(dir, 'index.html'), 'w') as f:
- f.write(template.render(content=format_type_alias(type_alias)))
-
- os.system("cp -r static output")
-
- # data['data_type_aliases']
- # data['defined_types']
- # data['resource_types']
-
def __main() -> None:
- modules = get_modules(env)
+ cache = Cache('/home/hugo/.cache/puppet-doc')
+ modules = get_modules(cache, env)
setup_index('output', modules)
for module in modules:
diff --git a/muppet/gather.py b/muppet/gather.py
new file mode 100644
index 0000000..d7a6645
--- /dev/null
+++ b/muppet/gather.py
@@ -0,0 +1,103 @@
+"""
+Methods for gathering data.
+
+Gathers information about all puppet modules, including which are
+present in our environment, their metadata, and their output of
+``puppet strings``.
+"""
+
+from dataclasses import dataclass
+from typing import (
+ Any,
+)
+import json
+import os.path
+import hashlib
+from .puppet.strings import puppet_strings
+from .cache import Cache
+
+
+@dataclass
+class ModuleEntry:
+ """
+ One entry in a module.
+
+ Parameters:
+ name - local name of the module, should always be the basename
+ of path
+ path - Absolute path in the filesystem where the module can be
+ found.
+ strings_output - output of `puppet strings`.
+ """
+
+ name: str
+ path: str
+ strings_output: bytes
+ metadata: dict[str, Any]
+
+ def file(self, path: str) -> str:
+ """Return the absolute path of a path inside the module."""
+ return os.path.join(self.path, path)
+
+
+def get_puppet_strings(cache: Cache, path: str) -> bytes:
+ """
+ Run puppet strings, but check the cache first.
+
+ The cache uses the contents of metadata.json as its key,
+ so any updates without an updated metadata.json won't be
+ detected.
+
+ Hashing the entire contents of the module was tested, but was too
+ slow.
+ """
+ try:
+ with open(os.path.join(path, 'metadata.json'), 'rb') as f:
+ data = f.read()
+ key = 'puppet-strings' + hashlib.sha1(data).hexdigest()
+ if parsed := cache.get(key):
+ result = parsed
+ else:
+ result = puppet_strings(path)
+ cache.put(key, result)
+ return result
+ except FileNotFoundError:
+ # TODO actually run puppet strings again.
+ # This is just since without a metadata.json we always get a
+ # cache miss, which is slow.
+ # return puppet_strings(path)
+ return b''
+
+ # try:
+ # with open(module.file('.git/FETCH_HEAD')) as f:
+ # st = os.stat(f.fileno())
+ # st.st_mtime
+ # except FileNotFoundError:
+ # pass
+
+
+def get_modules(cache: Cache, dir: str) -> list[ModuleEntry]:
+ """
+ Enumerate modules in directory.
+
+ The directory should be the modules subdirectory of an environment,
+ e.g. /etc/puppetlabs/code/environments/production/modules.
+ """
+ modules: list[ModuleEntry] = []
+
+ for entry in sorted(list(os.scandir(dir)), key=lambda d: d.name):
+ # TODO Logging
+ # print('- entry', entry, file=sys.stderr)
+ name = entry.name
+ path = os.path.join(dir, entry)
+ strings_data = get_puppet_strings(cache, path)
+
+ try:
+ with open(os.path.join(path, 'metadata.json')) as f:
+ metadata = json.load(f)
+ except FileNotFoundError:
+ metadata = {}
+
+ modules.append(ModuleEntry(name, path, strings_data, metadata))
+
+ return modules
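
A short usage sketch of the new gather module. The environment path here is an example; the cache location matches the one used in __main__.py above:

    from muppet.cache import Cache
    from muppet.gather import get_modules

    cache = Cache('/home/hugo/.cache/puppet-doc')
    modules = get_modules(
        cache, '/etc/puppetlabs/code/environments/production/modules')

    for m in modules:
        # ModuleEntry.file() resolves a path relative to the module root;
        # metadata is {} when the module has no metadata.json.
        print(m.name, m.metadata.get('version'), m.file('metadata.json'))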
diff --git a/muppet/output.py b/muppet/output.py
new file mode 100644
index 0000000..e5dca8c
--- /dev/null
+++ b/muppet/output.py
@@ -0,0 +1,270 @@
+"""
+Functions for actually generating output.
+
+Generates output strings and writes them to disk.
+"""
+
+import os
+import os.path
+import pathlib
+import json
+from .gather import ModuleEntry
+from jinja2 import (
+ Environment,
+ FileSystemLoader,
+)
+from .lookup import lookup, Ref
+from commonmark import commonmark
+from .format import (
+ format_class,
+ format_type_alias,
+)
+from typing import (
+ Any,
+ TypedDict,
+ NotRequired,
+)
+from collections.abc import (
+ Iterable,
+)
+from .util import group_by
+from .puppet.strings import isprivate
+
+
+pathlib.Path('output').mkdir(exist_ok=True)
+jinja = Environment(
+ loader=FileSystemLoader('templates'),
+ autoescape=False,
+)
+
+
+def setup_index(base: str, modules: list[ModuleEntry]) -> None:
+ """Create the main index.html file."""
+ template = jinja.get_template('index.html')
+ with open(os.path.join(base, 'index.html'), 'w') as f:
+ f.write(template.render(modules=modules))
+
+
+class IndexItem(TypedDict):
+ """A single list entry in a module index page."""
+
+ name: str
+ file: str
+ summary: NotRequired[str]
+
+
+class IndexSubcategory(TypedDict):
+ """A subheading on an index page."""
+
+ title: str
+ list: Iterable[IndexItem]
+
+
+class IndexCategory(TypedDict):
+ """A top heading on an index page."""
+
+ title: str
+ list: Iterable[IndexSubcategory]
+
+
+def class_index(class_list: list) -> IndexCategory:
+ """Prepage class index list."""
+ groups = group_by(isprivate, class_list)
+
+ lst: list[IndexSubcategory] = []
+
+ if publics := groups.get(False):
+ # print(publics[0]['docstring']['tags'])
+ sublist: list[IndexItem] = []
+ for i in publics:
+ name = i['name']
+ summary = lookup(i) \
+ .ref('docstring') \
+ .ref('tags') \
+ .find(Ref('tag_name') == 'summary') \
+ .ref('text') \
+ .value()
+
+ obj: IndexItem = {
+ 'file': os.path.splitext(i['file'])[0],
+ 'name': name,
+ }
+
+ if summary:
+ obj['summary'] = commonmark(summary)
+
+ sublist.append(obj)
+
+ lst.append({
+ 'title': 'Public Classes',
+ 'list': sublist,
+ })
+
+ if privates := groups.get(True):
+ lst.append({
+ 'title': 'Private Classes',
+ 'list': ({'name': i['name'],
+ 'file': os.path.splitext(i['file'])[0]}
+ for i in privates),
+ })
+
+ return {
+ 'title': 'Classes',
+ 'list': lst
+ }
+
+
+def defined_types_index(defined_list: list) -> IndexCategory:
+ """
+ Prepare defined types index list.
+
+ These are puppet types introduced by puppet code.
+ Each has only one implementation.
+ """
+ groups = group_by(isprivate, defined_list)
+
+ lst: list[IndexSubcategory] = []
+
+ if publics := groups.get(False):
+ lst.append({
+ 'title': 'Public Defined Types',
+ 'list': ({'name': i['name'],
+ 'file': os.path.splitext(i['file'])[0]}
+ for i in publics),
+ })
+
+ if privates := groups.get(True):
+ lst.append({
+ 'title': 'Private Defined Types',
+ 'list': ({'name': i['name'],
+ 'file': os.path.splitext(i['file'])[0]}
+ for i in privates),
+ })
+
+ return {
+ 'title': 'Defined Types',
+ 'list': lst
+ }
+
+
+def type_aliases_index(alias_list: list) -> IndexCategory:
+ """Prepare type alias index list."""
+ groups = group_by(isprivate, alias_list)
+ lst: list[IndexSubcategory] = []
+ if publics := groups.get(False):
+ lst.append({
+ 'title': 'Public Type Aliases',
+ 'list': ({'name': i['name'],
+ 'file': os.path.splitext(i['file'])[0]}
+ for i in publics),
+ })
+
+ if privates := groups.get(True):
+ lst.append({
+ 'title': 'Private Type Aliases',
+ 'list': ({'name': i['name'],
+ 'file': os.path.splitext(i['file'])[0]}
+ for i in privates),
+ })
+
+ return {
+ 'title': 'Type Aliases',
+ 'list': lst,
+ }
+
+
+# def resource_types_index(resource_list: list) -> IndexCategory:
+# """
+# Prepare resource type index list.
+#
+# These are the resource types introduced through ruby. Each can
+# have multiple implementations.
+# """
+# return {}
+
+
+def setup_module_index(base: str, module: ModuleEntry, data: dict[str, Any]) -> None:
+ """Create the index file for a specific module."""
+ template = jinja.get_template('module_index.html')
+
+ content = []
+
+ content.append(class_index(data['puppet_classes']))
+
+ data['data_types']
+
+ content.append(type_aliases_index(data['data_type_aliases']))
+
+ content.append(defined_types_index(data['defined_types']))
+
+ data['resource_types']
+ data['providers']
+ data['puppet_functions']
+ data['puppet_tasks']
+ data['puppet_plans']
+
+ with open(os.path.join(base, 'index.html'), 'w') as f:
+ f.write(template.render(module_name=module.name,
+ content=content))
+
+
+def setup_module(base: str, module: ModuleEntry) -> None:
+ """
+ Create all output files for a puppet module.
+
+ Will generate a directory under base for the module.
+ """
+ path = os.path.join(base, module.name)
+ pathlib.Path(path).mkdir(exist_ok=True)
+ if not module.strings_output:
+ return
+ data = json.loads(module.strings_output)
+
+ setup_module_index(path, module, data)
+
+ for puppet_class in data['puppet_classes'] + data['defined_types']:
+ # localpath = puppet_class['name'].split('::')
+ localpath, _ = os.path.splitext(puppet_class['file'])
+ dir = os.path.join(path, localpath)
+ pathlib.Path(dir).mkdir(parents=True, exist_ok=True)
+ # puppet_class['docstring']
+ # puppet_class['defaults']
+
+ # TODO option to add .txt extension (for web servers which
+ # treat .pp as application/binary)
+ with open(os.path.join(dir, 'source.pp.txt'), 'w') as f:
+ f.write(puppet_class['source'])
+
+ with open(os.path.join(dir, 'source.json'), 'w') as f:
+ json.dump(puppet_class, f, indent=2)
+
+ # with open(os.path.join(dir, 'source.pp.html'), 'w') as f:
+ # f.write(format_class(puppet_class))
+
+ with open(os.path.join(dir, 'index.html'), 'w') as f:
+ template = jinja.get_template('code_page.html')
+ f.write(template.render(content=format_class(puppet_class)))
+
+ # puppet_class['file']
+ # puppet_class['line']
+
+ for type_alias in data['data_type_aliases']:
+ localpath, _ = os.path.splitext(type_alias['file'])
+ dir = os.path.join(path, localpath)
+ pathlib.Path(dir).mkdir(parents=True, exist_ok=True)
+
+ with open(os.path.join(dir, 'source.pp.txt'), 'w') as f:
+ f.write(type_alias['alias_of'])
+
+ with open(os.path.join(dir, 'source.json'), 'w') as f:
+ json.dump(type_alias, f, indent=2)
+
+ template = jinja.get_template('code_page.html')
+ with open(os.path.join(dir, 'index.html'), 'w') as f:
+ f.write(template.render(content=format_type_alias(type_alias)))
+
+ os.system("cp -r static output")
+
+ # data['data_type_aliases']
+ # data['defined_types']
+ # data['resource_types']
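
For orientation, the *_index() helpers above all return the same nested IndexItem / IndexSubcategory / IndexCategory shape; an illustrative (made-up) value looks like:

    example: IndexCategory = {
        'title': 'Classes',
        'list': [
            {
                'title': 'Public Classes',
                'list': [
                    {
                        'name': 'example::server',    # illustrative name
                        'file': 'manifests/server',   # extension already stripped
                        'summary': '<p>Summary rendered from commonmark.</p>\n',
                    },
                ],
            },
        ],
    }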
diff --git a/muppet/puppet/strings.py b/muppet/puppet/strings.py
index f308985..0f4930d 100644
--- a/muppet/puppet/strings.py
+++ b/muppet/puppet/strings.py
@@ -1,6 +1,7 @@
"""Python wrapper around puppet strings."""
import subprocess
+from typing import Any
def puppet_strings(path: str) -> bytes:
@@ -13,3 +14,21 @@ def puppet_strings(path: str) -> bytes:
check=True,
stdout=subprocess.PIPE)
return cmd.stdout
+
+
+def isprivate(entry: dict[str, Any]) -> bool:
+ """
+ Check whether the given puppet declaration is marked private.
+
+ Assumes input is a dictionary as returned by puppet strings, one
+ of the entries in (for example) 'puppet_classes'.
+
+ Currently only checks for an "@api private" tag.
+ """
+ if ds := entry.get('docstring'):
+ if tags := ds.get('tags'):
+ for tag in tags:
+ if tag.get('tag_name') == 'api' and \
+ tag.get('text') == 'private':
+ return True
+ return False
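
A minimal sketch of the kind of dictionary isprivate() expects, shaped like one entry of the 'puppet_classes' list in puppet strings output (values illustrative):

    entry = {
        'name': 'example::internal',
        'file': 'manifests/internal.pp',
        'docstring': {
            'text': 'Internal helper class.',
            'tags': [{'tag_name': 'api', 'text': 'private'}],
        },
    }

    assert isprivate(entry) is True
    assert isprivate({'name': 'example', 'docstring': {'tags': []}}) is False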
diff --git a/muppet/util.py b/muppet/util.py
new file mode 100644
index 0000000..c12d15d
--- /dev/null
+++ b/muppet/util.py
@@ -0,0 +1,28 @@
+"""Various misc. utilities."""
+
+from typing import (
+ TypeVar,
+ Callable,
+)
+
+from collections.abc import (
+ Sequence,
+)
+
+
+T = TypeVar('T')
+U = TypeVar('U')
+
+
+def group_by(proc: Callable[[T], U], seq: Sequence[T]) -> dict[U, list[T]]:
+ """
+ Group elements in seq by proc.
+
+ Return a dictionary mapping the result of proc onto lists of each
+ element which evaluated to that key.
+ """
+ d: dict[U, list[T]] = {}
+ for item in seq:
+ key = proc(item)
+ d[key] = (d.get(key) or []) + [item]
+ return d
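
A quick usage example of group_by (output.py uses the same pattern with isprivate as the key function):

    from muppet.util import group_by

    # Group words by their length.
    by_length = group_by(len, ['a', 'bb', 'cc', 'ddd'])
    assert by_length == {1: ['a'], 2: ['bb', 'cc'], 3: ['ddd']}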
diff --git a/templates/index.html b/templates/index.html
index 8703117..241a8d1 100644
--- a/templates/index.html
+++ b/templates/index.html
@@ -14,6 +14,7 @@
</style>
</head>
<body>
+ <h1>Muppet Strings</h1>
<ul>
{% for module in modules %}
<li>