path: root/repo/www/make-deps.py
#!/usr/bin/env python3

"""Write dependencies for all website pages in makefile syntax.

We want to compute:

- a list of leaf pages,
- a list of auto-generated indices,

- dependencies for leaf pages:
  OUTPUT/foo/bar.html: foo/bar.txt | OUTPUT/foo

    - special case for READMEs:
      OUTPUT/foo/index.html: foo/README.txt foo | OUTPUT/foo

- dependencies for auto-generated indices:
  OUTPUT/foo/index.html: foo | OUTPUT/foo
"""

from collections import defaultdict
from dataclasses import dataclass, field
from os import path
from subprocess import run
from sys import argv, exit
from typing import List, Set


@dataclass
class Directory:
    """Source files directly inside a directory and its immediate subfolders."""
    files: List[str] = field(default_factory=list)
    subfolders: Set[str] = field(default_factory=set)


def parse_arguments(args):
    """Return (extensions, top_dir, out_dir) parsed from the command line."""
    if len(args) != 4:
        exit(f'Usage: {args[0]} EXTENSIONS TOP-DIR OUTPUT-DIR')

    return args[1].split(), args[2], args[3]
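# For example, with hypothetical arguments
# parse_arguments(['make-deps.py', 'txt md', 'www', 'out'])
# returns (['txt', 'md'], 'www', 'out').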


def join(collections, joiner):
    """Flatten `collections` into one tuple, inserting `joiner` between them."""
    if not collections:
        return ()

    out = []

    for c in collections[:-1]:
        out.extend(c)
        out.append(joiner)

    out.extend(collections[-1])
    return tuple(out)
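# For example, join((('-name', '*.txt'), ('-name', '*.md')), '-o')
# returns ('-name', '*.txt', '-o', '-name', '*.md').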


def find_sources(extensions, top_dir):
    """Return every file under `top_dir` whose extension is in `extensions`."""
    filters = tuple(('-name', '*.' + ext) for ext in extensions)

    p = run(
        # TODO: use git ls-files
        ('find', top_dir) + join(filters, '-o'),
        capture_output=True, check=True, text=True
    )

    return p.stdout.splitlines()
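# With extensions ['txt', 'md'] (an illustrative choice), the command above is
# equivalent to running
#
#     find TOP-DIR -name '*.txt' -o -name '*.md'
#
# which prints every matching path, one per line.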


def compute_directories(files, top_dir):
    """Group files by directory (relative to `top_dir`) and record subfolders."""
    directories = defaultdict(Directory)

    for f in files:
        fpath, fname = path.split(f)
        fdir = (
            path.relpath(fpath, top_dir)
            if fpath != top_dir
            else ''
        )

        directories[fdir].files.append(fname)

        if fdir:
            parent, child = path.split(fdir)
            directories[parent].subfolders.add(child)

    return directories
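# For a hypothetical tree containing TOP/foo/bar.txt and TOP/foo/README.txt,
# this returns (as a defaultdict, with file order depending on `find`):
#
#     {'foo': Directory(files=['bar.txt', 'README.txt'], subfolders=set()),
#      '': Directory(files=[], subfolders={'foo'})}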


def write_dependencies(deps, directories, top_dir, out_dir):
    """Emit one make rule per page, then a `pages` variable listing them all."""
    pages = []

    for dpath, d in directories.items():

        for f in d.files:
            # A README becomes its directory's index page.
            name, _ = path.splitext(f)
            is_readme = name == 'README'
            if is_readme:
                name = 'index'

            html_dir = (
                path.join(out_dir, dpath)
                if dpath
                else out_dir
            )
            html_path = path.join(html_dir, name + '.html')

            src_dir = path.join(top_dir, dpath) if dpath else top_dir
            src_path = path.join(src_dir, f)

            # Per the module docstring, a README-driven index also depends on
            # the directory itself, so it is regenerated when files come and go.
            prereqs = f'{src_path} {src_dir}' if is_readme else src_path

            # `| {html_dir}` is an order-only prerequisite: the output
            # directory must exist first, but its timestamp never forces
            # a rebuild of the page.
            print(f'{html_path}: {prereqs} | {html_dir}', file=deps)
            pages.append(html_path)

    print(file=deps)
    print(f'pages = {" ".join(pages)}', file=deps)


def main(arguments):
    extensions, top_dir, out_dir = parse_arguments(arguments)
    source_files = find_sources(extensions, top_dir)

    directories = compute_directories(source_files, top_dir)

    with open('deps.mk', 'w') as deps:
        write_dependencies(deps, directories, top_dir, out_dir)


if __name__ == '__main__':
    main(argv)