1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
|
# Run a single "find" pass to get a list of all files (with the .git
# directory excluded), then filter out what we need.
ALL_FILES := $(shell find . -name .git -prune -o -type f -print)
XMLS := $(filter %/text.xml,$(ALL_FILES))
SVGS := $(filter %.svg,$(ALL_FILES))
HTMLS := $(subst text.xml,index.html,$(XMLS))
ECLASS_HTMLS := $(filter ./eclass-reference/%/index.html,$(ALL_FILES))
IMAGES := $(patsubst %.svg,%.png,$(SVGS))
# Simple (:=) assignment for constant lists avoids needless
# re-expansion every time they are referenced.
CSS_FILES := devmanual.css offline.css
JS_FILES := search.js documents.js
# Installation locations; ?= lets users override prefix from the
# environment as well as from the command line.
prefix ?= /usr/local/share
docdir = $(prefix)/doc/devmanual
htmldir = $(docdir)
DESTDIR =
# Nonzero value disables external assets for offline browsing.
OFFLINE = 0
# Delete a half-written target when its recipe fails.  Several recipes
# redirect into $@ (documents.js, %.html); without this a truncated
# output file would look up to date on the next run.
.DELETE_ON_ERROR:
# Default goal: check the toolchain, validate the XML sources, then
# build all HTML pages, PNG images and the search index.
all: prereq validate build documents.js

# Fail early with a helpful message naming the Gentoo package to
# install if a required tool or font is missing.
prereq:
	@type rsvg-convert >/dev/null 2>&1 || \
	{ echo "gnome-base/librsvg is required" >&2;\
	exit 1; }
	@type xsltproc >/dev/null 2>&1 || \
	{ echo "dev-libs/libxslt with python is required" >&2;\
	exit 1; }
	@type xmllint >/dev/null 2>&1 || \
	{ echo "dev-libs/libxml2 is required" >&2;\
	exit 1; }
	@fc-list -q "Open Sans" || \
	{ echo "media-fonts/open-sans is required" >&2;\
	exit 1; }
# Build every HTML page and every rasterized image.
build: $(HTMLS) $(IMAGES)
# We need to parse all the XMLs every time, not just the ones
# that are newer than the target. This is because each search
# document in devmanual gets a unique ID, which is used to
# quickly tie search matches to the corresponding documents.
documents.js: bin/build_search_documents.py $(XMLS)
	@python3 bin/build_search_documents.py $(XMLS) > $@ && echo "$@ built"
# Rasterize each SVG source into a PNG of the same basename.
%.png: %.svg
	rsvg-convert $< --output=$@
# Secondary expansion allows us to use the automatic variable $@ in
# the prerequisites: each index.html depends on the text.xml in its
# own directory, plus the stylesheets shared by all pages.
#
# We use the pattern %.html rather than the more-sensible %index.html
# because the latter doesn't match our top-level index.html target.
#
.SECONDEXPANSION:
%.html: $$(dir $$@)text.xml devbook.xsl xsl/*.xsl
	xsltproc --param offline "$(OFFLINE)" devbook.xsl $< > $@
# Each HTML file must depend on its XML file with all its descendants
# (for the contents tree), all its ancestors (for breadcrumbs), and
# the previous and next documents (for backward and forward links).
# Generate the list of dependencies with XSLT, which appears to be a
# better tool for this than make.
# The sed loop (:x ... tx) repeatedly strips "component/../"
# sequences until none remain, normalizing the emitted paths.
.depend: $(XMLS) depend.xsl devbook.xsl
	@xsltproc depend.xsl $(XMLS) | sed ':x;s%[^ /]*/\.\./%%;tx' > $@
# Install the built site under $(DESTDIR)$(htmldir), preserving the
# source tree's directory layout.  JS assets are skipped when OFFLINE
# is nonzero, matching the offline build produced by devbook.xsl.
install: all
	set -e; \
	for file in $(HTMLS) $(ECLASS_HTMLS) $(IMAGES); do \
	install -d "$(DESTDIR)$(htmldir)"/$${file%/*}; \
	install -m 644 $${file} "$(DESTDIR)$(htmldir)"/$${file}; \
	done
	install -m 644 $(CSS_FILES) "$(DESTDIR)$(htmldir)"/
	if test $(OFFLINE) -eq 0; then \
	install -m 644 $(JS_FILES) "$(DESTDIR)$(htmldir)"/; \
	fi
# Validate every text.xml against the devbook DTD in one xmllint run.
validate:
	@xmllint --noout --dtdvalid devbook.dtd $(XMLS) \
	&& echo "xmllint validation successful"
# Run app-text/tidy-html5 on the output to detect mistakes.
# We have to loop through them because otherwise tidy won't
# tell you which file contains a mistake.
# The loop keeps going after a failure, remembering the last nonzero
# exit status so that the target itself fails if any file did.
tidy: $(HTMLS) $(ECLASS_HTMLS)
	@status=0; \
	for f in $^; do \
	output=$$(tidy -q -errors --drop-empty-elements no $${f} 2>&1) \
	|| { status=$$?; echo "Failed on $${f}:"; echo "$${output}"; }; \
	done; \
	test $${status} -eq 0 && echo "tidy validation successful"; \
	exit $${status}
# Remove everything the build generated; source XML/SVG files are kept.
clean:
	@rm -f $(HTMLS) $(IMAGES) documents.js .depend
.PHONY: all prereq build install validate tidy clean
# Pull in the generated inter-document dependencies; the leading "-"
# silences the warning on a fresh checkout (make then builds .depend
# via its rule above and restarts).
-include .depend
|