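"""AsciiDoc renderer for markdown-it token streams.

Inline text is escaped with AsciiDoc's predefined character-replacement
attributes; block-level tokens are emitted as the corresponding AsciiDoc
constructs (delimited blocks, admonitions, lists, labeled lists, listing
blocks).
"""
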
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import cast
from urllib.parse import quote

from .md import Renderer

from markdown_it.token import Token
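
# Translation table used by asciidoc_escape below: characters that are
# significant to AsciiDoc are replaced with the corresponding predefined
# attribute references, which expand back to the literal character.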
_asciidoc_escapes = {
    # escape all dots, just in case one is pasted at SOL
    ord('.'): "{zwsp}.",
    # may be replaced by typographic variants
    ord("'"): "{apos}",
    ord('"'): "{quot}",
    # passthrough character
    ord('+'): "{plus}",
    # table marker
    ord('|'): "{vbar}",
    # xml entity reference
    ord('&'): "{amp}",
    # crossrefs. < needs extra escaping because links break in odd ways if they start with it
    ord('<'): "{zwsp}+<+{zwsp}",
    ord('>'): "{gt}",
    # anchors, links, block attributes
    ord('['): "{startsb}",
    ord(']'): "{endsb}",
    # superscript, subscript
    ord('^'): "{caret}",
    ord('~'): "{tilde}",
    # bold
    ord('*'): "{asterisk}",
    # backslash
    ord('\\'): "{backslash}",
    # inline code
    ord('`'): "{backtick}",
}
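
# Example results of asciidoc_escape (defined below):
#   asciidoc_escape("*foo* [bar]") == "{asterisk}foo{asterisk} {startsb}bar{endsb}"
#   asciidoc_escape("a :: b") == "a {two-colons} b"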
def asciidoc_escape(s: str) -> str:
    s = s.translate(_asciidoc_escapes)
    # :: marks a deflist item; ;; also has a replacement, though it's unclear why
    return s.replace("::", "{two-colons}").replace(";;", "{two-semicolons}")
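
# Renderer state: List records the marker of the list currently being
# emitted; Par records how paragraphs are separated inside the current block
# and which delimiter ('====', '=====', ...) closes it.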
@dataclass(kw_only=True)
class List:
    head: str

@dataclass()
class Par:
    sep: str
    block_delim: str
    continuing: bool = False

class AsciiDocRenderer(Renderer):
    __output__ = "asciidoc"

    _parstack: list[Par]
    _list_stack: list[List]
    _attrspans: list[str]

    def __init__(self, manpage_urls: Mapping[str, str]):
        super().__init__(manpage_urls)
        self._parstack = [ Par("\n\n", "====") ]
        self._list_stack = []
        self._attrspans = []
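
    # Block nesting: each nested block pushes a Par whose delimiter is one
    # '=' longer than the enclosing one; inside list items, paragraphs are
    # joined with AsciiDoc's list continuation ('+') rather than a blank
    # line. _break() emits the separator only once content has been written.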
    def _enter_block(self, is_list: bool) -> None:
        self._parstack.append(Par("\n+\n" if is_list else "\n\n", self._parstack[-1].block_delim + "="))
    def _leave_block(self) -> None:
        self._parstack.pop()
    def _break(self, force: bool = False) -> str:
        result = self._parstack[-1].sep if force or self._parstack[-1].continuing else ""
        self._parstack[-1].continuing = True
        return result
    def _admonition_open(self, kind: str) -> str:
        pbreak = self._break()
        self._enter_block(False)
        return f"{pbreak}[{kind}]\n{self._parstack[-2].block_delim}\n"
    def _admonition_close(self) -> str:
        self._leave_block()
        return f"\n{self._parstack[-1].block_delim}\n"
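
    # Lists are emitted as an (optionally empty) '[...]' attribute line
    # followed by '*' (bullet) or '.' (ordered) item markers; nested lists
    # repeat the marker to signal their depth.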
    def _list_open(self, token: Token, head: str) -> str:
        attrs = []
        if (idx := token.attrs.get('start')) is not None:
            attrs.append(f"start={idx}")
        if token.meta['compact']:
            attrs.append('options="compact"')
        if self._list_stack:
            head *= len(self._list_stack[-1].head) + 1
        self._list_stack.append(List(head=head))
        return f"{self._break()}[{','.join(attrs)}]"
    def _list_close(self) -> str:
        self._list_stack.pop()
        return ""
    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        return asciidoc_escape(token.content)
    def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._break()
    def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return ""
    def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return " +\n"
    def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return " "
    def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        return f"``{asciidoc_escape(token.content)}``"
    def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self.fence(token, tokens, i)
    def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        return f"link:{quote(cast(str, token.attrs['href']), safe='/:')}["
    def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "]"
    def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._enter_block(True)
        # allow the next token to be a block or an inline.
        return f'\n{self._list_stack[-1].head} {{empty}}'
    def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._leave_block()
        return "\n"
    def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._list_open(token, '*')
    def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._list_close()
    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "__"
    def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "__"
    def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "**"
    def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "**"
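
    # Fenced code becomes an AsciiDoc listing block delimited by '----',
    # with '[source,<lang>]' prepended when the fence carries an info string.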
    def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        attrs = f"[source,{token.info}]\n" if token.info else ""
        code = token.content
        if code.endswith('\n'):
            code = code[:-1]
        return f"{self._break(True)}{attrs}----\n{code}\n----"
    def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        pbreak = self._break(True)
        self._enter_block(False)
        return f"{pbreak}[quote]\n{self._parstack[-2].block_delim}\n"
    def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._leave_block()
        return f"\n{self._parstack[-1].block_delim}"
    def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("NOTE")
    def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("CAUTION")
    def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("IMPORTANT")
    def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("TIP")
    def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
    def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_open("WARNING")
    def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._admonition_close()
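
    # Definition lists map to AsciiDoc labeled lists: the term is followed by
    # '::' and its description is attached via list continuation ('+').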
    def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return f"{self._break()}[]"
    def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return ""
    def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._break()
    def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._enter_block(True)
        return ":: {empty}"
    def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return ""
    def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._leave_block()
        return "\n"
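
    # MyST roles: {manpage} references become links when a URL is known for
    # the page; otherwise roles render as inline code tagged with the role name.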
    def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        content = asciidoc_escape(token.content)
        if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
            return f"link:{quote(url, safe='/:')}[{content}]"
        return f"[.{token.meta['name']}]``{content}``"
    def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        return f"[[{token.attrs['id']}]]"
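
    # Spans with class 'keycap' become the kbd:[] macro; other classed spans
    # are delegated to the base renderer.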
    def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        self._parstack[-1].continuing = True
        (id_part, class_part) = ("", "")
        if id := token.attrs.get('id'):
            id_part = f"[[{id}]]"
        if s := token.attrs.get('class'):
            if s == 'keycap':
                class_part = "kbd:["
                self._attrspans.append("]")
            else:
                return super().attr_span_begin(token, tokens, i)
        else:
            self._attrspans.append("")
        return id_part + class_part
    def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._attrspans.pop()
    def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return token.markup.replace("#", "=") + " "
    def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "\n"
    def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._list_open(token, '.')
    def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return self._list_close()
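
# Hypothetical usage sketch (not part of this module): it assumes the base
# Renderer from .md exposes a render(tokens) entry point and that tokens come
# from a markdown-it parser configured with the extensions this renderer
# expects; check md.py for the actual driving interface.
#
#   from markdown_it import MarkdownIt
#   renderer = AsciiDocRenderer(manpage_urls={})
#   tokens = MarkdownIt().parse("Some *emphasised* text.")
#   print(renderer.render(tokens))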