/* $Id: man_macro.c,v 1.56 2011/03/17 11:56:17 kristaps Exp $ */
/*
 * Copyright (c) 2008, 2009, 2010 Kristaps Dzonsons <kristaps@bsd.lv>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <assert.h>
#include <ctype.h>
#include <stdlib.h>
#include <string.h>

#include "mandoc.h"
#include "libman.h"

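/*
 * Return values for rew_dohalt() and rew_block(): close out scopes up
 * to and including the current node (REW_REWIND), keep ascending the
 * tree (REW_NOHALT), or stop without rewinding anything (REW_HALT).
 */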
enum rew {
	REW_REWIND,
	REW_NOHALT,
	REW_HALT
};

static int blk_close(MACRO_PROT_ARGS);
static int blk_exp(MACRO_PROT_ARGS);
static int blk_imp(MACRO_PROT_ARGS);
static int in_line_eoln(MACRO_PROT_ARGS);

static int rew_scope(enum man_type,
		struct man *, enum mant);
static enum rew rew_dohalt(enum mant, enum man_type,
		const struct man_node *);
static enum rew rew_block(enum mant, enum man_type,
		const struct man_node *);
static void rew_warn(struct man *,
		struct man_node *, enum mandocerr);

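/*
 * Per-macro parse table, indexed by macro token: each man(7) macro is
 * mapped to its parser callback and its scope flags.
 */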
const struct man_macro __man_macros[MAN_MAX] = {
	{ in_line_eoln, MAN_NSCOPED }, /* br */
	{ in_line_eoln, 0 }, /* TH */
	{ blk_imp, MAN_SCOPED }, /* SH */
	{ blk_imp, MAN_SCOPED }, /* SS */
	{ blk_imp, MAN_SCOPED | MAN_FSCOPED }, /* TP */
	{ blk_imp, 0 }, /* LP */
	{ blk_imp, 0 }, /* PP */
	{ blk_imp, 0 }, /* P */
	{ blk_imp, 0 }, /* IP */
	{ blk_imp, 0 }, /* HP */
	{ in_line_eoln, MAN_SCOPED }, /* SM */
	{ in_line_eoln, MAN_SCOPED }, /* SB */
	{ in_line_eoln, 0 }, /* BI */
	{ in_line_eoln, 0 }, /* IB */
	{ in_line_eoln, 0 }, /* BR */
	{ in_line_eoln, 0 }, /* RB */
	{ in_line_eoln, MAN_SCOPED }, /* R */
	{ in_line_eoln, MAN_SCOPED }, /* B */
	{ in_line_eoln, MAN_SCOPED }, /* I */
	{ in_line_eoln, 0 }, /* IR */
	{ in_line_eoln, 0 }, /* RI */
	{ in_line_eoln, MAN_NSCOPED }, /* na */
	{ in_line_eoln, MAN_NSCOPED }, /* sp */
	{ in_line_eoln, 0 }, /* nf */
	{ in_line_eoln, 0 }, /* fi */
	{ blk_close, 0 }, /* RE */
	{ blk_exp, MAN_EXPLICIT }, /* RS */
	{ in_line_eoln, 0 }, /* DT */
	{ in_line_eoln, 0 }, /* UC */
	{ in_line_eoln, 0 }, /* PD */
	{ in_line_eoln, 0 }, /* AT */
	{ in_line_eoln, 0 }, /* in */
	{ in_line_eoln, 0 }, /* ft */
};

const struct man_macro * const man_macros = __man_macros;


/*
 * Warn (with the code "er") when the node "n" being closed out is the
 * block of an explicit, non-roff macro.
 */
static void
rew_warn(struct man *m, struct man_node *n, enum mandocerr er)
{

	if (er == MANDOCERR_MAX || MAN_BLOCK != n->type)
		return;
	if (MAN_VALID & n->flags)
		return;
	if ( ! (MAN_EXPLICIT & man_macros[n->tok].flags))
		return;

	assert(er < MANDOCERR_FATAL);
	man_nmsg(m, n, er);
}


/*
 * Rewind scope. If a code "er" != MANDOCERR_MAX has been provided, it
 * will be used if an explicit block scope is being closed out.
 */
int
man_unscope(struct man *m, const struct man_node *to,
		enum mandocerr er)
{
	struct man_node *n;

	assert(to);

	/* LINTED */
	while (m->last != to) {
		/*
		 * Save the parent here, because we may delete the
		 * m->last node in the post-validation phase and reset
		 * it to m->last->parent, causing a step in the closing
		 * out to be lost.
		 */
		n = m->last->parent;
		rew_warn(m, m->last, er);
		if ( ! man_valid_post(m))
			return(0);
		m->last = n;
		assert(m->last);
	}

	rew_warn(m, m->last, er);
	if ( ! man_valid_post(m))
		return(0);

	m->next = MAN_ROOT == m->last->type ?
		MAN_NEXT_CHILD : MAN_NEXT_SIBLING;

	return(1);
}


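/*
 * Helper for rew_dohalt(): when looking for an enclosing "ntok"
 * block, decide whether to rewind to the node "n", halt outright, or
 * keep ascending the tree.
 */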
static enum rew
rew_block(enum mant ntok, enum man_type type, const struct man_node *n)
{

	if (MAN_BLOCK == type && ntok == n->parent->tok &&
			MAN_BODY == n->parent->type)
		return(REW_REWIND);
	return(ntok == n->tok ? REW_HALT : REW_NOHALT);
}


/*
 * There are three scope levels: scoped to the root (all), scoped to the
 * section (all less sections), and scoped to subsections (all less
 * sections and subsections).
 */
static enum rew
rew_dohalt(enum mant tok, enum man_type type, const struct man_node *n)
{
	enum rew c;

	/* We cannot progress beyond the root ever. */
	if (MAN_ROOT == n->type)
		return(REW_HALT);

	assert(n->parent);

	/* Normal nodes shouldn't go to the level of the root. */
	if (MAN_ROOT == n->parent->type)
		return(REW_REWIND);

	/* Already-validated nodes should be closed out. */
	if (MAN_VALID & n->flags)
		return(REW_NOHALT);

	/* First: rewind to ourselves. */
	if (type == n->type && tok == n->tok)
		return(REW_REWIND);

	/*
	 * Next follow the implicit scope-smashings as defined by man.7:
	 * section, sub-section, etc.
	 */

	switch (tok) {
	case (MAN_SH):
		break;
	case (MAN_SS):
		/* Rewind to a section, if a block. */
		if (REW_NOHALT != (c = rew_block(MAN_SH, type, n)))
			return(c);
		break;
	case (MAN_RS):
		/* Rewind to a subsection, if a block. */
		if (REW_NOHALT != (c = rew_block(MAN_SS, type, n)))
			return(c);
		/* Rewind to a section, if a block. */
		if (REW_NOHALT != (c = rew_block(MAN_SH, type, n)))
			return(c);
		break;
	default:
		/* Rewind to an offsetter, if a block. */
		if (REW_NOHALT != (c = rew_block(MAN_RS, type, n)))
			return(c);
		/* Rewind to a subsection, if a block. */
		if (REW_NOHALT != (c = rew_block(MAN_SS, type, n)))
			return(c);
		/* Rewind to a section, if a block. */
		if (REW_NOHALT != (c = rew_block(MAN_SH, type, n)))
			return(c);
		break;
	}

	return(REW_NOHALT);
}


/*
 * Rewinding entails ascending the parse tree until a coherent point,
 * for example, the `SH' macro will close out any intervening `SS'
 * scopes. When a scope is closed, it must be validated and actioned.
 */
static int
rew_scope(enum man_type type, struct man *m, enum mant tok)
{
	struct man_node *n;
	enum rew c;

	/* LINTED */
	for (n = m->last; n; n = n->parent) {
		/*
		 * Whether we should stop immediately (REW_HALT), stop
		 * and rewind until this point (REW_REWIND), or keep
		 * rewinding (REW_NOHALT).
		 */
		c = rew_dohalt(tok, type, n);
		if (REW_HALT == c)
			return(1);
		if (REW_REWIND == c)
			break;
	}

	/*
	 * Rewind until the current point. Warn if we're a roff
	 * instruction that's mowing over explicit scopes.
	 */
	assert(n);

	return(man_unscope(m, n, MANDOCERR_MAX));
}


/*
 * Close out a generic explicit macro.
 */
/* ARGSUSED */
int
blk_close(MACRO_PROT_ARGS)
{
	enum mant ntok;
	const struct man_node *nn;

	switch (tok) {
	case (MAN_RE):
		ntok = MAN_RS;
		break;
	default:
		abort();
		/* NOTREACHED */
	}

	for (nn = m->last->parent; nn; nn = nn->parent)
		if (ntok == nn->tok)
			break;

	if (NULL == nn)
		man_pmsg(m, line, ppos, MANDOCERR_NOSCOPE);

	if ( ! rew_scope(MAN_BODY, m, ntok))
		return(0);
	if ( ! rew_scope(MAN_BLOCK, m, ntok))
		return(0);

	return(1);
}


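/*
 * Open a generic explicit macro: close out prior scopes, allocate the
 * block and head, parse the head arguments, then open the body.
 */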
/* ARGSUSED */
int
blk_exp(MACRO_PROT_ARGS)
{
	int w, la;
	char *p;

	/*
	 * Close out prior scopes. "Regular" explicit macros cannot be
	 * nested, but we allow roff macros to be placed just about
	 * anywhere.
	 */

	if ( ! rew_scope(MAN_BODY, m, tok))
		return(0);
	if ( ! rew_scope(MAN_BLOCK, m, tok))
		return(0);

	if ( ! man_block_alloc(m, line, ppos, tok))
		return(0);
	if ( ! man_head_alloc(m, line, ppos, tok))
		return(0);

	for (;;) {
		la = *pos;
		w = man_args(m, line, pos, buf, &p);

		if (-1 == w)
			return(0);
		if (0 == w)
			break;

		if ( ! man_word_alloc(m, line, la, p))
			return(0);
	}

	assert(m);
	assert(tok != MAN_MAX);

	if ( ! rew_scope(MAN_HEAD, m, tok))
		return(0);
	return(man_body_alloc(m, line, ppos, tok));
}


/*
 * Parse an implicit-block macro. These contain a MAN_HEAD and a
 * MAN_BODY contained within a MAN_BLOCK. Rules for closing out other
 * scopes, such as `SH' closing out an `SS', are defined in the rew
 * routines.
 */
/* ARGSUSED */
int
blk_imp(MACRO_PROT_ARGS)
{
	int w, la;
	char *p;
	struct man_node *n;

	/* Close out prior scopes. */

	if ( ! rew_scope(MAN_BODY, m, tok))
		return(0);
	if ( ! rew_scope(MAN_BLOCK, m, tok))
		return(0);

	/* Allocate new block & head scope. */

	if ( ! man_block_alloc(m, line, ppos, tok))
		return(0);
	if ( ! man_head_alloc(m, line, ppos, tok))
		return(0);

	n = m->last;

	/* Add line arguments. */

	for (;;) {
		la = *pos;
		w = man_args(m, line, pos, buf, &p);

		if (-1 == w)
			return(0);
		if (0 == w)
			break;

		if ( ! man_word_alloc(m, line, la, p))
			return(0);
	}

	/* Close out head and open body (unless MAN_SCOPED). */

	if (MAN_SCOPED & man_macros[tok].flags) {
		/* If we're forcing scope (`TP'), keep it open. */
		if (MAN_FSCOPED & man_macros[tok].flags) {
			m->flags |= MAN_BLINE;
			return(1);
		} else if (n == m->last) {
			m->flags |= MAN_BLINE;
			return(1);
		}
	}

	if ( ! rew_scope(MAN_HEAD, m, tok))
		return(0);
	return(man_body_alloc(m, line, ppos, tok));
}


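/*
 * Parse an in-line macro whose arguments run to the end of the line.
 * If the macro is MAN_SCOPED and given no arguments, its scope
 * instead extends over the next line.
 */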
/* ARGSUSED */
int
in_line_eoln(MACRO_PROT_ARGS)
{
	int w, la;
	char *p;
	struct man_node *n;

	if ( ! man_elem_alloc(m, line, ppos, tok))
		return(0);

	n = m->last;

	for (;;) {
		la = *pos;
		w = man_args(m, line, pos, buf, &p);

		if (-1 == w)
			return(0);
		if (0 == w)
			break;
		if ( ! man_word_alloc(m, line, la, p))
			return(0);
	}

	/*
	 * If no arguments are specified and this is MAN_SCOPED (i.e.,
	 * next-line scoped), then set our mode to indicate that we're
	 * waiting for terms to load into our context.
	 */

	if (n == m->last && MAN_SCOPED & man_macros[tok].flags) {
		assert( ! (MAN_NSCOPED & man_macros[tok].flags));
		m->flags |= MAN_ELINE;
		return(1);
	}

	/* Set ignorable context, if applicable. */

	if (MAN_NSCOPED & man_macros[tok].flags) {
		assert( ! (MAN_SCOPED & man_macros[tok].flags));
		m->flags |= MAN_ILINE;
	}

	/*
	 * Rewind our element scope. Note that when TH is pruned, we'll
	 * be back at the root, so make sure that we don't attach the
	 * next node as its sibling.
	 */

	for ( ; m->last; m->last = m->last->parent) {
		if (m->last == n)
			break;
		if (m->last->type == MAN_ROOT)
			break;
		if ( ! man_valid_post(m))
			return(0);
	}

	assert(m->last);

	/*
	 * Same here regarding whether we're back at the root.
	 */

	if (m->last->type != MAN_ROOT && ! man_valid_post(m))
		return(0);

	m->next = MAN_ROOT == m->last->type ?
		MAN_NEXT_CHILD : MAN_NEXT_SIBLING;

	return(1);
}


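/*
 * Close out all remaining scopes at the end of the document, warning
 * about any explicit scopes left open.
 */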
int
man_macroend(struct man *m)
{

	return(man_unscope(m, m->first, MANDOCERR_SCOPEEXIT));
}