/*
 * keyword-gen.c -- generate keyword scanner finite state machine and
 * keyword_text array.
 * This program is run to generate ntp_keyword.h
 */
#include <config.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

#include <ntp_stdlib.h>
#include <ntp_config.h>
#include <lib_strbuf.h>
#include "ntp_scanner.h"
#include "ntp_parser.h"


#ifdef QSORT_USES_VOID_P
typedef const void * QSORTP;
#else
typedef char * QSORTP;
#endif

/* Define a structure to hold a (keyword, token) pair */
struct key_tok {
	char *	key;		/* Keyword */
	int	token;		/* Associated Token */
	follby	followedby;	/* nonzero indicates the next token(s)
				   forced to be string(s) */
};

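/*
 * Illustration: the followedby field tells the scanner how to treat what
 * follows a keyword.  "driftfile" below uses FOLLBY_STRING, so in a
 * config line such as (hypothetical path)
 *
 *	driftfile /var/db/ntp.drift
 *
 * the path reaches the parser as a string token instead of being run
 * through the keyword FSM; FOLLBY_STRINGS_TO_EOC (e.g. "phone") extends
 * that treatment to every remaining token on the line.
 */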
struct key_tok ntp_keywords[] = {
	{ "...",	T_Ellipsis,	FOLLBY_TOKEN },
	{ "automax",	T_Automax,	FOLLBY_TOKEN },
	{ "broadcast",	T_Broadcast,	FOLLBY_STRING },
	{ "broadcastclient",	T_Broadcastclient,	FOLLBY_TOKEN },
	{ "broadcastdelay",	T_Broadcastdelay,	FOLLBY_TOKEN },
	{ "calldelay",	T_Calldelay,	FOLLBY_TOKEN },
	{ "disable",	T_Disable,	FOLLBY_TOKEN },
	{ "driftfile",	T_Driftfile,	FOLLBY_STRING },
	{ "enable",	T_Enable,	FOLLBY_TOKEN },
	{ "end",	T_End,		FOLLBY_TOKEN },
	{ "filegen",	T_Filegen,	FOLLBY_TOKEN },
	{ "fudge",	T_Fudge,	FOLLBY_STRING },
	{ "includefile",	T_Includefile,	FOLLBY_STRING },
	{ "leapfile",	T_Leapfile,	FOLLBY_STRING },
	{ "logconfig",	T_Logconfig,	FOLLBY_STRINGS_TO_EOC },
	{ "logfile",	T_Logfile,	FOLLBY_STRING },
	{ "manycastclient",	T_Manycastclient,	FOLLBY_STRING },
	{ "manycastserver",	T_Manycastserver,	FOLLBY_STRINGS_TO_EOC },
	{ "multicastclient",	T_Multicastclient,	FOLLBY_STRINGS_TO_EOC },
	{ "peer",	T_Peer,		FOLLBY_STRING },
	{ "phone",	T_Phone,	FOLLBY_STRINGS_TO_EOC },
	{ "pidfile",	T_Pidfile,	FOLLBY_STRING },
	{ "pool",	T_Pool,		FOLLBY_STRING },
	{ "discard",	T_Discard,	FOLLBY_TOKEN },
	{ "restrict",	T_Restrict,	FOLLBY_TOKEN },
	{ "server",	T_Server,	FOLLBY_STRING },
	{ "setvar",	T_Setvar,	FOLLBY_STRING },
	{ "statistics",	T_Statistics,	FOLLBY_TOKEN },
	{ "statsdir",	T_Statsdir,	FOLLBY_STRING },
	{ "tick",	T_Tick,		FOLLBY_TOKEN },
	{ "tinker",	T_Tinker,	FOLLBY_TOKEN },
	{ "tos",	T_Tos,		FOLLBY_TOKEN },
	{ "trap",	T_Trap,		FOLLBY_STRING },
	{ "unconfig",	T_Unconfig,	FOLLBY_STRING },
	{ "unpeer",	T_Unpeer,	FOLLBY_STRING },
	/* authentication_command */
	{ "controlkey",	T_ControlKey,	FOLLBY_TOKEN },
	{ "crypto",	T_Crypto,	FOLLBY_TOKEN },
	{ "keys",	T_Keys,		FOLLBY_STRING },
	{ "keysdir",	T_Keysdir,	FOLLBY_STRING },
	{ "ntpsigndsocket",	T_NtpSignDsocket,	FOLLBY_STRING },
	{ "requestkey",	T_Requestkey,	FOLLBY_TOKEN },
	{ "revoke",	T_Revoke,	FOLLBY_TOKEN },
	{ "trustedkey",	T_Trustedkey,	FOLLBY_TOKEN },
	/* IPv4/IPv6 protocol override flag */
	{ "-4",		T_Ipv4_flag,	FOLLBY_TOKEN },
	{ "-6",		T_Ipv6_flag,	FOLLBY_TOKEN },
	/* option */
	{ "autokey",	T_Autokey,	FOLLBY_TOKEN },
	{ "bias",	T_Bias,		FOLLBY_TOKEN },
	{ "burst",	T_Burst,	FOLLBY_TOKEN },
	{ "iburst",	T_Iburst,	FOLLBY_TOKEN },
	{ "key",	T_Key,		FOLLBY_TOKEN },
	{ "maxpoll",	T_Maxpoll,	FOLLBY_TOKEN },
	{ "minpoll",	T_Minpoll,	FOLLBY_TOKEN },
	{ "mode",	T_Mode,		FOLLBY_TOKEN },
	{ "noselect",	T_Noselect,	FOLLBY_TOKEN },
	{ "preempt",	T_Preempt,	FOLLBY_TOKEN },
	{ "true",	T_True,		FOLLBY_TOKEN },
	{ "prefer",	T_Prefer,	FOLLBY_TOKEN },
	{ "ttl",	T_Ttl,		FOLLBY_TOKEN },
	{ "version",	T_Version,	FOLLBY_TOKEN },
	{ "xleave",	T_Xleave,	FOLLBY_TOKEN },
	/* crypto_command */
	{ "host",	T_Host,		FOLLBY_STRING },
	{ "ident",	T_Ident,	FOLLBY_STRING },
	{ "pw",		T_Pw,		FOLLBY_STRING },
	{ "randfile",	T_Randfile,	FOLLBY_STRING },
	{ "sign",	T_Sign,		FOLLBY_STRING },
	{ "digest",	T_Digest,	FOLLBY_STRING },
	/*** MONITORING COMMANDS ***/
	/* stat */
	{ "clockstats",	T_Clockstats,	FOLLBY_TOKEN },
	{ "cryptostats",	T_Cryptostats,	FOLLBY_TOKEN },
	{ "loopstats",	T_Loopstats,	FOLLBY_TOKEN },
	{ "peerstats",	T_Peerstats,	FOLLBY_TOKEN },
	{ "rawstats",	T_Rawstats,	FOLLBY_TOKEN },
	{ "sysstats",	T_Sysstats,	FOLLBY_TOKEN },
	{ "protostats",	T_Protostats,	FOLLBY_TOKEN },
	{ "timingstats",	T_Timingstats,	FOLLBY_TOKEN },
	/* filegen_option */
	{ "file",	T_File,		FOLLBY_STRING },
	{ "link",	T_Link,		FOLLBY_TOKEN },
	{ "nolink",	T_Nolink,	FOLLBY_TOKEN },
	{ "type",	T_Type,		FOLLBY_TOKEN },
	/* filegen_type */
	{ "age",	T_Age,		FOLLBY_TOKEN },
	{ "day",	T_Day,		FOLLBY_TOKEN },
	{ "month",	T_Month,	FOLLBY_TOKEN },
	{ "none",	T_None,		FOLLBY_TOKEN },
	{ "pid",	T_Pid,		FOLLBY_TOKEN },
	{ "week",	T_Week,		FOLLBY_TOKEN },
	{ "year",	T_Year,		FOLLBY_TOKEN },
	/*** ORPHAN MODE COMMANDS ***/
	/* tos_option */
	{ "minclock",	T_Minclock,	FOLLBY_TOKEN },
	{ "maxclock",	T_Maxclock,	FOLLBY_TOKEN },
	{ "minsane",	T_Minsane,	FOLLBY_TOKEN },
	{ "floor",	T_Floor,	FOLLBY_TOKEN },
	{ "ceiling",	T_Ceiling,	FOLLBY_TOKEN },
	{ "cohort",	T_Cohort,	FOLLBY_TOKEN },
	{ "mindist",	T_Mindist,	FOLLBY_TOKEN },
	{ "maxdist",	T_Maxdist,	FOLLBY_TOKEN },
	{ "beacon",	T_Beacon,	FOLLBY_TOKEN },
	{ "orphan",	T_Orphan,	FOLLBY_TOKEN },
	/* access_control_flag */
	{ "default",	T_Default,	FOLLBY_TOKEN },
	{ "flake",	T_Flake,	FOLLBY_TOKEN },
	{ "ignore",	T_Ignore,	FOLLBY_TOKEN },
	{ "limited",	T_Limited,	FOLLBY_TOKEN },
	{ "mssntp",	T_Mssntp,	FOLLBY_TOKEN },
	{ "kod",	T_Kod,		FOLLBY_TOKEN },
	{ "lowpriotrap",	T_Lowpriotrap,	FOLLBY_TOKEN },
	{ "mask",	T_Mask,		FOLLBY_TOKEN },
	{ "nomodify",	T_Nomodify,	FOLLBY_TOKEN },
	{ "nopeer",	T_Nopeer,	FOLLBY_TOKEN },
	{ "noquery",	T_Noquery,	FOLLBY_TOKEN },
	{ "noserve",	T_Noserve,	FOLLBY_TOKEN },
	{ "notrap",	T_Notrap,	FOLLBY_TOKEN },
	{ "notrust",	T_Notrust,	FOLLBY_TOKEN },
	{ "ntpport",	T_Ntpport,	FOLLBY_TOKEN },
	/* discard_option */
	{ "average",	T_Average,	FOLLBY_TOKEN },
	{ "minimum",	T_Minimum,	FOLLBY_TOKEN },
	{ "monitor",	T_Monitor,	FOLLBY_TOKEN },
	/* fudge_factor */
	{ "flag1",	T_Flag1,	FOLLBY_TOKEN },
	{ "flag2",	T_Flag2,	FOLLBY_TOKEN },
	{ "flag3",	T_Flag3,	FOLLBY_TOKEN },
	{ "flag4",	T_Flag4,	FOLLBY_TOKEN },
	{ "refid",	T_Refid,	FOLLBY_STRING },
	{ "stratum",	T_Stratum,	FOLLBY_TOKEN },
	{ "time1",	T_Time1,	FOLLBY_TOKEN },
	{ "time2",	T_Time2,	FOLLBY_TOKEN },
	/* system_option */
	{ "auth",	T_Auth,		FOLLBY_TOKEN },
	{ "bclient",	T_Bclient,	FOLLBY_TOKEN },
	{ "calibrate",	T_Calibrate,	FOLLBY_TOKEN },
	{ "kernel",	T_Kernel,	FOLLBY_TOKEN },
	{ "ntp",	T_Ntp,		FOLLBY_TOKEN },
	{ "stats",	T_Stats,	FOLLBY_TOKEN },
	/* tinker_option */
	{ "step",	T_Step,		FOLLBY_TOKEN },
	{ "panic",	T_Panic,	FOLLBY_TOKEN },
	{ "dispersion",	T_Dispersion,	FOLLBY_TOKEN },
	{ "stepout",	T_Stepout,	FOLLBY_TOKEN },
	{ "allan",	T_Allan,	FOLLBY_TOKEN },
	{ "huffpuff",	T_Huffpuff,	FOLLBY_TOKEN },
	{ "freq",	T_Freq,		FOLLBY_TOKEN },
	/* miscellaneous_command */
	{ "port",	T_Port,		FOLLBY_TOKEN },
	{ "interface",	T_Interface,	FOLLBY_TOKEN },
	{ "qos",	T_Qos,		FOLLBY_TOKEN },
	{ "saveconfigdir",	T_Saveconfigdir,	FOLLBY_STRING },
	/* interface_command (ignore and interface already defined) */
	{ "nic",	T_Nic,		FOLLBY_TOKEN },
	{ "all",	T_All,		FOLLBY_TOKEN },
	{ "ipv4",	T_Ipv4,		FOLLBY_TOKEN },
	{ "ipv6",	T_Ipv6,		FOLLBY_TOKEN },
	{ "wildcard",	T_Wildcard,	FOLLBY_TOKEN },
	{ "listen",	T_Listen,	FOLLBY_TOKEN },
	{ "drop",	T_Drop,		FOLLBY_TOKEN },
	/* simulator commands */
	{ "simulate",	T_Simulate,	FOLLBY_TOKEN },
	{ "simulation_duration",	T_Sim_Duration,	FOLLBY_TOKEN },
	{ "beep_delay",	T_Beep_Delay,	FOLLBY_TOKEN },
	{ "duration",	T_Duration,	FOLLBY_TOKEN },
	{ "server_offset",	T_Server_Offset,	FOLLBY_TOKEN },
	{ "freq_offset",	T_Freq_Offset,	FOLLBY_TOKEN },
	{ "wander",	T_Wander,	FOLLBY_TOKEN },
	{ "jitter",	T_Jitter,	FOLLBY_TOKEN },
	{ "prop_delay",	T_Prop_Delay,	FOLLBY_TOKEN },
	{ "proc_delay",	T_Proc_Delay,	FOLLBY_TOKEN },
};


typedef struct big_scan_state_tag {
	char	ch;		/* Character this state matches on */
	char	followedby;	/* Forces next token(s) to T_String */
	u_short	finishes_token;	/* nonzero ID if last keyword char */
	u_short	match_next_s;	/* next state to check matching ch */
	u_short	other_next_s;	/* next state to check if not ch */
} big_scan_state;

/*
 * Note: to increase MAXSTATES beyond 2048, be aware it is currently
 * crammed into 11 bits in scan_state form. Raising to 4096 would be
 * relatively easy by storing the followedby value in a separate
 * array with one entry per token, and shrinking the char value to
 * 7 bits to free a bit for accepting/non-accepting. More than 4096
 * states will require expanding scan_state beyond 32 bits each.
 */
#define MAXSTATES	2048
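
/*
 * Sketch of the assumed packing of one scan_state entry (the actual
 * layout is defined by the S_ST() macro in ntp_scanner.h, not here);
 * the widths below simply mirror the range checks in generate_fsm():
 *
 *	ch		 8 bits
 *	followedby	 2 bits		(checked against ~0x3)
 *	match_next_s	11 bits		(checked against ~0x7ff)
 *	other_next_s	11 bits		(checked against ~0x7ff)
 *	--------------------------
 *	total		32 bits, hence the 2048-state (11-bit) limit.
 */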

const char *	current_keyword;	/* for error reporting */
big_scan_state	sst[MAXSTATES];		/* scanner FSM state entries */
int		sst_highwater;		/* next entry index to consider */
char *		symb[1024];		/* map token ID to symbolic name */

/* for libntp */
const char *	progname = "keyword-gen";
volatile int	debug = 1;

int		main			(int, char **);
static void	generate_preamble	(void);
static void	generate_fsm		(void);
static void	generate_token_text	(void);
static int	create_keyword_scanner	(void);
static int	create_scan_states	(char *, int, follby, int);
int		compare_key_tok_id	(QSORTP, QSORTP);
int		compare_key_tok_text	(QSORTP, QSORTP);
void		populate_symb		(char *);
const char *	symbname		(int);


int main(int argc, char **argv)
{
	if (argc < 2) {
		fprintf(stderr, "Usage:\n%s t_header.h\n", argv[0]);
		exit(1);
	}
	populate_symb(argv[1]);

	generate_preamble();
	generate_token_text();
	generate_fsm();

	return 0;
}


static void
generate_preamble(void)
{
	time_t now;
	char timestamp[128];
	char preamble[] =
		"/*\n"
		" * ntp_keyword.h\n"
		" * \n"
		" * NOTE: edit this file with caution, it is generated by keyword-gen.c\n"
		" *\t Generated %s UTC diff_ignore_line\n"
		" *\n"
		" */\n"
		"#include \"ntp_scanner.h\"\n"
		"#include \"ntp_parser.h\"\n"
		"\n";

	time(&now);
	if (!strftime(timestamp, sizeof(timestamp),
		      "%Y-%m-%d %H:%M:%S", gmtime(&now)))
		timestamp[0] = '\0';

	printf(preamble, timestamp);
}


static void
generate_fsm(void)
{
	char token_id_comment[128];
	int initial_state;
	int i;
	int token;

	/*
	 * Sort ntp_keywords in alphabetical keyword order. This is
	 * not necessary, but minimizes nonfunctional changes in the
	 * generated finite state machine when keywords are modified.
	 */
	qsort(ntp_keywords, COUNTOF(ntp_keywords),
	      sizeof(ntp_keywords[0]), compare_key_tok_text);

	/*
	 * To save space, reserve the state array entry matching each
	 * token number for its terminal state, so the token identifier
	 * does not need to be stored in each state, but can be
	 * recovered trivially. To mark the entry reserved,
	 * finishes_token is nonzero.
	 */
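	/*
	 * Illustration with a hypothetical token number: if T_Server were
	 * token 500, sst[500] is reserved here and later becomes the
	 * accepting state for "server" (finishes_token == 500), so the
	 * scanner can recover the token ID from the state index alone.
	 */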

	for (i = 0; i < COUNTOF(ntp_keywords); i++) {
		token = ntp_keywords[i].token;
		if (1 > token || token >= COUNTOF(sst)) {
			fprintf(stderr,
				"keyword-gen sst[%u] too small "
				"for keyword '%s' id %d\n",
				COUNTOF(sst),
				ntp_keywords[i].key,
				token);
			exit(4);
		}
		sst[token].finishes_token = token;
	}

	initial_state = create_keyword_scanner();

	fprintf(stderr,
		"%d keywords consumed %d states of %d max.\n",
		(int)COUNTOF(ntp_keywords),
		sst_highwater - 1,
		(int)COUNTOF(sst) - 1);

	printf("#define SCANNER_INIT_S %d\n\n", initial_state);

	printf("const scan_state sst[%d] = {\n"
	       "/*SS_T( ch,\tf-by, match, other ),\t\t\t\t */\n"
	       " 0,\t\t\t\t /* %5d %-17s */\n",
	       sst_highwater,
	       0, "");

	for (i = 1; i < sst_highwater; i++) {

		/* verify fields will fit */
		if (sst[i].followedby & ~0x3) {
			fprintf(stderr,
				"keyword-gen internal error "
				"sst[%d].followedby %d too big\n",
				i, sst[i].followedby);
			exit(7);
		}

		if (sst_highwater <= sst[i].match_next_s
		    || sst[i].match_next_s & ~0x7ff) {
			fprintf(stderr,
				"keyword-gen internal error "
				"sst[%d].match_next_s %d too big\n",
				i, sst[i].match_next_s);
			exit(8);
		}

		if (sst_highwater <= sst[i].other_next_s
		    || sst[i].other_next_s & ~0x7ff) {
			fprintf(stderr,
				"keyword-gen internal error "
				"sst[%d].other_next_s %d too big\n",
				i, sst[i].other_next_s);
			exit(9);
		}

		if (!sst[i].finishes_token)
			snprintf(token_id_comment,
				 sizeof(token_id_comment), "%5d %-17s",
				 i, (initial_state == i)
					? "initial state"
					: "");
		else {
			snprintf(token_id_comment,
				 sizeof(token_id_comment), "%5d %-17s",
				 i, symbname(sst[i].finishes_token));
			if (i != sst[i].finishes_token) {
				fprintf(stderr,
					"keyword-gen internal error "
					"entry %d finishes token %d\n",
					i, sst[i].finishes_token);
				exit(5);
			}
		}

		printf(" S_ST( '%c',\t%d, %5u, %5u )%s /* %s */\n",
		       sst[i].ch,
		       sst[i].followedby,
		       sst[i].match_next_s,
		       sst[i].other_next_s,
		       (i + 1 < sst_highwater)
			   ? ","
			   : " ",
		       token_id_comment);
	}

	printf("};\n\n");
}


/* Define a function to create the states of the scanner. This function
 * is used by the create_keyword_scanner function below.
 *
 * This function takes a suffix of a keyword, the token to be returned on
 * recognizing the complete keyword, and any pre-existing state that exists
 * for some other keyword that has the same prefix as the current one.
 */
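/*
 * Illustrative example: for the keywords "server" and "setvar" the states
 * for the shared prefix "se" are created only once; at the third character
 * the paths diverge, and the sibling states for 'r' and 't' are linked to
 * each other through other_next_s, the chain this function walks and keeps
 * sorted by character.
 */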
static int
create_scan_states(
	char *	text,
	int	token,
	follby	followedby,
	int	prev_state
	)
{
	int my_state;
	int return_state;
	int prev_char_s;
	int curr_char_s;

	return_state = prev_state;
	curr_char_s = prev_state;
	prev_char_s = 0;

	/* Find the correct position to insert the state.
	 * All states should be in alphabetical order
	 */
	while (curr_char_s && (text[0] < sst[curr_char_s].ch)) {
		prev_char_s = curr_char_s;
		curr_char_s = sst[curr_char_s].other_next_s;
	}

	/*
	 * Check if a previously seen keyword has the same prefix as
	 * the current keyword. If so, simply use the state for that
	 * keyword as my_state, otherwise, allocate a new state.
	 */
	if (curr_char_s && (text[0] == sst[curr_char_s].ch)) {
		my_state = curr_char_s;
		if ('\0' == text[1]) {
			fprintf(stderr,
				"Duplicate entries for keyword '%s' in"
449: " keyword_gen.c ntp_keywords[].\n",
				current_keyword);
			exit(2);
		}
	} else {
		do
			my_state = sst_highwater++;
		while (my_state < COUNTOF(sst)
		       && sst[my_state].finishes_token);
		if (my_state >= COUNTOF(sst)) {
			fprintf(stderr,
				"fatal, keyword scanner state array "
				"sst[%d] is too small, modify\n"
				"keyword-gen.c to increase.\n",
				(int)COUNTOF(sst));
			exit(3);
		}
		/* Store the next character of the keyword */
		sst[my_state].ch = text[0];
		sst[my_state].other_next_s = curr_char_s;
		sst[my_state].followedby = FOLLBY_NON_ACCEPTING;

		if (prev_char_s)
			sst[prev_char_s].other_next_s = my_state;
		else
			return_state = my_state;
	}

	/* Check if the next character is '\0'.
	 * If yes, we are done with the recognition and this is an accepting
	 * state.
	 * If not, we need to continue scanning
	 */
	if ('\0' == text[1]) {
		sst[my_state].finishes_token = (u_short)token;
		sst[my_state].followedby = (char)followedby;

		if (sst[token].finishes_token != (u_short)token) {
			fprintf(stderr,
				"fatal, sst[%d] not reserved for %s.\n",
				token, symbname(token));
			exit(6);
		}
		/* relocate so token id is sst[] index */
		if (my_state != token) {
			sst[token] = sst[my_state];
			memset(&sst[my_state], 0,
			       sizeof(sst[my_state]));
			do
				sst_highwater--;
			while (sst[sst_highwater].finishes_token);
			my_state = token;
			if (prev_char_s)
				sst[prev_char_s].other_next_s = my_state;
			else
				return_state = my_state;
		}
	} else
		sst[my_state].match_next_s =
		    create_scan_states(
			&text[1],
			token,
			followedby,
			sst[my_state].match_next_s);

	return return_state;
}


/* Define a function that takes a list of (keyword, token) values and
 * creates a keyword scanner out of it.
 */

static int
create_keyword_scanner(void)
{
	int scanner;
	int i;

	sst_highwater = 1;	/* index 0 invalid, unused */
	scanner = 0;

	for (i = 0; i < COUNTOF(ntp_keywords); i++) {
		current_keyword = ntp_keywords[i].key;
		scanner =
		    create_scan_states(
			ntp_keywords[i].key,
			ntp_keywords[i].token,
			ntp_keywords[i].followedby,
			scanner);
	}

	return scanner;
}


static void
generate_token_text(void)
{
	int lowest_id;
	int highest_id;
	int id_count;
	int id;
	int i;

	/* sort ntp_keywords in token ID order */
	qsort(ntp_keywords, COUNTOF(ntp_keywords),
	      sizeof(ntp_keywords[0]), compare_key_tok_id);

	lowest_id = ntp_keywords[0].token;
	highest_id = ntp_keywords[COUNTOF(ntp_keywords) - 1].token;
	id_count = highest_id - lowest_id + 1;

	printf("#define LOWEST_KEYWORD_ID %d\n\n", lowest_id);

	printf("const char * const keyword_text[%d] = {", id_count);

	id = lowest_id;
	i = 0;
	while (i < COUNTOF(ntp_keywords)) {
		while (id < ntp_keywords[i].token) {
			printf(",\n\t/* %-5d %5d %20s */\tNULL",
			       id - lowest_id, id, symbname(id));
			id++;
		}
		if (i > 0)
			printf(",");
		printf("\n\t/* %-5d %5d %20s */\t\"%s\"",
		       id - lowest_id, id, symbname(id),
		       ntp_keywords[i].key);
		i++;
		id++;
	}

	printf("\n};\n\n");
}


int
compare_key_tok_id(
	QSORTP	a1,
	QSORTP	a2
	)
{
	const struct key_tok *p1 = (const void *)a1;
	const struct key_tok *p2 = (const void *)a2;

	if (p1->token == p2->token)
		return 0;

	if (p1->token < p2->token)
		return -1;
	else
		return 1;
}


int
compare_key_tok_text(
	QSORTP	a1,
	QSORTP	a2
	)
{
	const struct key_tok *p1 = (const void *)a1;
	const struct key_tok *p2 = (const void *)a2;

	return strcmp(p1->key, p2->key);
}


/*
 * populate_symb() - populate symb[] lookup array with symbolic token
 * names such that symb[T_Age] == "T_Age", etc.
 */
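/*
 * The header lines this function matches look like the hypothetical
 * example below; only "#define T_xxx <number>" lines satisfy the sscanf()
 * pattern and range checks, everything else in the header is ignored:
 *
 *	#define T_Age 258
 */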
void
populate_symb(
	char *header_file
	)
{
	FILE *	yh;
	char	line[128];
	char	name[128];
	int	token;

	yh = fopen(header_file, "r");
	if (NULL == yh) {
		perror("unable to open yacc/bison header file");
		exit(4);
	}

	while (NULL != fgets(line, sizeof(line), yh))
		if (2 == sscanf(line, "#define %s %d", name, &token)
		    && 'T' == name[0] && '_' == name[1] && token >= 0
		    && token < COUNTOF(symb))

			symb[token] = estrdup(name);

	fclose(yh);
}


const char *
symbname(
	int token
	)
{
	char *name;

	if (token >= 0 && token < COUNTOF(symb) && symb[token] != NULL)
		return symb[token];

	LIB_GETBUF(name);
	snprintf(name, LIB_BUFLENGTH, "%d", token);
	return name;
}