author     Anthony Baxter <anthonybaxter@gmail.com>   2006-04-11 05:39:14 (GMT)
committer  Anthony Baxter <anthonybaxter@gmail.com>   2006-04-11 05:39:14 (GMT)
commit     114900298ea26e5e25edd5d05f24648dcd5ea95b
tree       9db37abbea9f0bd5dc1332f6f710de5e521e3c2c  /Parser
parent     319c47fcdb3b8196cc580c4fab409b0ee58119fe
Fix the code in Parser/ to also compile with C++. This was mostly casts for
malloc/realloc-type functions, as well as renaming one variable called 'new'
in tokenizer.c. There is still a lot more to be done; it will be checked in
one chunk at a time, or the patch would be massively huge. It still compiles
fine with gcc.
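For context on why these two kinds of edits are needed: C++ does not allow the implicit conversion from void * that malloc-style calls rely on in C, and 'new' is a reserved keyword in C++. Below is a minimal standalone sketch of both issues with made-up names ('append_char', 'newbuf'); it is illustrative only and not part of the commit.

```c
#include <stdlib.h>

/* Illustration only: 'append_char' and 'newbuf' are made-up names. */
static char *
append_char(char *buffer, size_t len, char c)
{
        /* C accepts the implicit void * -> char * conversion from realloc;
         * C++ does not, so an explicit (char *) cast is needed to build the
         * same file with both compilers. */
        char *newbuf = (char *)realloc(buffer, len + 2);
        if (newbuf == NULL)
                return NULL;
        /* The variable is deliberately not called 'new': that is a C++
         * keyword, which is why tokenizer.c renames its 'new' variable
         * to 'newtok'. */
        newbuf[len] = c;
        newbuf[len + 1] = '\0';
        return newbuf;
}
```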
Diffstat (limited to 'Parser')

 -rw-r--r--  Parser/bitset.c     |  2
 -rw-r--r--  Parser/firstsets.c  |  5
 -rw-r--r--  Parser/grammar.c    | 13
 -rw-r--r--  Parser/myreadline.c |  6
 -rw-r--r--  Parser/parser.c     |  2
 -rw-r--r--  Parser/pgen.c       | 23
 -rw-r--r--  Parser/tokenizer.c  | 25

 7 files changed, 40 insertions, 36 deletions
diff --git a/Parser/bitset.c b/Parser/bitset.c
index 0f3e01c..b5543b8 100644
--- a/Parser/bitset.c
+++ b/Parser/bitset.c
@@ -8,7 +8,7 @@ bitset
 newbitset(int nbits)
 {
         int nbytes = NBYTES(nbits);
-        bitset ss = PyObject_MALLOC(sizeof(BYTE) * nbytes);
+        bitset ss = (char *)PyObject_MALLOC(sizeof(BYTE) * nbytes);

         if (ss == NULL)
                 Py_FatalError("no mem for bitset");
diff --git a/Parser/firstsets.c b/Parser/firstsets.c
index d6bacef..00467b3 100644
--- a/Parser/firstsets.c
+++ b/Parser/firstsets.c
@@ -59,7 +59,7 @@ calcfirstset(grammar *g, dfa *d)
         nbits = g->g_ll.ll_nlabels;
         result = newbitset(nbits);
-        sym = PyObject_MALLOC(sizeof(int));
+        sym = (int *)PyObject_MALLOC(sizeof(int));
         if (sym == NULL)
                 Py_FatalError("no mem for new sym in calcfirstset");
         nsyms = 1;
@@ -73,7 +73,8 @@ calcfirstset(grammar *g, dfa *d)
                                 break;
                 }
                 if (j >= nsyms) { /* New label */
-                        sym = PyObject_REALLOC(sym, sizeof(int) * (nsyms + 1));
+                        sym = (int *)PyObject_REALLOC(sym,
+                                                      sizeof(int) * (nsyms + 1));
                         if (sym == NULL)
                                 Py_FatalError(
                                         "no mem to resize sym in calcfirstset");
diff --git a/Parser/grammar.c b/Parser/grammar.c
index 880bf84..b0dafe7 100644
--- a/Parser/grammar.c
+++ b/Parser/grammar.c
@@ -20,7 +20,7 @@ newgrammar(int start)
 {
         grammar *g;

-        g = PyObject_MALLOC(sizeof(grammar));
+        g = (grammar *)PyObject_MALLOC(sizeof(grammar));
         if (g == NULL)
                 Py_FatalError("no mem for new grammar");
         g->g_ndfas = 0;
@@ -37,7 +37,8 @@ adddfa(grammar *g, int type, char *name)
 {
         dfa *d;

-        g->g_dfa = PyObject_REALLOC(g->g_dfa, sizeof(dfa) * (g->g_ndfas + 1));
+        g->g_dfa = (dfa *)PyObject_REALLOC(g->g_dfa,
+                                           sizeof(dfa) * (g->g_ndfas + 1));
         if (g->g_dfa == NULL)
                 Py_FatalError("no mem to resize dfa in adddfa");
         d = &g->g_dfa[g->g_ndfas++];
@@ -55,7 +56,7 @@ addstate(dfa *d)
 {
         state *s;

-        d->d_state = PyObject_REALLOC(d->d_state,
+        d->d_state = (state *)PyObject_REALLOC(d->d_state,
                                       sizeof(state) * (d->d_nstates + 1));
         if (d->d_state == NULL)
                 Py_FatalError("no mem to resize state in addstate");
@@ -79,7 +80,7 @@ addarc(dfa *d, int from, int to, int lbl)
         assert(0 <= to && to < d->d_nstates);

         s = &d->d_state[from];
-        s->s_arc = PyObject_REALLOC(s->s_arc, sizeof(arc) * (s->s_narcs + 1));
+        s->s_arc = (arc *)PyObject_REALLOC(s->s_arc, sizeof(arc) * (s->s_narcs + 1));
         if (s->s_arc == NULL)
                 Py_FatalError("no mem to resize arc list in addarc");
         a = &s->s_arc[s->s_narcs++];
@@ -98,7 +99,7 @@ addlabel(labellist *ll, int type, char *str)
                     strcmp(ll->ll_label[i].lb_str, str) == 0)
                         return i;
         }
-        ll->ll_label = PyObject_REALLOC(ll->ll_label,
+        ll->ll_label = (label *)PyObject_REALLOC(ll->ll_label,
                                         sizeof(label) * (ll->ll_nlabels + 1));
         if (ll->ll_label == NULL)
                 Py_FatalError("no mem to resize labellist in addlabel");
@@ -197,7 +198,7 @@ translabel(grammar *g, label *lb)
                         name_len = p - src;
                 else
                         name_len = strlen(src);
-                dest = malloc(name_len + 1);
+                dest = (char *)malloc(name_len + 1);
                 strncpy(dest, src, name_len);
                 dest[name_len] = '\0';
                 free(lb->lb_str);
diff --git a/Parser/myreadline.c b/Parser/myreadline.c
index 630997b..7b27ea2 100644
--- a/Parser/myreadline.c
+++ b/Parser/myreadline.c
@@ -111,7 +111,7 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, char *prompt)
         size_t n;
         char *p;
         n = 100;
-        if ((p = PyObject_MALLOC(n)) == NULL)
+        if ((p = (char *)PyObject_MALLOC(n)) == NULL)
                 return NULL;
         fflush(sys_stdout);
 #ifndef RISCOS
@@ -141,7 +141,7 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, char *prompt)
         n = strlen(p);
         while (n > 0 && p[n-1] != '\n') {
                 size_t incr = n+2;
-                p = PyObject_REALLOC(p, n + incr);
+                p = (char *)PyObject_REALLOC(p, n + incr);
                 if (p == NULL)
                         return NULL;
                 if (incr > INT_MAX) {
@@ -151,7 +151,7 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, char *prompt)
                         break;
                 n += strlen(p+n);
         }
-        return PyObject_REALLOC(p, n+1);
+        return (char *)PyObject_REALLOC(p, n+1);
 }
diff --git a/Parser/parser.c b/Parser/parser.c
index 45b613a..04d5817 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -75,7 +75,7 @@ PyParser_New(grammar *g, int start)
         if (!g->g_accel)
                 PyGrammar_AddAccelerators(g);
-        ps = PyMem_MALLOC(sizeof(parser_state));
+        ps = (parser_state *)PyMem_MALLOC(sizeof(parser_state));
         if (ps == NULL)
                 return NULL;
         ps->p_grammar = g;
diff --git a/Parser/pgen.c b/Parser/pgen.c
index 6aa1d19..dfe7cac 100644
--- a/Parser/pgen.c
+++ b/Parser/pgen.c
@@ -49,8 +49,8 @@ addnfastate(nfa *nf)
 {
         nfastate *st;

-        nf->nf_state = PyObject_REALLOC(nf->nf_state, sizeof(nfastate) *
-                                        (nf->nf_nstates + 1));
+        nf->nf_state = (nfastate *)PyObject_REALLOC(nf->nf_state,
+                                        sizeof(nfastate) * (nf->nf_nstates + 1));
         if (nf->nf_state == NULL)
                 Py_FatalError("out of mem");
         st = &nf->nf_state[nf->nf_nstates++];
@@ -66,7 +66,7 @@ addnfaarc(nfa *nf, int from, int to, int lbl)
         nfaarc *ar;

         st = &nf->nf_state[from];
-        st->st_arc = PyObject_REALLOC(st->st_arc,
+        st->st_arc = (nfaarc *)PyObject_REALLOC(st->st_arc,
                                       sizeof(nfaarc) * (st->st_narcs + 1));
         if (st->st_arc == NULL)
                 Py_FatalError("out of mem");
@@ -81,7 +81,7 @@ newnfa(char *name)
         nfa *nf;
         static int type = NT_OFFSET; /* All types will be disjunct */

-        nf = PyObject_MALLOC(sizeof(nfa));
+        nf = (nfa *)PyObject_MALLOC(sizeof(nfa));
         if (nf == NULL)
                 Py_FatalError("no mem for new nfa");
         nf->nf_type = type++;
@@ -106,7 +106,7 @@ newnfagrammar(void)
 {
         nfagrammar *gr;

-        gr = PyObject_MALLOC(sizeof(nfagrammar));
+        gr = (nfagrammar *)PyObject_MALLOC(sizeof(nfagrammar));
         if (gr == NULL)
                 Py_FatalError("no mem for new nfa grammar");
         gr->gr_nnfas = 0;
@@ -123,7 +123,7 @@ addnfa(nfagrammar *gr, char *name)
         nfa *nf;

         nf = newnfa(name);
-        gr->gr_nfa = PyObject_REALLOC(gr->gr_nfa,
+        gr->gr_nfa = (nfa **)PyObject_REALLOC(gr->gr_nfa,
                                       sizeof(nfa) * (gr->gr_nnfas + 1));
         if (gr->gr_nfa == NULL)
                 Py_FatalError("out of mem");
@@ -364,7 +364,7 @@ typedef struct _ss_arc {
 typedef struct _ss_state {
         bitset  ss_ss;
         int     ss_narcs;
-        ss_arc  *ss_arc;
+        struct _ss_arc  *ss_arc;
         int     ss_deleted;
         int     ss_finish;
         int     ss_rename;
@@ -395,7 +395,7 @@ makedfa(nfagrammar *gr, nfa *nf, dfa *d)
         ss = newbitset(nbits);
         addclosure(ss, nf, nf->nf_start);

-        xx_state = PyObject_MALLOC(sizeof(ss_state));
+        xx_state = (ss_state *)PyObject_MALLOC(sizeof(ss_state));
         if (xx_state == NULL)
                 Py_FatalError("no mem for xx_state in makedfa");
         xx_nstates = 1;
@@ -435,8 +435,8 @@ makedfa(nfagrammar *gr, nfa *nf, dfa *d)
                         }
                         /* Add new arc for this state */
                         size = sizeof(ss_arc) * (yy->ss_narcs + 1);
-                        yy->ss_arc = PyObject_REALLOC(yy->ss_arc,
-                                                      size);
+                        yy->ss_arc = (ss_arc *)PyObject_REALLOC(
+                                                      yy->ss_arc, size);
                         if (yy->ss_arc == NULL)
                                 Py_FatalError("out of mem");
                         zz = &yy->ss_arc[yy->ss_narcs++];
@@ -459,7 +459,8 @@ makedfa(nfagrammar *gr, nfa *nf, dfa *d)
                                 }
                         }
                         size = sizeof(ss_state) * (xx_nstates + 1);
-                        xx_state = PyObject_REALLOC(xx_state, size);
+                        xx_state = (ss_state *)PyObject_REALLOC(xx_state,
+                                                                size);
                         if (xx_state == NULL)
                                 Py_FatalError("out of mem");
                         zz->sa_arrow = xx_nstates;
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 469de27..5fcf49e 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -105,7 +105,8 @@ char *_PyParser_TokenNames[] = {
 static struct tok_state *
 tok_new(void)
 {
-        struct tok_state *tok = PyMem_MALLOC(sizeof(struct tok_state));
+        struct tok_state *tok = (struct tok_state *)PyMem_MALLOC(
+                                                sizeof(struct tok_state));
         if (tok == NULL)
                 return NULL;
         tok->buf = tok->cur = tok->end = tok->inp = tok->start = NULL;
@@ -775,38 +776,38 @@ tok_nextc(register struct tok_state *tok)
                         return Py_CHARMASK(*tok->cur++);
                 }
                 if (tok->prompt != NULL) {
-                        char *new = PyOS_Readline(stdin, stdout, tok->prompt);
+                        char *newtok = PyOS_Readline(stdin, stdout, tok->prompt);
                         if (tok->nextprompt != NULL)
                                 tok->prompt = tok->nextprompt;
-                        if (new == NULL)
+                        if (newtok == NULL)
                                 tok->done = E_INTR;
-                        else if (*new == '\0') {
-                                PyObject_FREE(new);
+                        else if (*newtok == '\0') {
+                                PyObject_FREE(newtok);
                                 tok->done = E_EOF;
                         }
 #if !defined(PGEN) && defined(Py_USING_UNICODE)
-                        else if (tok_stdin_decode(tok, &new) != 0)
-                                PyObject_FREE(new);
+                        else if (tok_stdin_decode(tok, &newtok) != 0)
+                                PyObject_FREE(newtok);
 #endif
                         else if (tok->start != NULL) {
                                 size_t start = tok->start - tok->buf;
                                 size_t oldlen = tok->cur - tok->buf;
-                                size_t newlen = oldlen + strlen(new);
+                                size_t newlen = oldlen + strlen(newtok);
                                 char *buf = tok->buf;
                                 buf = (char *)PyObject_REALLOC(buf, newlen+1);
                                 tok->lineno++;
                                 if (buf == NULL) {
                                         PyObject_FREE(tok->buf);
                                         tok->buf = NULL;
-                                        PyObject_FREE(new);
+                                        PyObject_FREE(newtok);
                                         tok->done = E_NOMEM;
                                         return EOF;
                                 }
                                 tok->buf = buf;
                                 tok->cur = tok->buf + oldlen;
                                 tok->line_start = tok->cur;
-                                strcpy(tok->buf + oldlen, new);
-                                PyObject_FREE(new);
+                                strcpy(tok->buf + oldlen, newtok);
+                                PyObject_FREE(newtok);
                                 tok->inp = tok->buf + newlen;
                                 tok->end = tok->inp + 1;
                                 tok->start = tok->buf + start;
@@ -815,7 +816,7 @@ tok_nextc(register struct tok_state *tok)
                                 tok->lineno++;
                                 if (tok->buf != NULL)
                                         PyObject_FREE(tok->buf);
-                                tok->buf = new;
+                                tok->buf = newtok;
                                 tok->line_start = tok->buf;
                                 tok->cur = tok->buf;
                                 tok->line_start = tok->buf;
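One change above is not a cast: in pgen.c the _ss_state member goes from 'ss_arc *ss_arc;' to 'struct _ss_arc *ss_arc;'. In C++, declaring a member with the same name as a type already used in that struct changes what the name means inside the struct, which g++ rejects; the elaborated 'struct _ss_arc' form sidesteps the lookup of the plain name. The sketch below reproduces the same situation with made-up names ('_node', '_list'); it is an illustration, not CPython code.

```c
#include <stddef.h>

typedef struct _node { int value; } node;

/* 'node *node;' compiles as C, but g++ reports "changes meaning of 'node'":
 * the member named 'node' would shadow the typedef inside the struct.
 * Spelling the type as 'struct _node' keeps the declaration valid for both
 * compilers, mirroring the _ss_state fix above. */
typedef struct _list {
        struct _node *node;     /* not: node *node; */
        int length;
} list;

int main(void)
{
        list l = { NULL, 0 };
        return l.length;
}
```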