Location | Tool | Test ID | Function | Issue |
---|---|---|---|---|
/builddir/build/BUILD/Python-2.7.3/Parser/parsetok.c:134:43 | gcc | unused-but-set-variable | parsetok | variable 'handling_with' set but not used |
/builddir/build/BUILD/Python-2.7.3/Parser/parsetok.c:166:13 | clang-analyzer | | parsetok | Value stored to 'handling_with' is never read |
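
Both rows describe the same underlying problem: in parsetok() the local variable 'handling_with' is assigned at its declaration and again when an ENDMARKER token is handled, but nothing in the file reads it afterwards; the only code that appears to have consumed it is the 'with'/'as' keyword-warning machinery that is now compiled out under #if 0 (listing lines 108-122 below). The following standalone file is a minimal illustration of the pattern both checkers react to; it is not part of CPython, and the file and function names are made up for the example:

/* deadstore_demo.c -- minimal reproduction of the warning class above.
 *
 *   gcc -c -Wall deadstore_demo.c
 *       warning: variable 'handling_with' set but not used
 *                [-Wunused-but-set-variable]
 *   clang --analyze deadstore_demo.c
 *       warning: Value stored to 'handling_with' is never read
 */
int
deadstore_demo(int type, int started)
{
    int handling_with = 0;

    if (type == 0 && started) {
        handling_with = type;   /* value is stored here ... */
        started = 0;
    }

    /* ... but 'handling_with' is never read again, so the store is dead
     * and the variable as a whole is set but never used. */
    return started;
}

gcc enables -Wunused-but-set-variable as part of -Wall (since gcc 4.6), and the clang static analyzer's dead-store checker is on by default, so neither finding needs extra flags to reproduce.
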
1 /* Parser-tokenizer link implementation */
2
3 #include "pgenheaders.h"
4 #include "tokenizer.h"
5 #include "node.h"
6 #include "grammar.h"
7 #include "parser.h"
8 #include "parsetok.h"
9 #include "errcode.h"
10 #include "graminit.h"
11
12 int Py_TabcheckFlag;
13
14
15 /* Forward */
16 static node *parsetok(struct tok_state *, grammar *, int, perrdetail *, int *);
17 static void initerr(perrdetail *err_ret, const char* filename);
18
19 /* Parse input coming from a string. Return error code, print some errors. */
20 node *
21 PyParser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
22 {
23 return PyParser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
24 }
25
26 node *
27 PyParser_ParseStringFlags(const char *s, grammar *g, int start,
28 perrdetail *err_ret, int flags)
29 {
30 return PyParser_ParseStringFlagsFilename(s, NULL,
31 g, start, err_ret, flags);
32 }
33
34 node *
35 PyParser_ParseStringFlagsFilename(const char *s, const char *filename,
36 grammar *g, int start,
37 perrdetail *err_ret, int flags)
38 {
39 int iflags = flags;
40 return PyParser_ParseStringFlagsFilenameEx(s, filename, g, start,
41 err_ret, &iflags);
42 }
43
44 node *
45 PyParser_ParseStringFlagsFilenameEx(const char *s, const char *filename,
46 grammar *g, int start,
47 perrdetail *err_ret, int *flags)
48 {
49 struct tok_state *tok;
50
51 initerr(err_ret, filename);
52
53 if ((tok = PyTokenizer_FromString(s, start == file_input)) == NULL) {
54 err_ret->error = PyErr_Occurred() ? E_DECODE : E_NOMEM;
55 return NULL;
56 }
57
58 tok->filename = filename ? filename : "<string>";
59 if (Py_TabcheckFlag || Py_VerboseFlag) {
60 tok->altwarning = (tok->filename != NULL);
61 if (Py_TabcheckFlag >= 2)
62 tok->alterror++;
63 }
64
65 return parsetok(tok, g, start, err_ret, flags);
66 }
67
68 /* Parse input coming from a file. Return error code, print some errors. */
69
70 node *
71 PyParser_ParseFile(FILE *fp, const char *filename, grammar *g, int start,
72 char *ps1, char *ps2, perrdetail *err_ret)
73 {
74 return PyParser_ParseFileFlags(fp, filename, g, start, ps1, ps2,
75 err_ret, 0);
76 }
77
78 node *
79 PyParser_ParseFileFlags(FILE *fp, const char *filename, grammar *g, int start,
80 char *ps1, char *ps2, perrdetail *err_ret, int flags)
81 {
82 int iflags = flags;
83 return PyParser_ParseFileFlagsEx(fp, filename, g, start, ps1, ps2, err_ret, &iflags);
84 }
85
86 node *
87 PyParser_ParseFileFlagsEx(FILE *fp, const char *filename, grammar *g, int start,
88 char *ps1, char *ps2, perrdetail *err_ret, int *flags)
89 {
90 struct tok_state *tok;
91
92 initerr(err_ret, filename);
93
94 if ((tok = PyTokenizer_FromFile(fp, ps1, ps2)) == NULL) {
95 err_ret->error = E_NOMEM;
96 return NULL;
97 }
98 tok->filename = filename;
99 if (Py_TabcheckFlag || Py_VerboseFlag) {
100 tok->altwarning = (filename != NULL);
101 if (Py_TabcheckFlag >= 2)
102 tok->alterror++;
103 }
104
105 return parsetok(tok, g, start, err_ret, flags);
106 }
107
108 #if 0
109 static char with_msg[] =
110 "%s:%d: Warning: 'with' will become a reserved keyword in Python 2.6\n";
111
112 static char as_msg[] =
113 "%s:%d: Warning: 'as' will become a reserved keyword in Python 2.6\n";
114
115 static void
116 warn(const char *msg, const char *filename, int lineno)
117 {
118 if (filename == NULL)
119 filename = "<string>";
120 PySys_WriteStderr(msg, filename, lineno);
121 }
122 #endif
123
124 /* Parse input coming from the given tokenizer structure.
125 Return error code. */
126
127 static node *
128 parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
129 int *flags)
130 {
131 parser_state *ps;
132 node *n;
133 int started = 0, handling_import = 0, handling_with = 0;
134
(emitted by gcc)
135 if ((ps = PyParser_New(g, start)) == NULL) {
136 fprintf(stderr, "no mem for new parser\n");
137 err_ret->error = E_NOMEM;
138 PyTokenizer_Free(tok);
139 return NULL;
140 }
141 #ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
142 if (*flags & PyPARSE_PRINT_IS_FUNCTION) {
143 ps->p_flags |= CO_FUTURE_PRINT_FUNCTION;
144 }
145 if (*flags & PyPARSE_UNICODE_LITERALS) {
146 ps->p_flags |= CO_FUTURE_UNICODE_LITERALS;
147 }
148
149 #endif
150
151 for (;;) {
152 char *a, *b;
153 int type;
154 size_t len;
155 char *str;
156 int col_offset;
157
158 type = PyTokenizer_Get(tok, &a, &b);
159 if (type == ERRORTOKEN) {
160 err_ret->error = tok->done;
161 break;
162 }
163 if (type == ENDMARKER && started) {
164 type = NEWLINE; /* Add an extra newline */
165 handling_with = handling_import = 0;
166 started = 0;
(emitted by clang-analyzer) TODO: a detailed trace is available in the data model (not yet rendered in this report)
167 /* Add the right number of dedent tokens,
168 except if a certain flag is given --
169 codeop.py uses this. */
170 if (tok->indent &&
171 !(*flags & PyPARSE_DONT_IMPLY_DEDENT))
172 {
173 tok->pendin = -tok->indent;
174 tok->indent = 0;
175 }
176 }
177 else
178 started = 1;
179 len = b - a; /* XXX this may compute NULL - NULL */
180 str = (char *) PyObject_MALLOC(len + 1);
181 if (str == NULL) {
182 fprintf(stderr, "no mem for next token\n");
183 err_ret->error = E_NOMEM;
184 break;
185 }
186 if (len > 0)
187 strncpy(str, a, len);
188 str[len] = '\0';
189
190 #ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
191 #endif
192 if (a >= tok->line_start)
193 col_offset = a - tok->line_start;
194 else
195 col_offset = -1;
196
197 if ((err_ret->error =
198 PyParser_AddToken(ps, (int)type, str, tok->lineno, col_offset,
199 &(err_ret->expected))) != E_OK) {
200 if (err_ret->error != E_DONE) {
201 PyObject_FREE(str);
202 err_ret->token = type;
203 }
204 break;
205 }
206 }
207
208 if (err_ret->error == E_DONE) {
209 n = ps->p_tree;
210 ps->p_tree = NULL;
211 }
212 else
213 n = NULL;
214
215 #ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
216 *flags = ps->p_flags;
217 #endif
218 PyParser_Delete(ps);
219
220 if (n == NULL) {
221 if (tok->lineno <= 1 && tok->done == E_EOF)
222 err_ret->error = E_EOF;
223 err_ret->lineno = tok->lineno;
224 if (tok->buf != NULL) {
225 char *text = NULL;
226 size_t len;
227 assert(tok->cur - tok->buf < INT_MAX);
228 err_ret->offset = (int)(tok->cur - tok->buf);
229 len = tok->inp - tok->buf;
230 #ifdef Py_USING_UNICODE
231 text = PyTokenizer_RestoreEncoding(tok, len, &err_ret->offset);
232
233 #endif
234 if (text == NULL) {
235 text = (char *) PyObject_MALLOC(len + 1);
236 if (text != NULL) {
237 if (len > 0)
238 strncpy(text, tok->buf, len);
239 text[len] = '\0';
240 }
241 }
242 err_ret->text = text;
243 }
244 } else if (tok->encoding != NULL) {
245 /* 'nodes->n_str' uses PyObject_*, while 'tok->encoding' was
246 * allocated using PyMem_
247 */
248 node* r = PyNode_New(encoding_decl);
249 if (r)
250 r->n_str = PyObject_MALLOC(strlen(tok->encoding)+1);
251 if (!r || !r->n_str) {
252 err_ret->error = E_NOMEM;
253 if (r)
254 PyObject_FREE(r);
255 n = NULL;
256 goto done;
257 }
258 strcpy(r->n_str, tok->encoding);
259 PyMem_FREE(tok->encoding);
260 tok->encoding = NULL;
261 r->n_nchildren = 1;
262 r->n_child = n;
263 n = r;
264 }
265
266 done:
267 PyTokenizer_Free(tok);
268
269 return n;
270 }
271
272 static void
273 initerr(perrdetail *err_ret, const char *filename)
274 {
275 err_ret->error = E_OK;
276 err_ret->filename = filename;
277 err_ret->lineno = 0;
278 err_ret->offset = 0;
279 err_ret->text = NULL;
280 err_ret->token = -1;
281 err_ret->expected = -1;
282 }