author    darylm503 <darylm503@6f19259b-4bc3-4df7-8a09-765794883524>  2012-04-16 22:12:42 +0000
committer darylm503 <darylm503@6f19259b-4bc3-4df7-8a09-765794883524>  2012-04-16 22:12:42 +0000
commit    4710c53dcad1ebf3755f3efb9e80ac24bd72a9b2 (patch)
tree      2d17d2388a78082e32f6a97120d707328143543b /AppPkg/Applications/Python/Python-2.7.2/Parser/tokenizer.h
parent    cbc6b5e54599c7391ece99ad3c5313f4dd4ddda6 (diff)
download  edk2-platforms-4710c53dcad1ebf3755f3efb9e80ac24bd72a9b2.tar.xz
AppPkg/Applications/Python: Add Python 2.7.2 sources since the release of Python 2.7.3 made them unavailable from the python.org web site.
These files are a subset of the python-2.7.2.tgz distribution from python.org. Changed files from PyMod-2.7.2 have been copied into the corresponding directories of this tree, replacing the original files in the distribution.

Signed-off-by: daryl.mcdaniel@intel.com

git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@13197 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'AppPkg/Applications/Python/Python-2.7.2/Parser/tokenizer.h')
-rw-r--r--  AppPkg/Applications/Python/Python-2.7.2/Parser/tokenizer.h  70
1 file changed, 70 insertions, 0 deletions
diff --git a/AppPkg/Applications/Python/Python-2.7.2/Parser/tokenizer.h b/AppPkg/Applications/Python/Python-2.7.2/Parser/tokenizer.h
new file mode 100644
index 0000000000..3de3280d05
--- /dev/null
+++ b/AppPkg/Applications/Python/Python-2.7.2/Parser/tokenizer.h
@@ -0,0 +1,70 @@
+#ifndef Py_TOKENIZER_H
+#define Py_TOKENIZER_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "object.h"
+
+/* Tokenizer interface */
+
+#include "token.h" /* For token types */
+
+#define MAXINDENT 100 /* Max indentation level */
+
+/* Tokenizer state */
+struct tok_state {
+ /* Input state; buf <= cur <= inp <= end */
+ /* NB an entire line is held in the buffer */
+ char *buf; /* Input buffer, or NULL; malloc'ed if fp != NULL */
+ char *cur; /* Next character in buffer */
+ char *inp; /* End of data in buffer */
+ char *end; /* End of input buffer if buf != NULL */
+ char *start; /* Start of current token if not NULL */
+ int done; /* E_OK normally, E_EOF at EOF, otherwise error code */
+ /* NB If done != E_OK, cur must be == inp!!! */
+ FILE *fp; /* Rest of input; NULL if tokenizing a string */
+ int tabsize; /* Tab spacing */
+ int indent; /* Current indentation index */
+ int indstack[MAXINDENT]; /* Stack of indents */
+ int atbol; /* Nonzero if at begin of new line */
+ int pendin; /* Pending indents (if > 0) or dedents (if < 0) */
+ char *prompt, *nextprompt; /* For interactive prompting */
+ int lineno; /* Current line number */
+ int level; /* () [] {} Parentheses nesting level */
+ /* Used to allow free continuations inside them */
+ /* Stuff for checking on different tab sizes */
+ const char *filename; /* For error messages */
+ int altwarning; /* Issue warning if alternate tabs don't match */
+ int alterror; /* Issue error if alternate tabs don't match */
+ int alttabsize; /* Alternate tab spacing */
+ int altindstack[MAXINDENT]; /* Stack of alternate indents */
+ /* Stuff for PEP 0263 */
+ int decoding_state; /* -1:decoding, 0:init, 1:raw */
+ int decoding_erred; /* whether erred in decoding */
+ int read_coding_spec; /* whether 'coding:...' has been read */
+ char *encoding;
+ int cont_line; /* whether we are in a continuation line. */
+ const char* line_start; /* pointer to start of current line */
+#ifndef PGEN
+ PyObject *decoding_readline; /* codecs.open(...).readline */
+ PyObject *decoding_buffer;
+#endif
+ const char* enc;
+ const char* str;
+ const char* input; /* Tokenizer's newline translated copy of the string. */
+};
+
+extern struct tok_state *PyTokenizer_FromString(const char *, int);
+extern struct tok_state *PyTokenizer_FromFile(FILE *, char *, char *);
+extern void PyTokenizer_Free(struct tok_state *);
+extern int PyTokenizer_Get(struct tok_state *, char **, char **);
+#if defined(PGEN) || defined(Py_USING_UNICODE)
+extern char * PyTokenizer_RestoreEncoding(struct tok_state* tok,
+ int len, int *offset);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_TOKENIZER_H */
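
As a usage sketch of the interface declared above (modeled on how Parser/parsetok.c drives the tokenizer; the dump_tokens helper name and the reading of PyTokenizer_FromString's second argument as an exec-input flag are assumptions for illustration, and this is internal CPython API rather than a stable public one):

#include "Python.h"
#include "errcode.h"     /* E_OK, E_EOF and the other E_* codes kept in tok->done */
#include "token.h"       /* token type constants such as ENDMARKER, ERRORTOKEN */
#include "tokenizer.h"

static void dump_tokens(const char *source)
{
    /* Build a tok_state over an in-memory string; the second argument is
       assumed here to request exec-input handling of the string. */
    struct tok_state *tok = PyTokenizer_FromString(source, 1);
    if (tok == NULL)
        return;

    for (;;) {
        char *start, *end;
        int type = PyTokenizer_Get(tok, &start, &end);
        if (type == ERRORTOKEN)
            break;                  /* tok->done now holds the E_* error code */
        /* start/end may be NULL for synthetic tokens such as INDENT/DEDENT */
        if (start != NULL && end != NULL)
            printf("token %d: %.*s\n", type, (int)(end - start), start);
        if (type == ENDMARKER)
            break;                  /* end of input reached */
    }
    PyTokenizer_Free(tok);
}

The same loop works for file input by swapping PyTokenizer_FromString for PyTokenizer_FromFile(fp, prompt, nextprompt), whose prompt arguments populate the prompt/nextprompt fields used for interactive prompting.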