1 # -*- coding: iso-8859-1 -*-
2 """A lexical analyzer class for simple shell-like syntaxes."""
4 # Module and documentation by Eric S. Raymond, 21 Dec 1998
5 # Input stacking and error message cleanup added by ESR, March 2000
6 # push_source() and pop_source() made explicit by ESR, January 2001.
7 # Posix compliance, split(), string arguments, and
8 # iterator interface by Gustavo Niemeyer, April 2003.
# NOTE(review): fragmentary excerpt — the `class`/`def` headers for the
# deque fallback and the is_basestring() shim are outside this view.
12 #from collections import deque
# Fallback deque fragments: a minimal list-backed stand-in, presumably
# used when collections.deque is unavailable — TODO confirm the guard.
19 def appendleft(self, item):
20 self.data.insert(0, item)
# popleft fragment: remove and return the leftmost element (O(n) on a
# plain list, unlike the real deque's O(1)).
22 return self.data.pop(0)
# is_basestring() shim fragments: exact-type check where `basestring`
# is unavailable, isinstance() check otherwise.
29 return type(s) is types.StringType
32 return isinstance(s, basestring)
# Prefer the C-accelerated StringIO; fall back to the pure-Python one
# (the try/except lines are missing from this excerpt).
35 from cStringIO import StringIO
37 from StringIO import StringIO
# Public API of the module.
39 __all__ = ["shlex", "split"]
42 "A lexical analyzer class for simple shell-like syntaxes."
43 def __init__(self, instream=None, infile=None, posix=False):
44 if is_basestring(instream):
45 instream = StringIO(instream)
46 if instream is not None:
47 self.instream = instream
50 self.instream = sys.stdin
58 self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
59 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')
61 self.wordchars = self.wordchars + ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ'
62 'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ')
63 self.whitespace = ' \t\r\n'
64 self.whitespace_split = False
67 self.escapedquotes = '"'
69 self.pushback = deque()
73 self.filestack = deque()
76 print 'shlex: reading from %s, line %d' \
77 % (self.instream, self.lineno)
79 def push_token(self, tok):
80 "Push a token onto the stack popped by the get_token method"
# Debug trace (the `if self.debug` guard is missing from this excerpt).
82 print "shlex: pushing token " + repr(tok)
# LIFO: the most recently pushed token is the first one get_token()
# pops back off the pushback deque.
83 self.pushback.appendleft(tok)
85 def push_source(self, newstream, newfile=None):
86 "Push an input source onto the lexer's input source stack."
# Accept a bare string as a source by wrapping it in a StringIO.
87 if is_basestring(newstream):
88 newstream = StringIO(newstream)
# Save the current (file, stream, lineno) triple so pop_source() can
# restore it when the new source is exhausted.
89 self.filestack.appendleft((self.infile, self.instream, self.lineno))
91 self.instream = newstream
# Debug traces; intermediate lines (e.g. infile/lineno reset) are
# missing from this excerpt.
94 if newfile is not None:
95 print 'shlex: pushing to file %s' % (self.infile,)
97 print 'shlex: pushing to stream %s' % (self.instream,)
100 "Pop the input source stack."
101 self.instream.close()
102 (self.infile, self.instream, self.lineno) = self.filestack.popleft()
104 print 'shlex: popping to %s, line %d' \
105 % (self.instream, self.lineno)
109 "Get a token from the input stream (or from stack if it's nonempty)"
111 tok = self.pushback.popleft()
113 print "shlex: popping token " + repr(tok)
115 # No pushback. Get a token.
116 raw = self.read_token()
118 if self.source is not None:
119 while raw == self.source:
120 spec = self.sourcehook(self.read_token())
122 (newfile, newstream) = spec
123 self.push_source(newstream, newfile)
124 raw = self.get_token()
125 # Maybe we got EOF instead?
126 while raw == self.eof:
127 if not self.filestack:
131 raw = self.get_token()
132 # Neither inclusion nor EOF
135 print "shlex: token=" + repr(raw)
137 print "shlex: token=EOF"
140 def read_token(self):
# Core tokenizer state machine.  self.state is one of: None (EOF
# already seen), ' ' (between tokens), a quote character (inside a
# quoted string), an escape character (just after an escape), or 'a'
# (inside an unquoted word).  NOTE(review): many original lines are
# missing from this excerpt, so several branches appear without their
# guards or surrounding loop.
144 nextchar = self.instream.read(1)
# Line accounting — presumably guarded by nextchar == '\n' (guard line
# missing here).
146 self.lineno = self.lineno + 1
148 print "shlex: in state", repr(self.state), \
149 "I see character:", repr(nextchar)
150 if self.state is None:
151 self.token = '' # past end of file
153 elif self.state == ' ':
# Between tokens: classify what the next character starts.
155 self.state = None # end of file
157 elif nextchar in self.whitespace:
159 print "shlex: I see whitespace in whitespace state"
# In POSIX mode an empty quoted string still yields a token.
160 if self.token or (self.posix and quoted):
161 break # emit current token
# Comment characters swallow the rest of the line.
164 elif nextchar in self.commenters:
165 self.instream.readline()
166 self.lineno = self.lineno + 1
167 elif self.posix and nextchar in self.escape:
169 self.state = nextchar
170 elif nextchar in self.wordchars:
171 self.token = nextchar
173 elif nextchar in self.quotes:
# Non-POSIX keeps the quote char in the token; see below.
175 self.token = nextchar
176 self.state = nextchar
177 elif self.whitespace_split:
178 self.token = nextchar
# Anything else is punctuation, emitted as a one-char token.
181 self.token = nextchar
182 if self.token or (self.posix and quoted):
183 break # emit current token
186 elif self.state in self.quotes:
# Inside a quoted string; self.state holds the opening quote char.
188 if not nextchar: # end of file
190 print "shlex: I see EOF in quotes state"
191 # XXX what error should be raised here?
192 raise ValueError, "No closing quotation"
193 if nextchar == self.state:
195 self.token = self.token + nextchar
# Escapes are honoured only inside quote styles that allow them
# (self.escapedquotes, i.e. double quotes by default).
200 elif self.posix and nextchar in self.escape and \
201 self.state in self.escapedquotes:
202 escapedstate = self.state
203 self.state = nextchar
205 self.token = self.token + nextchar
206 elif self.state in self.escape:
# Just after an escape character.
207 if not nextchar: # end of file
209 print "shlex: I see EOF in escape state"
210 # XXX what error should be raised here?
211 raise ValueError, "No escaped character"
212 # In posix shells, only the quote itself or the escape
213 # character may be escaped within quotes.
214 if escapedstate in self.quotes and \
215 nextchar != self.state and nextchar != escapedstate:
216 self.token = self.token + self.state
217 self.token = self.token + nextchar
# Return to whatever state the escape interrupted.
218 self.state = escapedstate
219 elif self.state == 'a':
# Inside an unquoted word.
221 self.state = None # end of file
223 elif nextchar in self.whitespace:
225 print "shlex: I see whitespace in word state"
227 if self.token or (self.posix and quoted):
228 break # emit current token
231 elif nextchar in self.commenters:
232 self.instream.readline()
233 self.lineno = self.lineno + 1
236 if self.token or (self.posix and quoted):
237 break # emit current token
240 elif self.posix and nextchar in self.quotes:
241 self.state = nextchar
242 elif self.posix and nextchar in self.escape:
244 self.state = nextchar
245 elif nextchar in self.wordchars or nextchar in self.quotes \
246 or self.whitespace_split:
247 self.token = self.token + nextchar
# Punctuation ends the word: push the char back so the next call
# sees it, then emit the accumulated token.
249 self.pushback.appendleft(nextchar)
251 print "shlex: I see punctuation in word state"
254 break # emit current token
# POSIX mode: an empty unquoted result signals EOF (presumably mapped
# to self.eof on a missing line — TODO confirm).
259 if self.posix and not quoted and result == '':
# Debug traces for the raw token / EOF outcomes.
263 print "shlex: raw token=" + repr(result)
265 print "shlex: raw token=EOF"
268 def sourcehook(self, newfile):
269 "Hook called on a filename to be sourced."
# Strip surrounding double quotes from a quoted filename argument.
270 if newfile[0] == '"':
271 newfile = newfile[1:-1]
272 # This implements cpp-like semantics for relative-path inclusion.
273 if is_basestring(self.infile) and not os.path.isabs(newfile):
274 newfile = os.path.join(os.path.dirname(self.infile), newfile)
# Return the (filename, open stream) pair expected by push_source().
# NOTE(review): the stream is opened here and only closed by
# pop_source(); an abandoned lexer leaks the handle.
275 return (newfile, open(newfile, "r"))
277 def error_leader(self, infile=None, lineno=None):
278 "Emit a C-compiler-like, Emacs-friendly error-message leader."
# Defaulting of infile/lineno to the lexer's current position happens
# on lines missing from this excerpt.
283 return "\"%s\", line %d: " % (infile, lineno)
# Iterator-protocol fragment (the enclosing `def` — presumably next()
# — is outside this excerpt): fetch one token and test for the EOF
# sentinel, which would end iteration.
289 token = self.get_token()
290 if token == self.eof:
294 def split(s, comments=False):
# Split string `s` using POSIX shell-like rules.  NOTE(review): the
# `comments` flag is not consumed on any line visible in this excerpt
# — presumably it clears lex.commenters; confirm against the full file.
295 lex = shlex(s, posix=True)
296 lex.whitespace_split = True
# Token-accumulation loop (the loop header, list append, and return
# are missing from this excerpt).
302 token = lex.get_token()
# Self-test driver: lex the file named on the command line (stdin
# handling for the no-argument case is on lines missing from this
# excerpt) and print each token until exhaustion.
308 if __name__ == '__main__':
309 if len(sys.argv) == 1:
# Pass the filename as `infile` so error messages and cpp-style
# relative inclusion work.
313 lexer = shlex(open(file), file)
315 tt = lexer.get_token()
317 print "Token: " + repr(tt)