summaryrefslogtreecommitdiffstats
path: root/Lib
diff options
context:
space:
mode:
authorGuido van Rossum <guido@python.org>1998-12-22 05:19:29 (GMT)
committerGuido van Rossum <guido@python.org>1998-12-22 05:19:29 (GMT)
commit9c30c24bc2285438dbe94c28c05f1aa9127a04a3 (patch)
tree3caa5411aab7ca0aeea1b72dad1c9ca9d4cba6ae /Lib
parent5e97c9dff91ad885d36a9d26f97549f198b90ee8 (diff)
downloadcpython-9c30c24bc2285438dbe94c28c05f1aa9127a04a3.zip
cpython-9c30c24bc2285438dbe94c28c05f1aa9127a04a3.tar.gz
cpython-9c30c24bc2285438dbe94c28c05f1aa9127a04a3.tar.bz2
Adding shlex and netrc by Eric Raymond.
Diffstat (limited to 'Lib')
-rw-r--r--Lib/netrc.py93
-rw-r--r--Lib/shlex.py119
2 files changed, 212 insertions, 0 deletions
diff --git a/Lib/netrc.py b/Lib/netrc.py
new file mode 100644
index 0000000..7c1de26
--- /dev/null
+++ b/Lib/netrc.py
@@ -0,0 +1,93 @@
+#!/usr/bin/python
+# Module and documentation by Eric S. Raymond, 21 Dec 1998
+
+import sys, os, string, shlex
+
class netrc:
    """Parse a .netrc file into per-machine authentication data.

    After construction, ``hosts`` maps each machine name (or the string
    'default') to a ``(login, account, password)`` tuple, and ``macros``
    maps macro names to lists of raw text lines.
    """

    def __init__(self, file=None):
        """Parse *file* (default: $HOME/.netrc).

        An unopenable file leaves the instance with empty tables rather
        than raising, preserving the original best-effort behaviour.
        Raises SyntaxError on malformed input.
        """
        # Define the tables up front so authenticators()/__repr__ work
        # even when the file cannot be opened (the original returned
        # early and left them undefined).
        self.hosts = {}
        self.macros = {}
        if not file:
            file = os.path.join(os.environ['HOME'], ".netrc")
        try:
            fp = open(file)
        except OSError:
            # Best-effort: a missing/unreadable .netrc yields empty tables.
            return
        lexer = shlex.shlex(fp)
        # '.' must count as a word character so host names lex as one token.
        lexer.wordchars = lexer.wordchars + '.'
        while 1:
            # Look for a machine, default, or macdef top-level keyword.
            toplevel = lexer.get_token()
            if not toplevel:
                break
            elif toplevel == 'machine':
                entryname = lexer.get_token()
            elif toplevel == 'default':
                entryname = 'default'
            elif toplevel == 'macdef':
                # A macro definition's body is raw lines up to a blank
                # line; collect them verbatim.
                entryname = lexer.get_token()
                self.macros[entryname] = []
                lexer.whitespace = ' \t'  # was misspelled 'whitepace', a no-op
                while 1:
                    line = lexer.instream.readline()
                    if not line or line == '\n':
                        lexer.whitespace = ' \t\r\n'
                        break
                    self.macros[entryname].append(line)
                continue
            else:
                raise SyntaxError(
                    "bad toplevel token %s, file %s, line %d"
                    % (toplevel, file, lexer.lineno))

            # Parse the follower tokens of a machine/default entry.
            # (The original gated this on toplevel == 'machine', so any
            # 'default' entry raised SyntaxError on its first follower.)
            login = account = password = None
            while 1:
                tt = lexer.get_token()
                if not tt or tt in ('machine', 'default', 'macdef'):
                    if login and password:
                        self.hosts[entryname] = (login, account, password)
                        # Hand the lookahead keyword back for the outer loop.
                        lexer.push_token(tt)
                        break
                    raise SyntaxError(
                        "malformed %s entry %s terminated by %s"
                        % (toplevel, entryname, repr(tt)))
                elif tt == 'login' or tt == 'user':
                    login = lexer.get_token()
                elif tt == 'account':
                    account = lexer.get_token()
                elif tt == 'password':
                    password = lexer.get_token()
                else:
                    raise SyntaxError(
                        "bad follower token %s, file %s, line %d"
                        % (tt, file, lexer.lineno))

    def authenticators(self, host):
        "Return a (login, account, password) tuple for given host."
        if host in self.hosts:
            return self.hosts[host]
        if 'default' in self.hosts:
            return self.hosts['default']
        return None

    def __repr__(self):
        "Dump the class data in the format of a .netrc file."
        rep = ""
        for host, (login, account, password) in self.hosts.items():
            rep = rep + "machine " + host + "\n\tlogin " + repr(login) + "\n"
            if account:
                # Original omitted the separators here, running the
                # account onto the previous line.
                rep = rep + "\taccount " + repr(account) + "\n"
            rep = rep + "\tpassword " + repr(password) + "\n"
        for macro, lines in self.macros.items():
            rep = rep + "macdef " + macro + "\n"
            for line in lines:
                rep = rep + line
            rep = rep + "\n"
        return rep
+
if __name__ == '__main__':
    # Demo: parse the invoking user's ~/.netrc and dump it back out.
    # (print was a statement in the original 1998 code.)
    print(netrc())
+
diff --git a/Lib/shlex.py b/Lib/shlex.py
new file mode 100644
index 0000000..8b4ac7a
--- /dev/null
+++ b/Lib/shlex.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+# Module and documentation by Eric S. Raymond, 21 Dec 1998
+
+import sys, os, string
+
class shlex:
    """A lexical analyzer class for simple shell-like syntaxes."""

    def __init__(self, instream=None):
        # Stream to tokenize; defaults to stdin so the class can be
        # used as a filter.
        if instream:
            self.instream = instream
        else:
            self.instream = sys.stdin
        self.commenters = '#'
        self.wordchars = 'abcdfeghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
        self.whitespace = ' \t\r\n'
        self.quotes = '\'"'
        # state is ' ' (between tokens), 'a' (inside a word), a quote
        # character (inside a quoted string), or None (past EOF).
        self.state = ' '
        self.pushback = []
        self.lineno = 1
        self.debug = 0
        self.token = ''

    def push_token(self, tok):
        "Push a token onto the stack popped by the get_token method"
        if self.debug >= 1:
            print("Pushing " + tok)
        self.pushback = [tok] + self.pushback

    def get_token(self):
        "Get a token from the input stream (or from stack if it's nonempty)"
        # Serve pushed-back tokens first.
        if self.pushback:
            tok = self.pushback[0]
            self.pushback = self.pushback[1:]
            if self.debug >= 1:
                print("Popping " + tok)
            return tok
        while 1:
            nextchar = self.instream.read(1)
            if nextchar == '\n':
                self.lineno = self.lineno + 1
            if self.debug >= 3:
                print("In state " + repr(self.state) +
                      " I see character: " + repr(nextchar))
            if self.state is None:
                return ''  # already saw EOF on an earlier call
            elif self.state == ' ':
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("I see whitespace in whitespace state")
                    if self.token:
                        break  # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    # Discard the rest of the comment line.
                    self.instream.readline()
                    self.lineno = self.lineno + 1
                elif nextchar in self.wordchars:
                    self.token = nextchar
                    self.state = 'a'
                elif nextchar in self.quotes:
                    self.token = nextchar
                    self.state = nextchar
                else:
                    # Punctuation is emitted as a one-character token.
                    self.token = nextchar
                    if self.token:
                        break  # emit current token
                    else:
                        continue
            elif self.state in self.quotes:
                if not nextchar:
                    # BUG FIX: the original spun forever here, since
                    # read(1) keeps returning '' at EOF and '' never
                    # matches the closing quote.
                    raise ValueError("No closing quotation")
                self.token = self.token + nextchar
                if nextchar == self.state:
                    self.state = ' '
                    break
            elif self.state == 'a':
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("I see whitespace in word state")
                    self.state = ' '
                    if self.token:
                        break  # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno = self.lineno + 1
                elif nextchar in self.wordchars or nextchar in self.quotes:
                    self.token = self.token + nextchar
                else:
                    # Punctuation ends the word; push the character back
                    # so it comes out as the next token.
                    self.pushback = [nextchar] + self.pushback
                    if self.debug >= 2:
                        print("I see punctuation in word state")
                    # BUG FIX: the original assigned a local 'state',
                    # leaving self.state stuck at 'a' so a quote right
                    # after punctuation was mis-lexed as word characters.
                    self.state = ' '
                    if self.token:
                        break  # emit current token
                    else:
                        continue

        result = self.token
        self.token = ''
        if self.debug >= 1:
            print("Token: " + result)
        return result
+
if __name__ == '__main__':
    # Demo: tokenize standard input and echo each token.
    lexer = shlex()
    while 1:
        tt = lexer.get_token()
        # BUG FIX: get_token() signals EOF with '' (never None), so the
        # original 'tt != None' test looped forever printing empty
        # tokens.  Test truthiness instead.
        if tt:
            print("Token: " + repr(tt))
        else:
            break
+