author     Tim Peters <tim.peters@gmail.com>   2001-01-15 00:50:52 (GMT)
committer  Tim Peters <tim.peters@gmail.com>   2001-01-15 00:50:52 (GMT)
commit     2344fae6d003f5a5dde8016e1d2310e161288708 (patch)
tree       296a67c9d61f6f0a32796e7404e71fdf7d1a1fd2
parent     fa25a7d51fdc4c21ac569d9f62825e337a7a6b4a (diff)
Whitespace normalization.
Diffstat (limited to 'Lib')
-rw-r--r--  Lib/netrc.py          11
-rw-r--r--  Lib/nntplib.py       914
-rw-r--r--  Lib/ntpath.py          6
-rw-r--r--  Lib/nturl2path.py    106
-rw-r--r--  Lib/os.py              6
-rwxr-xr-x  Lib/pdb.py          1766
-rw-r--r--  Lib/pickle.py         30
-rw-r--r--  Lib/pipes.py         414
-rw-r--r--  Lib/poplib.py        430
-rw-r--r--  Lib/posixfile.py       6
-rw-r--r--  Lib/posixpath.py      20
-rw-r--r--  Lib/pre.py            94
-rwxr-xr-x  Lib/profile.py       970
-rw-r--r--  Lib/pstats.py        907
-rw-r--r--  Lib/pty.py           226
-rw-r--r--  Lib/pyclbr.py        488
-rwxr-xr-x  Lib/quopri.py          2
17 files changed, 3197 insertions(+), 3199 deletions(-)
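The change itself is mechanical: most of these files were indented with hard tabs (one tab, eight columns, per level), and the normalization rewrites that as four-space indents and strips trailing whitespace. Below is a minimal sketch of that kind of pass, for illustration only; it is not the script actually used for this commit, it assumes every indent is a whole multiple of eight columns, and it would mangle continuation lines that were aligned by hand. The script name is hypothetical.

import sys

def normalize_line(line, old=8, new=4):
    # Expand hard tabs, rescale the leading indent from `old` columns
    # per level to `new` spaces per level, and strip trailing whitespace.
    expanded = line.expandtabs(old).rstrip()
    body = expanded.lstrip(' ')
    levels = (len(expanded) - len(body)) // old
    return ' ' * (new * levels) + body

def normalize_file(path):
    with open(path) as f:
        lines = f.readlines()
    with open(path, 'w') as f:
        f.write('\n'.join(normalize_line(l) for l in lines) + '\n')

if __name__ == '__main__':
    # e.g.  python normalize_ws.py Lib/netrc.py Lib/nntplib.py ...
    for name in sys.argv[1:]:
        normalize_file(name)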
diff --git a/Lib/netrc.py b/Lib/netrc.py
index 679669f..6bc0292 100644
--- a/Lib/netrc.py
+++ b/Lib/netrc.py
@@ -1,6 +1,6 @@
"""An object-oriented interface to .netrc files."""
-# Module and documentation by Eric S. Raymond, 21 Dec 1998
+# Module and documentation by Eric S. Raymond, 21 Dec 1998
import os, shlex
@@ -12,7 +12,7 @@ class netrc:
self.hosts = {}
self.macros = {}
lexer = shlex.shlex(fp)
- # Allows @ in hostnames. Not a big deal...
+ # Allows @ in hostnames. Not a big deal...
lexer.wordchars = lexer.wordchars + '.-@'
while 1:
# Look for a machine, default, or macdef top-level keyword
@@ -23,7 +23,7 @@ class netrc:
entryname = lexer.get_token()
elif tt == 'default':
entryname = 'default'
- elif tt == 'macdef': # Just skip to end of macdefs
+ elif tt == 'macdef': # Just skip to end of macdefs
entryname = lexer.get_token()
self.macros[entryname] = []
lexer.whitepace = ' \t'
@@ -36,7 +36,7 @@ class netrc:
self.macros[entryname].append(line)
else:
raise SyntaxError, "bad toplevel token %s, file %s, line %d" \
- % (tt, file, lexer.lineno)
+ % (tt, file, lexer.lineno)
# We're looking at start of an entry for a named machine or default.
if toplevel == 'machine':
@@ -87,6 +87,5 @@ class netrc:
rep = rep + "\n"
return rep
-if __name__ == '__main__':
+if __name__ == '__main__':
print netrc()
-
diff --git a/Lib/nntplib.py b/Lib/nntplib.py
index 526677a..16716ec 100644
--- a/Lib/nntplib.py
+++ b/Lib/nntplib.py
@@ -34,36 +34,36 @@ import socket
import string
-
+
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
- """Base class for all nntplib exceptions"""
- def __init__(self, *args):
- apply(Exception.__init__, (self,)+args)
- try:
- self.response = args[0]
- except IndexError:
- self.response = 'No response given'
+ """Base class for all nntplib exceptions"""
+ def __init__(self, *args):
+ apply(Exception.__init__, (self,)+args)
+ try:
+ self.response = args[0]
+ except IndexError:
+ self.response = 'No response given'
class NNTPReplyError(NNTPError):
- """Unexpected [123]xx reply"""
- pass
+ """Unexpected [123]xx reply"""
+ pass
class NNTPTemporaryError(NNTPError):
- """4xx errors"""
- pass
+ """4xx errors"""
+ pass
class NNTPPermanentError(NNTPError):
- """5xx errors"""
- pass
+ """5xx errors"""
+ pass
class NNTPProtocolError(NNTPError):
- """Response does not begin with [1-5]"""
- pass
+ """Response does not begin with [1-5]"""
+ pass
class NNTPDataError(NNTPError):
- """Error in response data"""
- pass
+ """Error in response data"""
+ pass
# for backwards compatibility
error_reply = NNTPReplyError
@@ -73,7 +73,7 @@ error_proto = NNTPProtocolError
error_data = NNTPDataError
-
+
# Standard port used by NNTP servers
NNTP_PORT = 119
@@ -86,450 +86,450 @@ LONGRESP = ['100', '215', '220', '221', '222', '224', '230', '231', '282']
CRLF = '\r\n'
-
+
# The class itself
class NNTP:
- def __init__(self, host, port=NNTP_PORT, user=None, password=None,
- readermode=None):
- """Initialize an instance. Arguments:
- - host: hostname to connect to
- - port: port to connect to (default the standard NNTP port)
- - user: username to authenticate with
- - password: password to use with username
- - readermode: if true, send 'mode reader' command after
- connecting.
-
- readermode is sometimes necessary if you are connecting to an
- NNTP server on the local machine and intend to call
- reader-specific comamnds, such as `group'. If you get
- unexpected NNTPPermanentErrors, you might need to set
- readermode.
- """
- self.host = host
- self.port = port
- self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.sock.connect((self.host, self.port))
- self.file = self.sock.makefile('rb')
- self.debugging = 0
- self.welcome = self.getresp()
- if readermode:
- try:
- self.welcome = self.shortcmd('mode reader')
- except NNTPPermanentError:
- # error 500, probably 'not implemented'
- pass
- if user:
- resp = self.shortcmd('authinfo user '+user)
- if resp[:3] == '381':
- if not password:
- raise NNTPReplyError(resp)
- else:
- resp = self.shortcmd(
- 'authinfo pass '+password)
- if resp[:3] != '281':
- raise NNTPPermanentError(resp)
-
- # Get the welcome message from the server
- # (this is read and squirreled away by __init__()).
- # If the response code is 200, posting is allowed;
- # if it 201, posting is not allowed
-
- def getwelcome(self):
- """Get the welcome message from the server
- (this is read and squirreled away by __init__()).
- If the response code is 200, posting is allowed;
- if it 201, posting is not allowed."""
-
- if self.debugging: print '*welcome*', `self.welcome`
- return self.welcome
-
- def set_debuglevel(self, level):
- """Set the debugging level. Argument 'level' means:
- 0: no debugging output (default)
- 1: print commands and responses but not body text etc.
- 2: also print raw lines read and sent before stripping CR/LF"""
-
- self.debugging = level
- debug = set_debuglevel
-
- def putline(self, line):
- """Internal: send one line to the server, appending CRLF."""
- line = line + CRLF
- if self.debugging > 1: print '*put*', `line`
- self.sock.send(line)
-
- def putcmd(self, line):
- """Internal: send one command to the server (through putline())."""
- if self.debugging: print '*cmd*', `line`
- self.putline(line)
-
- def getline(self):
- """Internal: return one line from the server, stripping CRLF.
- Raise EOFError if the connection is closed."""
- line = self.file.readline()
- if self.debugging > 1:
- print '*get*', `line`
- if not line: raise EOFError
- if line[-2:] == CRLF: line = line[:-2]
- elif line[-1:] in CRLF: line = line[:-1]
- return line
-
- def getresp(self):
- """Internal: get a response from the server.
- Raise various errors if the response indicates an error."""
- resp = self.getline()
- if self.debugging: print '*resp*', `resp`
- c = resp[:1]
- if c == '4':
- raise NNTPTemporaryError(resp)
- if c == '5':
- raise NNTPPermanentError(resp)
- if c not in '123':
- raise NNTPProtocolError(resp)
- return resp
-
- def getlongresp(self):
- """Internal: get a response plus following text from the server.
- Raise various errors if the response indicates an error."""
- resp = self.getresp()
- if resp[:3] not in LONGRESP:
- raise NNTPReplyError(resp)
- list = []
- while 1:
- line = self.getline()
- if line == '.':
- break
- if line[:2] == '..':
- line = line[1:]
- list.append(line)
- return resp, list
-
- def shortcmd(self, line):
- """Internal: send a command and get the response."""
- self.putcmd(line)
- return self.getresp()
-
- def longcmd(self, line):
- """Internal: send a command and get the response plus following text."""
- self.putcmd(line)
- return self.getlongresp()
-
- def newgroups(self, date, time):
- """Process a NEWGROUPS command. Arguments:
- - date: string 'yymmdd' indicating the date
- - time: string 'hhmmss' indicating the time
- Return:
- - resp: server response if successful
- - list: list of newsgroup names"""
-
- return self.longcmd('NEWGROUPS ' + date + ' ' + time)
-
- def newnews(self, group, date, time):
- """Process a NEWNEWS command. Arguments:
- - group: group name or '*'
- - date: string 'yymmdd' indicating the date
- - time: string 'hhmmss' indicating the time
- Return:
- - resp: server response if successful
- - list: list of article ids"""
-
- cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
- return self.longcmd(cmd)
-
- def list(self):
- """Process a LIST command. Return:
- - resp: server response if successful
- - list: list of (group, last, first, flag) (strings)"""
-
- resp, list = self.longcmd('LIST')
- for i in range(len(list)):
- # Parse lines into "group last first flag"
- list[i] = tuple(string.split(list[i]))
- return resp, list
-
- def group(self, name):
- """Process a GROUP command. Argument:
- - group: the group name
- Returns:
- - resp: server response if successful
- - count: number of articles (string)
- - first: first article number (string)
- - last: last article number (string)
- - name: the group name"""
-
- resp = self.shortcmd('GROUP ' + name)
- if resp[:3] != '211':
- raise NNTPReplyError(resp)
- words = string.split(resp)
- count = first = last = 0
- n = len(words)
- if n > 1:
- count = words[1]
- if n > 2:
- first = words[2]
- if n > 3:
- last = words[3]
- if n > 4:
- name = string.lower(words[4])
- return resp, count, first, last, name
-
- def help(self):
- """Process a HELP command. Returns:
- - resp: server response if successful
- - list: list of strings"""
-
- return self.longcmd('HELP')
-
- def statparse(self, resp):
- """Internal: parse the response of a STAT, NEXT or LAST command."""
- if resp[:2] != '22':
- raise NNTPReplyError(resp)
- words = string.split(resp)
- nr = 0
- id = ''
- n = len(words)
- if n > 1:
- nr = words[1]
- if n > 2:
- id = words[2]
- return resp, nr, id
-
- def statcmd(self, line):
- """Internal: process a STAT, NEXT or LAST command."""
- resp = self.shortcmd(line)
- return self.statparse(resp)
-
- def stat(self, id):
- """Process a STAT command. Argument:
- - id: article number or message id
- Returns:
- - resp: server response if successful
- - nr: the article number
- - id: the article id"""
-
- return self.statcmd('STAT ' + id)
-
- def next(self):
- """Process a NEXT command. No arguments. Return as for STAT."""
- return self.statcmd('NEXT')
-
- def last(self):
- """Process a LAST command. No arguments. Return as for STAT."""
- return self.statcmd('LAST')
-
- def artcmd(self, line):
- """Internal: process a HEAD, BODY or ARTICLE command."""
- resp, list = self.longcmd(line)
- resp, nr, id = self.statparse(resp)
- return resp, nr, id, list
-
- def head(self, id):
- """Process a HEAD command. Argument:
- - id: article number or message id
- Returns:
- - resp: server response if successful
- - nr: article number
- - id: message id
- - list: the lines of the article's header"""
-
- return self.artcmd('HEAD ' + id)
-
- def body(self, id):
- """Process a BODY command. Argument:
- - id: article number or message id
- Returns:
- - resp: server response if successful
- - nr: article number
- - id: message id
- - list: the lines of the article's body"""
-
- return self.artcmd('BODY ' + id)
-
- def article(self, id):
- """Process an ARTICLE command. Argument:
- - id: article number or message id
- Returns:
- - resp: server response if successful
- - nr: article number
- - id: message id
- - list: the lines of the article"""
-
- return self.artcmd('ARTICLE ' + id)
-
- def slave(self):
- """Process a SLAVE command. Returns:
- - resp: server response if successful"""
-
- return self.shortcmd('SLAVE')
-
- def xhdr(self, hdr, str):
- """Process an XHDR command (optional server extension). Arguments:
- - hdr: the header type (e.g. 'subject')
- - str: an article nr, a message id, or a range nr1-nr2
- Returns:
- - resp: server response if successful
- - list: list of (nr, value) strings"""
-
- pat = re.compile('^([0-9]+) ?(.*)\n?')
- resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str)
- for i in range(len(lines)):
- line = lines[i]
- m = pat.match(line)
- if m:
- lines[i] = m.group(1, 2)
- return resp, lines
-
- def xover(self,start,end):
- """Process an XOVER command (optional server extension) Arguments:
- - start: start of range
- - end: end of range
- Returns:
- - resp: server response if successful
- - list: list of (art-nr, subject, poster, date,
- id, references, size, lines)"""
-
- resp, lines = self.longcmd('XOVER ' + start + '-' + end)
- xover_lines = []
- for line in lines:
- elem = string.splitfields(line,"\t")
- try:
- xover_lines.append((elem[0],
- elem[1],
- elem[2],
- elem[3],
- elem[4],
- string.split(elem[5]),
- elem[6],
- elem[7]))
- except IndexError:
- raise NNTPDataError(line)
- return resp,xover_lines
-
- def xgtitle(self, group):
- """Process an XGTITLE command (optional server extension) Arguments:
- - group: group name wildcard (i.e. news.*)
- Returns:
- - resp: server response if successful
- - list: list of (name,title) strings"""
-
- line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
- resp, raw_lines = self.longcmd('XGTITLE ' + group)
- lines = []
- for raw_line in raw_lines:
- match = line_pat.search(string.strip(raw_line))
- if match:
- lines.append(match.group(1, 2))
- return resp, lines
-
- def xpath(self,id):
- """Process an XPATH command (optional server extension) Arguments:
- - id: Message id of article
- Returns:
- resp: server response if successful
- path: directory path to article"""
-
- resp = self.shortcmd("XPATH " + id)
- if resp[:3] != '223':
- raise NNTPReplyError(resp)
- try:
- [resp_num, path] = string.split(resp)
- except ValueError:
- raise NNTPReplyError(resp)
- else:
- return resp, path
-
- def date (self):
- """Process the DATE command. Arguments:
- None
- Returns:
- resp: server response if successful
- date: Date suitable for newnews/newgroups commands etc.
- time: Time suitable for newnews/newgroups commands etc."""
-
- resp = self.shortcmd("DATE")
- if resp[:3] != '111':
- raise NNTPReplyError(resp)
- elem = string.split(resp)
- if len(elem) != 2:
- raise NNTPDataError(resp)
- date = elem[1][2:8]
- time = elem[1][-6:]
- if len(date) != 6 or len(time) != 6:
- raise NNTPDataError(resp)
- return resp, date, time
-
-
- def post(self, f):
- """Process a POST command. Arguments:
- - f: file containing the article
- Returns:
- - resp: server response if successful"""
-
- resp = self.shortcmd('POST')
- # Raises error_??? if posting is not allowed
- if resp[0] != '3':
- raise NNTPReplyError(resp)
- while 1:
- line = f.readline()
- if not line:
- break
- if line[-1] == '\n':
- line = line[:-1]
- if line[:1] == '.':
- line = '.' + line
- self.putline(line)
- self.putline('.')
- return self.getresp()
-
- def ihave(self, id, f):
- """Process an IHAVE command. Arguments:
- - id: message-id of the article
- - f: file containing the article
- Returns:
- - resp: server response if successful
- Note that if the server refuses the article an exception is raised."""
-
- resp = self.shortcmd('IHAVE ' + id)
- # Raises error_??? if the server already has it
- if resp[0] != '3':
- raise NNTPReplyError(resp)
- while 1:
- line = f.readline()
- if not line:
- break
- if line[-1] == '\n':
- line = line[:-1]
- if line[:1] == '.':
- line = '.' + line
- self.putline(line)
- self.putline('.')
- return self.getresp()
-
- def quit(self):
- """Process a QUIT command and close the socket. Returns:
- - resp: server response if successful"""
-
- resp = self.shortcmd('QUIT')
- self.file.close()
- self.sock.close()
- del self.file, self.sock
- return resp
+ def __init__(self, host, port=NNTP_PORT, user=None, password=None,
+ readermode=None):
+ """Initialize an instance. Arguments:
+ - host: hostname to connect to
+ - port: port to connect to (default the standard NNTP port)
+ - user: username to authenticate with
+ - password: password to use with username
+ - readermode: if true, send 'mode reader' command after
+ connecting.
+
+ readermode is sometimes necessary if you are connecting to an
+ NNTP server on the local machine and intend to call
+ reader-specific comamnds, such as `group'. If you get
+ unexpected NNTPPermanentErrors, you might need to set
+ readermode.
+ """
+ self.host = host
+ self.port = port
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.sock.connect((self.host, self.port))
+ self.file = self.sock.makefile('rb')
+ self.debugging = 0
+ self.welcome = self.getresp()
+ if readermode:
+ try:
+ self.welcome = self.shortcmd('mode reader')
+ except NNTPPermanentError:
+ # error 500, probably 'not implemented'
+ pass
+ if user:
+ resp = self.shortcmd('authinfo user '+user)
+ if resp[:3] == '381':
+ if not password:
+ raise NNTPReplyError(resp)
+ else:
+ resp = self.shortcmd(
+ 'authinfo pass '+password)
+ if resp[:3] != '281':
+ raise NNTPPermanentError(resp)
+
+ # Get the welcome message from the server
+ # (this is read and squirreled away by __init__()).
+ # If the response code is 200, posting is allowed;
+ # if it 201, posting is not allowed
+
+ def getwelcome(self):
+ """Get the welcome message from the server
+ (this is read and squirreled away by __init__()).
+ If the response code is 200, posting is allowed;
+ if it 201, posting is not allowed."""
+
+ if self.debugging: print '*welcome*', `self.welcome`
+ return self.welcome
+
+ def set_debuglevel(self, level):
+ """Set the debugging level. Argument 'level' means:
+ 0: no debugging output (default)
+ 1: print commands and responses but not body text etc.
+ 2: also print raw lines read and sent before stripping CR/LF"""
+
+ self.debugging = level
+ debug = set_debuglevel
+
+ def putline(self, line):
+ """Internal: send one line to the server, appending CRLF."""
+ line = line + CRLF
+ if self.debugging > 1: print '*put*', `line`
+ self.sock.send(line)
+
+ def putcmd(self, line):
+ """Internal: send one command to the server (through putline())."""
+ if self.debugging: print '*cmd*', `line`
+ self.putline(line)
+
+ def getline(self):
+ """Internal: return one line from the server, stripping CRLF.
+ Raise EOFError if the connection is closed."""
+ line = self.file.readline()
+ if self.debugging > 1:
+ print '*get*', `line`
+ if not line: raise EOFError
+ if line[-2:] == CRLF: line = line[:-2]
+ elif line[-1:] in CRLF: line = line[:-1]
+ return line
+
+ def getresp(self):
+ """Internal: get a response from the server.
+ Raise various errors if the response indicates an error."""
+ resp = self.getline()
+ if self.debugging: print '*resp*', `resp`
+ c = resp[:1]
+ if c == '4':
+ raise NNTPTemporaryError(resp)
+ if c == '5':
+ raise NNTPPermanentError(resp)
+ if c not in '123':
+ raise NNTPProtocolError(resp)
+ return resp
+
+ def getlongresp(self):
+ """Internal: get a response plus following text from the server.
+ Raise various errors if the response indicates an error."""
+ resp = self.getresp()
+ if resp[:3] not in LONGRESP:
+ raise NNTPReplyError(resp)
+ list = []
+ while 1:
+ line = self.getline()
+ if line == '.':
+ break
+ if line[:2] == '..':
+ line = line[1:]
+ list.append(line)
+ return resp, list
+
+ def shortcmd(self, line):
+ """Internal: send a command and get the response."""
+ self.putcmd(line)
+ return self.getresp()
+
+ def longcmd(self, line):
+ """Internal: send a command and get the response plus following text."""
+ self.putcmd(line)
+ return self.getlongresp()
+
+ def newgroups(self, date, time):
+ """Process a NEWGROUPS command. Arguments:
+ - date: string 'yymmdd' indicating the date
+ - time: string 'hhmmss' indicating the time
+ Return:
+ - resp: server response if successful
+ - list: list of newsgroup names"""
+
+ return self.longcmd('NEWGROUPS ' + date + ' ' + time)
+
+ def newnews(self, group, date, time):
+ """Process a NEWNEWS command. Arguments:
+ - group: group name or '*'
+ - date: string 'yymmdd' indicating the date
+ - time: string 'hhmmss' indicating the time
+ Return:
+ - resp: server response if successful
+ - list: list of article ids"""
+
+ cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
+ return self.longcmd(cmd)
+
+ def list(self):
+ """Process a LIST command. Return:
+ - resp: server response if successful
+ - list: list of (group, last, first, flag) (strings)"""
+
+ resp, list = self.longcmd('LIST')
+ for i in range(len(list)):
+ # Parse lines into "group last first flag"
+ list[i] = tuple(string.split(list[i]))
+ return resp, list
+
+ def group(self, name):
+ """Process a GROUP command. Argument:
+ - group: the group name
+ Returns:
+ - resp: server response if successful
+ - count: number of articles (string)
+ - first: first article number (string)
+ - last: last article number (string)
+ - name: the group name"""
+
+ resp = self.shortcmd('GROUP ' + name)
+ if resp[:3] != '211':
+ raise NNTPReplyError(resp)
+ words = string.split(resp)
+ count = first = last = 0
+ n = len(words)
+ if n > 1:
+ count = words[1]
+ if n > 2:
+ first = words[2]
+ if n > 3:
+ last = words[3]
+ if n > 4:
+ name = string.lower(words[4])
+ return resp, count, first, last, name
+
+ def help(self):
+ """Process a HELP command. Returns:
+ - resp: server response if successful
+ - list: list of strings"""
+
+ return self.longcmd('HELP')
+
+ def statparse(self, resp):
+ """Internal: parse the response of a STAT, NEXT or LAST command."""
+ if resp[:2] != '22':
+ raise NNTPReplyError(resp)
+ words = string.split(resp)
+ nr = 0
+ id = ''
+ n = len(words)
+ if n > 1:
+ nr = words[1]
+ if n > 2:
+ id = words[2]
+ return resp, nr, id
+
+ def statcmd(self, line):
+ """Internal: process a STAT, NEXT or LAST command."""
+ resp = self.shortcmd(line)
+ return self.statparse(resp)
+
+ def stat(self, id):
+ """Process a STAT command. Argument:
+ - id: article number or message id
+ Returns:
+ - resp: server response if successful
+ - nr: the article number
+ - id: the article id"""
+
+ return self.statcmd('STAT ' + id)
+
+ def next(self):
+ """Process a NEXT command. No arguments. Return as for STAT."""
+ return self.statcmd('NEXT')
+
+ def last(self):
+ """Process a LAST command. No arguments. Return as for STAT."""
+ return self.statcmd('LAST')
+
+ def artcmd(self, line):
+ """Internal: process a HEAD, BODY or ARTICLE command."""
+ resp, list = self.longcmd(line)
+ resp, nr, id = self.statparse(resp)
+ return resp, nr, id, list
+
+ def head(self, id):
+ """Process a HEAD command. Argument:
+ - id: article number or message id
+ Returns:
+ - resp: server response if successful
+ - nr: article number
+ - id: message id
+ - list: the lines of the article's header"""
+
+ return self.artcmd('HEAD ' + id)
+
+ def body(self, id):
+ """Process a BODY command. Argument:
+ - id: article number or message id
+ Returns:
+ - resp: server response if successful
+ - nr: article number
+ - id: message id
+ - list: the lines of the article's body"""
+
+ return self.artcmd('BODY ' + id)
+
+ def article(self, id):
+ """Process an ARTICLE command. Argument:
+ - id: article number or message id
+ Returns:
+ - resp: server response if successful
+ - nr: article number
+ - id: message id
+ - list: the lines of the article"""
+
+ return self.artcmd('ARTICLE ' + id)
+
+ def slave(self):
+ """Process a SLAVE command. Returns:
+ - resp: server response if successful"""
+
+ return self.shortcmd('SLAVE')
+
+ def xhdr(self, hdr, str):
+ """Process an XHDR command (optional server extension). Arguments:
+ - hdr: the header type (e.g. 'subject')
+ - str: an article nr, a message id, or a range nr1-nr2
+ Returns:
+ - resp: server response if successful
+ - list: list of (nr, value) strings"""
+
+ pat = re.compile('^([0-9]+) ?(.*)\n?')
+ resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str)
+ for i in range(len(lines)):
+ line = lines[i]
+ m = pat.match(line)
+ if m:
+ lines[i] = m.group(1, 2)
+ return resp, lines
+
+ def xover(self,start,end):
+ """Process an XOVER command (optional server extension) Arguments:
+ - start: start of range
+ - end: end of range
+ Returns:
+ - resp: server response if successful
+ - list: list of (art-nr, subject, poster, date,
+ id, references, size, lines)"""
+
+ resp, lines = self.longcmd('XOVER ' + start + '-' + end)
+ xover_lines = []
+ for line in lines:
+ elem = string.splitfields(line,"\t")
+ try:
+ xover_lines.append((elem[0],
+ elem[1],
+ elem[2],
+ elem[3],
+ elem[4],
+ string.split(elem[5]),
+ elem[6],
+ elem[7]))
+ except IndexError:
+ raise NNTPDataError(line)
+ return resp,xover_lines
+
+ def xgtitle(self, group):
+ """Process an XGTITLE command (optional server extension) Arguments:
+ - group: group name wildcard (i.e. news.*)
+ Returns:
+ - resp: server response if successful
+ - list: list of (name,title) strings"""
+
+ line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
+ resp, raw_lines = self.longcmd('XGTITLE ' + group)
+ lines = []
+ for raw_line in raw_lines:
+ match = line_pat.search(string.strip(raw_line))
+ if match:
+ lines.append(match.group(1, 2))
+ return resp, lines
+
+ def xpath(self,id):
+ """Process an XPATH command (optional server extension) Arguments:
+ - id: Message id of article
+ Returns:
+ resp: server response if successful
+ path: directory path to article"""
+
+ resp = self.shortcmd("XPATH " + id)
+ if resp[:3] != '223':
+ raise NNTPReplyError(resp)
+ try:
+ [resp_num, path] = string.split(resp)
+ except ValueError:
+ raise NNTPReplyError(resp)
+ else:
+ return resp, path
+
+ def date (self):
+ """Process the DATE command. Arguments:
+ None
+ Returns:
+ resp: server response if successful
+ date: Date suitable for newnews/newgroups commands etc.
+ time: Time suitable for newnews/newgroups commands etc."""
+
+ resp = self.shortcmd("DATE")
+ if resp[:3] != '111':
+ raise NNTPReplyError(resp)
+ elem = string.split(resp)
+ if len(elem) != 2:
+ raise NNTPDataError(resp)
+ date = elem[1][2:8]
+ time = elem[1][-6:]
+ if len(date) != 6 or len(time) != 6:
+ raise NNTPDataError(resp)
+ return resp, date, time
+
+
+ def post(self, f):
+ """Process a POST command. Arguments:
+ - f: file containing the article
+ Returns:
+ - resp: server response if successful"""
+
+ resp = self.shortcmd('POST')
+ # Raises error_??? if posting is not allowed
+ if resp[0] != '3':
+ raise NNTPReplyError(resp)
+ while 1:
+ line = f.readline()
+ if not line:
+ break
+ if line[-1] == '\n':
+ line = line[:-1]
+ if line[:1] == '.':
+ line = '.' + line
+ self.putline(line)
+ self.putline('.')
+ return self.getresp()
+
+ def ihave(self, id, f):
+ """Process an IHAVE command. Arguments:
+ - id: message-id of the article
+ - f: file containing the article
+ Returns:
+ - resp: server response if successful
+ Note that if the server refuses the article an exception is raised."""
+
+ resp = self.shortcmd('IHAVE ' + id)
+ # Raises error_??? if the server already has it
+ if resp[0] != '3':
+ raise NNTPReplyError(resp)
+ while 1:
+ line = f.readline()
+ if not line:
+ break
+ if line[-1] == '\n':
+ line = line[:-1]
+ if line[:1] == '.':
+ line = '.' + line
+ self.putline(line)
+ self.putline('.')
+ return self.getresp()
+
+ def quit(self):
+ """Process a QUIT command and close the socket. Returns:
+ - resp: server response if successful"""
+
+ resp = self.shortcmd('QUIT')
+ self.file.close()
+ self.sock.close()
+ del self.file, self.sock
+ return resp
def _test():
- """Minimal test function."""
- s = NNTP('news', readermode='reader')
- resp, count, first, last, name = s.group('comp.lang.python')
- print resp
- print 'Group', name, 'has', count, 'articles, range', first, 'to', last
- resp, subs = s.xhdr('subject', first + '-' + last)
- print resp
- for item in subs:
- print "%7s %s" % item
- resp = s.quit()
- print resp
+ """Minimal test function."""
+ s = NNTP('news', readermode='reader')
+ resp, count, first, last, name = s.group('comp.lang.python')
+ print resp
+ print 'Group', name, 'has', count, 'articles, range', first, 'to', last
+ resp, subs = s.xhdr('subject', first + '-' + last)
+ print resp
+ for item in subs:
+ print "%7s %s" % item
+ resp = s.quit()
+ print resp
# Run the test when run as a script
if __name__ == '__main__':
- _test()
+ _test()
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index c1f4df7..571a7d0 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -1,5 +1,5 @@
# Module 'ntpath' -- common operations on WinNT/Win95 pathnames
-"""Common pathname manipulations, WindowsNT/95 version.
+"""Common pathname manipulations, WindowsNT/95 version.
Instead of importing this module directly, import os and refer to this
module as os.path.
@@ -254,7 +254,7 @@ def ismount(path):
def walk(top, func, arg):
"""Directory tree walk whth callback function.
- walk(top, func, arg) calls func(arg, d, files) for each directory d
+ walk(top, func, arg) calls func(arg, d, files) for each directory d
in the tree rooted at top (including top itself); files is a list
of all the files and subdirs in directory d."""
try:
@@ -313,7 +313,7 @@ def expanduser(path):
# XXX With COMMAND.COM you can use any characters in a variable name,
# XXX except '^|<>='.
-def expandvars(path):
+def expandvars(path):
"""Expand shell variables of form $var and ${var}.
Unknown variables are left unchanged."""
diff --git a/Lib/nturl2path.py b/Lib/nturl2path.py
index 0445b8a..2d08eee 100644
--- a/Lib/nturl2path.py
+++ b/Lib/nturl2path.py
@@ -1,66 +1,66 @@
"""Convert a NT pathname to a file URL and vice versa."""
def url2pathname(url):
- r"""Convert a URL to a DOS path.
+ r"""Convert a URL to a DOS path.
- ///C|/foo/bar/spam.foo
+ ///C|/foo/bar/spam.foo
- becomes
+ becomes
- C:\foo\bar\spam.foo
- """
- import string, urllib
- if not '|' in url:
- # No drive specifier, just convert slashes
- if url[:4] == '////':
- # path is something like ////host/path/on/remote/host
- # convert this to \\host\path\on\remote\host
- # (notice halving of slashes at the start of the path)
- url = url[2:]
- components = string.split(url, '/')
- # make sure not to convert quoted slashes :-)
- return urllib.unquote(string.join(components, '\\'))
- comp = string.split(url, '|')
- if len(comp) != 2 or comp[0][-1] not in string.letters:
- error = 'Bad URL: ' + url
- raise IOError, error
- drive = string.upper(comp[0][-1])
- components = string.split(comp[1], '/')
- path = drive + ':'
- for comp in components:
- if comp:
- path = path + '\\' + urllib.unquote(comp)
- return path
+ C:\foo\bar\spam.foo
+ """
+ import string, urllib
+ if not '|' in url:
+ # No drive specifier, just convert slashes
+ if url[:4] == '////':
+ # path is something like ////host/path/on/remote/host
+ # convert this to \\host\path\on\remote\host
+ # (notice halving of slashes at the start of the path)
+ url = url[2:]
+ components = string.split(url, '/')
+ # make sure not to convert quoted slashes :-)
+ return urllib.unquote(string.join(components, '\\'))
+ comp = string.split(url, '|')
+ if len(comp) != 2 or comp[0][-1] not in string.letters:
+ error = 'Bad URL: ' + url
+ raise IOError, error
+ drive = string.upper(comp[0][-1])
+ components = string.split(comp[1], '/')
+ path = drive + ':'
+ for comp in components:
+ if comp:
+ path = path + '\\' + urllib.unquote(comp)
+ return path
def pathname2url(p):
- r"""Convert a DOS path name to a file url.
+ r"""Convert a DOS path name to a file url.
- C:\foo\bar\spam.foo
+ C:\foo\bar\spam.foo
- becomes
+ becomes
- ///C|/foo/bar/spam.foo
- """
+ ///C|/foo/bar/spam.foo
+ """
- import string, urllib
- if not ':' in p:
- # No drive specifier, just convert slashes and quote the name
- if p[:2] == '\\\\':
- # path is something like \\host\path\on\remote\host
- # convert this to ////host/path/on/remote/host
- # (notice doubling of slashes at the start of the path)
- p = '\\\\' + p
- components = string.split(p, '\\')
- return urllib.quote(string.join(components, '/'))
- comp = string.split(p, ':')
- if len(comp) != 2 or len(comp[0]) > 1:
- error = 'Bad path: ' + p
- raise IOError, error
+ import string, urllib
+ if not ':' in p:
+ # No drive specifier, just convert slashes and quote the name
+ if p[:2] == '\\\\':
+ # path is something like \\host\path\on\remote\host
+ # convert this to ////host/path/on/remote/host
+ # (notice doubling of slashes at the start of the path)
+ p = '\\\\' + p
+ components = string.split(p, '\\')
+ return urllib.quote(string.join(components, '/'))
+ comp = string.split(p, ':')
+ if len(comp) != 2 or len(comp[0]) > 1:
+ error = 'Bad path: ' + p
+ raise IOError, error
- drive = urllib.quote(string.upper(comp[0]))
- components = string.split(comp[1], '\\')
- path = '///' + drive + '|'
- for comp in components:
- if comp:
- path = path + '/' + urllib.quote(comp)
- return path
+ drive = urllib.quote(string.upper(comp[0]))
+ components = string.split(comp[1], '\\')
+ path = '///' + drive + '|'
+ for comp in components:
+ if comp:
+ path = path + '/' + urllib.quote(comp)
+ return path
diff --git a/Lib/os.py b/Lib/os.py
index ea38021..0ccdad2 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -213,7 +213,7 @@ def execlpe(file, *args):
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env, replacing the current
- process. """
+ process. """
env = args[-1]
execvpe(file, args[:-1], env)
@@ -231,7 +231,7 @@ def execvpe(file, args, env):
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env , replacing the
current process.
- args may be a list or tuple of strings. """
+ args may be a list or tuple of strings. """
_execvpe(file, args, env)
_notfound = None
@@ -370,7 +370,7 @@ if _exists("fork") and not _exists("spawnv") and _exists("execv"):
Execute file with arguments from args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
-otherwise return -SIG, where SIG is the signal that killed it. """
+otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, None, execv)
def spawnve(mode, file, args, env):
diff --git a/Lib/pdb.py b/Lib/pdb.py
index 697fb88..cb1a4d2 100755
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -14,893 +14,893 @@ import os
import re
def find_function(funcname, filename):
- cre = re.compile(r'def\s+%s\s*[(]' % funcname)
- try:
- fp = open(filename)
- except IOError:
- return None
- # consumer of this info expects the first line to be 1
- lineno = 1
- answer = None
- while 1:
- line = fp.readline()
- if line == '':
- break
- if cre.match(line):
- answer = funcname, filename, lineno
- break
- lineno = lineno + 1
- fp.close()
- return answer
+ cre = re.compile(r'def\s+%s\s*[(]' % funcname)
+ try:
+ fp = open(filename)
+ except IOError:
+ return None
+ # consumer of this info expects the first line to be 1
+ lineno = 1
+ answer = None
+ while 1:
+ line = fp.readline()
+ if line == '':
+ break
+ if cre.match(line):
+ answer = funcname, filename, lineno
+ break
+ lineno = lineno + 1
+ fp.close()
+ return answer
# Interaction prompt line will separate file and call info from code
# text using value of line_prefix string. A newline and arrow may
# be to your liking. You can set it once pdb is imported using the
# command "pdb.line_prefix = '\n% '".
-# line_prefix = ': ' # Use this to get the old situation back
-line_prefix = '\n-> ' # Probably a better default
+# line_prefix = ': ' # Use this to get the old situation back
+line_prefix = '\n-> ' # Probably a better default
class Pdb(bdb.Bdb, cmd.Cmd):
-
- def __init__(self):
- bdb.Bdb.__init__(self)
- cmd.Cmd.__init__(self)
- self.prompt = '(Pdb) '
- self.aliases = {}
- # Try to load readline if it exists
- try:
- import readline
- except ImportError:
- pass
-
- # Read $HOME/.pdbrc and ./.pdbrc
- self.rcLines = []
- if os.environ.has_key('HOME'):
- envHome = os.environ['HOME']
- try:
- rcFile = open(os.path.join(envHome, ".pdbrc"))
- except IOError:
- pass
- else:
- for line in rcFile.readlines():
- self.rcLines.append(line)
- rcFile.close()
- try:
- rcFile = open(".pdbrc")
- except IOError:
- pass
- else:
- for line in rcFile.readlines():
- self.rcLines.append(line)
- rcFile.close()
-
- def reset(self):
- bdb.Bdb.reset(self)
- self.forget()
-
- def forget(self):
- self.lineno = None
- self.stack = []
- self.curindex = 0
- self.curframe = None
-
- def setup(self, f, t):
- self.forget()
- self.stack, self.curindex = self.get_stack(f, t)
- self.curframe = self.stack[self.curindex][0]
- self.execRcLines()
-
- # Can be executed earlier than 'setup' if desired
- def execRcLines(self):
- if self.rcLines:
- # Make local copy because of recursion
- rcLines = self.rcLines
- # executed only once
- self.rcLines = []
- for line in rcLines:
- line = line[:-1]
- if len (line) > 0 and line[0] != '#':
- self.onecmd (line)
-
- # Override Bdb methods (except user_call, for now)
-
- def user_line(self, frame):
- """This function is called when we stop or break at this line."""
- self.interaction(frame, None)
-
- def user_return(self, frame, return_value):
- """This function is called when a return trap is set here."""
- frame.f_locals['__return__'] = return_value
- print '--Return--'
- self.interaction(frame, None)
-
- def user_exception(self, frame, (exc_type, exc_value, exc_traceback)):
- """This function is called if an exception occurs,
- but only if we are to stop at or just below this level."""
- frame.f_locals['__exception__'] = exc_type, exc_value
- if type(exc_type) == type(''):
- exc_type_name = exc_type
- else: exc_type_name = exc_type.__name__
- print exc_type_name + ':', repr.repr(exc_value)
- self.interaction(frame, exc_traceback)
-
- # General interaction function
-
- def interaction(self, frame, traceback):
- self.setup(frame, traceback)
- self.print_stack_entry(self.stack[self.curindex])
- self.cmdloop()
- self.forget()
-
- def default(self, line):
- if line[:1] == '!': line = line[1:]
- locals = self.curframe.f_locals
- globals = self.curframe.f_globals
- try:
- code = compile(line + '\n', '<stdin>', 'single')
- exec code in globals, locals
- except:
- t, v = sys.exc_info()[:2]
- if type(t) == type(''):
- exc_type_name = t
- else: exc_type_name = t.__name__
- print '***', exc_type_name + ':', v
-
- def precmd(self, line):
- """Handle alias expansion and ';;' separator."""
- if not line:
- return line
- args = string.split(line)
- while self.aliases.has_key(args[0]):
- line = self.aliases[args[0]]
- ii = 1
- for tmpArg in args[1:]:
- line = string.replace(line, "%" + str(ii),
- tmpArg)
- ii = ii + 1
- line = string.replace(line, "%*",
- string.join(args[1:], ' '))
- args = string.split(line)
- # split into ';;' separated commands
- # unless it's an alias command
- if args[0] != 'alias':
- marker = string.find(line, ';;')
- if marker >= 0:
- # queue up everything after marker
- next = string.lstrip(line[marker+2:])
- self.cmdqueue.append(next)
- line = string.rstrip(line[:marker])
- return line
-
- # Command definitions, called by cmdloop()
- # The argument is the remaining string on the command line
- # Return true to exit from the command loop
-
- do_h = cmd.Cmd.do_help
-
- def do_EOF(self, arg):
- return 0 # Don't die on EOF
-
- def do_break(self, arg, temporary = 0):
- # break [ ([filename:]lineno | function) [, "condition"] ]
- if not arg:
- if self.breaks: # There's at least one
- print "Num Type Disp Enb Where"
- for bp in bdb.Breakpoint.bpbynumber:
- if bp:
- bp.bpprint()
- return
- # parse arguments; comma has lowest precedence
- # and cannot occur in filename
- filename = None
- lineno = None
- cond = None
- comma = string.find(arg, ',')
- if comma > 0:
- # parse stuff after comma: "condition"
- cond = string.lstrip(arg[comma+1:])
- arg = string.rstrip(arg[:comma])
- # parse stuff before comma: [filename:]lineno | function
- colon = string.rfind(arg, ':')
- if colon >= 0:
- filename = string.rstrip(arg[:colon])
- f = self.lookupmodule(filename)
- if not f:
- print '*** ', `filename`,
- print 'not found from sys.path'
- return
- else:
- filename = f
- arg = string.lstrip(arg[colon+1:])
- try:
- lineno = int(arg)
- except ValueError, msg:
- print '*** Bad lineno:', arg
- return
- else:
- # no colon; can be lineno or function
- try:
- lineno = int(arg)
- except ValueError:
- try:
- func = eval(arg,
- self.curframe.f_globals,
- self.curframe.f_locals)
- except:
- func = arg
- try:
- if hasattr(func, 'im_func'):
- func = func.im_func
- code = func.func_code
- lineno = code.co_firstlineno
- filename = code.co_filename
- except:
- # last thing to try
- (ok, filename, ln) = self.lineinfo(arg)
- if not ok:
- print '*** The specified object',
- print `arg`,
- print 'is not a function'
- print ('or was not found '
- 'along sys.path.')
- return
- lineno = int(ln)
- if not filename:
- filename = self.defaultFile()
- # Check for reasonable breakpoint
- line = self.checkline(filename, lineno)
- if line:
- # now set the break point
- err = self.set_break(filename, line, temporary, cond)
- if err: print '***', err
- else:
- bp = self.get_breaks(filename, line)[-1]
- print "Breakpoint %d at %s:%d" % (bp.number,
- bp.file,
- bp.line)
-
- # To be overridden in derived debuggers
- def defaultFile(self):
- """Produce a reasonable default."""
- filename = self.curframe.f_code.co_filename
- if filename == '<string>' and mainpyfile:
- filename = mainpyfile
- return filename
-
- do_b = do_break
-
- def do_tbreak(self, arg):
- self.do_break(arg, 1)
-
- def lineinfo(self, identifier):
- failed = (None, None, None)
- # Input is identifier, may be in single quotes
- idstring = string.split(identifier, "'")
- if len(idstring) == 1:
- # not in single quotes
- id = string.strip(idstring[0])
- elif len(idstring) == 3:
- # quoted
- id = string.strip(idstring[1])
- else:
- return failed
- if id == '': return failed
- parts = string.split(id, '.')
- # Protection for derived debuggers
- if parts[0] == 'self':
- del parts[0]
- if len(parts) == 0:
- return failed
- # Best first guess at file to look at
- fname = self.defaultFile()
- if len(parts) == 1:
- item = parts[0]
- else:
- # More than one part.
- # First is module, second is method/class
- f = self.lookupmodule(parts[0])
- if f:
- fname = f
- item = parts[1]
- answer = find_function(item, fname)
- return answer or failed
-
- def checkline(self, filename, lineno):
- """Return line number of first line at or after input
- argument such that if the input points to a 'def', the
- returned line number is the first
- non-blank/non-comment line to follow. If the input
- points to a blank or comment line, return 0. At end
- of file, also return 0."""
-
- line = linecache.getline(filename, lineno)
- if not line:
- print 'End of file'
- return 0
- line = string.strip(line)
- # Don't allow setting breakpoint at a blank line
- if ( not line or (line[0] == '#') or
- (line[:3] == '"""') or line[:3] == "'''" ):
- print '*** Blank or comment'
- return 0
- # When a file is read in and a breakpoint is at
- # the 'def' statement, the system stops there at
- # code parse time. We don't want that, so all breakpoints
- # set at 'def' statements are moved one line onward
- if line[:3] == 'def':
- instr = ''
- brackets = 0
- while 1:
- skipone = 0
- for c in line:
- if instr:
- if skipone:
- skipone = 0
- elif c == '\\':
- skipone = 1
- elif c == instr:
- instr = ''
- elif c == '#':
- break
- elif c in ('"',"'"):
- instr = c
- elif c in ('(','{','['):
- brackets = brackets + 1
- elif c in (')','}',']'):
- brackets = brackets - 1
- lineno = lineno+1
- line = linecache.getline(filename, lineno)
- if not line:
- print 'end of file'
- return 0
- line = string.strip(line)
- if not line: continue # Blank line
- if brackets <= 0 and line[0] not in ('#','"',"'"):
- break
- return lineno
-
- def do_enable(self, arg):
- args = string.split(arg)
- for i in args:
- bp = bdb.Breakpoint.bpbynumber[int(i)]
- if bp:
- bp.enable()
-
- def do_disable(self, arg):
- args = string.split(arg)
- for i in args:
- bp = bdb.Breakpoint.bpbynumber[int(i)]
- if bp:
- bp.disable()
-
- def do_condition(self, arg):
- # arg is breakpoint number and condition
- args = string.split(arg, ' ', 1)
- bpnum = int(string.strip(args[0]))
- try:
- cond = args[1]
- except:
- cond = None
- bp = bdb.Breakpoint.bpbynumber[bpnum]
- if bp:
- bp.cond = cond
- if not cond:
- print 'Breakpoint', bpnum,
- print 'is now unconditional.'
-
- def do_ignore(self,arg):
- """arg is bp number followed by ignore count."""
- args = string.split(arg)
- bpnum = int(string.strip(args[0]))
- try:
- count = int(string.strip(args[1]))
- except:
- count = 0
- bp = bdb.Breakpoint.bpbynumber[bpnum]
- if bp:
- bp.ignore = count
- if (count > 0):
- reply = 'Will ignore next '
- if (count > 1):
- reply = reply + '%d crossings' % count
- else:
- reply = reply + '1 crossing'
- print reply + ' of breakpoint %d.' % bpnum
- else:
- print 'Will stop next time breakpoint',
- print bpnum, 'is reached.'
-
- def do_clear(self, arg):
- """Three possibilities, tried in this order:
- clear -> clear all breaks, ask for confirmation
- clear file:lineno -> clear all breaks at file:lineno
- clear bpno bpno ... -> clear breakpoints by number"""
- if not arg:
- try:
- reply = raw_input('Clear all breaks? ')
- except EOFError:
- reply = 'no'
- reply = string.lower(string.strip(reply))
- if reply in ('y', 'yes'):
- self.clear_all_breaks()
- return
- if ':' in arg:
- # Make sure it works for "clear C:\foo\bar.py:12"
- i = string.rfind(arg, ':')
- filename = arg[:i]
- arg = arg[i+1:]
- try:
- lineno = int(arg)
- except:
- err = "Invalid line number (%s)" % arg
- else:
- err = self.clear_break(filename, lineno)
- if err: print '***', err
- return
- numberlist = string.split(arg)
- for i in numberlist:
- err = self.clear_bpbynumber(i)
- if err:
- print '***', err
- else:
- print 'Deleted breakpoint %s ' % (i,)
- do_cl = do_clear # 'c' is already an abbreviation for 'continue'
-
- def do_where(self, arg):
- self.print_stack_trace()
- do_w = do_where
-
- def do_up(self, arg):
- if self.curindex == 0:
- print '*** Oldest frame'
- else:
- self.curindex = self.curindex - 1
- self.curframe = self.stack[self.curindex][0]
- self.print_stack_entry(self.stack[self.curindex])
- self.lineno = None
- do_u = do_up
-
- def do_down(self, arg):
- if self.curindex + 1 == len(self.stack):
- print '*** Newest frame'
- else:
- self.curindex = self.curindex + 1
- self.curframe = self.stack[self.curindex][0]
- self.print_stack_entry(self.stack[self.curindex])
- self.lineno = None
- do_d = do_down
-
- def do_step(self, arg):
- self.set_step()
- return 1
- do_s = do_step
-
- def do_next(self, arg):
- self.set_next(self.curframe)
- return 1
- do_n = do_next
-
- def do_return(self, arg):
- self.set_return(self.curframe)
- return 1
- do_r = do_return
-
- def do_continue(self, arg):
- self.set_continue()
- return 1
- do_c = do_cont = do_continue
-
- def do_quit(self, arg):
- self.set_quit()
- return 1
- do_q = do_quit
-
- def do_args(self, arg):
- f = self.curframe
- co = f.f_code
- dict = f.f_locals
- n = co.co_argcount
- if co.co_flags & 4: n = n+1
- if co.co_flags & 8: n = n+1
- for i in range(n):
- name = co.co_varnames[i]
- print name, '=',
- if dict.has_key(name): print dict[name]
- else: print "*** undefined ***"
- do_a = do_args
-
- def do_retval(self, arg):
- if self.curframe.f_locals.has_key('__return__'):
- print self.curframe.f_locals['__return__']
- else:
- print '*** Not yet returned!'
- do_rv = do_retval
-
- def do_p(self, arg):
- try:
- value = eval(arg, self.curframe.f_globals,
- self.curframe.f_locals)
- except:
- t, v = sys.exc_info()[:2]
- if type(t) == type(''):
- exc_type_name = t
- else: exc_type_name = t.__name__
- print '***', exc_type_name + ':', `v`
- return
-
- print `value`
-
- def do_list(self, arg):
- self.lastcmd = 'list'
- last = None
- if arg:
- try:
- x = eval(arg, {}, {})
- if type(x) == type(()):
- first, last = x
- first = int(first)
- last = int(last)
- if last < first:
- # Assume it's a count
- last = first + last
- else:
- first = max(1, int(x) - 5)
- except:
- print '*** Error in argument:', `arg`
- return
- elif self.lineno is None:
- first = max(1, self.curframe.f_lineno - 5)
- else:
- first = self.lineno + 1
- if last is None:
- last = first + 10
- filename = self.curframe.f_code.co_filename
- breaklist = self.get_file_breaks(filename)
- try:
- for lineno in range(first, last+1):
- line = linecache.getline(filename, lineno)
- if not line:
- print '[EOF]'
- break
- else:
- s = string.rjust(`lineno`, 3)
- if len(s) < 4: s = s + ' '
- if lineno in breaklist: s = s + 'B'
- else: s = s + ' '
- if lineno == self.curframe.f_lineno:
- s = s + '->'
- print s + '\t' + line,
- self.lineno = lineno
- except KeyboardInterrupt:
- pass
- do_l = do_list
-
- def do_whatis(self, arg):
- try:
- value = eval(arg, self.curframe.f_globals,
- self.curframe.f_locals)
- except:
- t, v = sys.exc_info()[:2]
- if type(t) == type(''):
- exc_type_name = t
- else: exc_type_name = t.__name__
- print '***', exc_type_name + ':', `v`
- return
- code = None
- # Is it a function?
- try: code = value.func_code
- except: pass
- if code:
- print 'Function', code.co_name
- return
- # Is it an instance method?
- try: code = value.im_func.func_code
- except: pass
- if code:
- print 'Method', code.co_name
- return
- # None of the above...
- print type(value)
-
- def do_alias(self, arg):
- args = string.split (arg)
- if len(args) == 0:
- keys = self.aliases.keys()
- keys.sort()
- for alias in keys:
- print "%s = %s" % (alias, self.aliases[alias])
- return
- if self.aliases.has_key(args[0]) and len (args) == 1:
- print "%s = %s" % (args[0], self.aliases[args[0]])
- else:
- self.aliases[args[0]] = string.join(args[1:], ' ')
-
- def do_unalias(self, arg):
- args = string.split (arg)
- if len(args) == 0: return
- if self.aliases.has_key(args[0]):
- del self.aliases[args[0]]
-
- # Print a traceback starting at the top stack frame.
- # The most recently entered frame is printed last;
- # this is different from dbx and gdb, but consistent with
- # the Python interpreter's stack trace.
- # It is also consistent with the up/down commands (which are
- # compatible with dbx and gdb: up moves towards 'main()'
- # and down moves towards the most recent stack frame).
-
- def print_stack_trace(self):
- try:
- for frame_lineno in self.stack:
- self.print_stack_entry(frame_lineno)
- except KeyboardInterrupt:
- pass
-
- def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix):
- frame, lineno = frame_lineno
- if frame is self.curframe:
- print '>',
- else:
- print ' ',
- print self.format_stack_entry(frame_lineno, prompt_prefix)
-
-
- # Help methods (derived from pdb.doc)
-
- def help_help(self):
- self.help_h()
-
- def help_h(self):
- print """h(elp)
- Without argument, print the list of available commands.
- With a command name as argument, print help about that command
- "help pdb" pipes the full documentation file to the $PAGER
- "help exec" gives help on the ! command"""
-
- def help_where(self):
- self.help_w()
-
- def help_w(self):
- print """w(here)
- Print a stack trace, with the most recent frame at the bottom.
- An arrow indicates the "current frame", which determines the
- context of most commands."""
-
- def help_down(self):
- self.help_d()
-
- def help_d(self):
- print """d(own)
- Move the current frame one level down in the stack trace
- (to an older frame)."""
-
- def help_up(self):
- self.help_u()
-
- def help_u(self):
- print """u(p)
- Move the current frame one level up in the stack trace
- (to a newer frame)."""
-
- def help_break(self):
- self.help_b()
-
- def help_b(self):
- print """b(reak) ([file:]lineno | function) [, condition]
- With a line number argument, set a break there in the current
- file. With a function name, set a break at first executable line
- of that function. Without argument, list all breaks. If a second
- argument is present, it is a string specifying an expression
- which must evaluate to true before the breakpoint is honored.
-
- The line number may be prefixed with a filename and a colon,
- to specify a breakpoint in another file (probably one that
- hasn't been loaded yet). The file is searched for on sys.path;
- the .py suffix may be omitted."""
-
- def help_clear(self):
- self.help_cl()
-
- def help_cl(self):
- print "cl(ear) filename:lineno"
- print """cl(ear) [bpnumber [bpnumber...]]
- With a space separated list of breakpoint numbers, clear
- those breakpoints. Without argument, clear all breaks (but
- first ask confirmation). With a filename:lineno argument,
- clear all breaks at that line in that file.
-
- Note that the argument is different from previous versions of
- the debugger (in python distributions 1.5.1 and before) where
- a linenumber was used instead of either filename:lineno or
- breakpoint numbers."""
-
- def help_tbreak(self):
- print """tbreak same arguments as break, but breakpoint is
- removed when first hit."""
-
- def help_enable(self):
- print """enable bpnumber [bpnumber ...]
- Enables the breakpoints given as a space separated list of
- bp numbers."""
-
- def help_disable(self):
- print """disable bpnumber [bpnumber ...]
- Disables the breakpoints given as a space separated list of
- bp numbers."""
-
- def help_ignore(self):
- print """ignore bpnumber count
- Sets the ignore count for the given breakpoint number. A breakpoint
- becomes active when the ignore count is zero. When non-zero, the
- count is decremented each time the breakpoint is reached and the
- breakpoint is not disabled and any associated condition evaluates
- to true."""
-
- def help_condition(self):
- print """condition bpnumber str_condition
- str_condition is a string specifying an expression which
- must evaluate to true before the breakpoint is honored.
- If str_condition is absent, any existing condition is removed;
- i.e., the breakpoint is made unconditional."""
-
- def help_step(self):
- self.help_s()
-
- def help_s(self):
- print """s(tep)
- Execute the current line, stop at the first possible occasion
- (either in a function that is called or in the current function)."""
-
- def help_next(self):
- self.help_n()
-
- def help_n(self):
- print """n(ext)
- Continue execution until the next line in the current function
- is reached or it returns."""
-
- def help_return(self):
- self.help_r()
-
- def help_r(self):
- print """r(eturn)
- Continue execution until the current function returns."""
-
- def help_continue(self):
- self.help_c()
-
- def help_cont(self):
- self.help_c()
-
- def help_c(self):
- print """c(ont(inue))
- Continue execution, only stop when a breakpoint is encountered."""
-
- def help_list(self):
- self.help_l()
-
- def help_l(self):
- print """l(ist) [first [,last]]
- List source code for the current file.
- Without arguments, list 11 lines around the current line
- or continue the previous listing.
- With one argument, list 11 lines starting at that line.
- With two arguments, list the given range;
- if the second argument is less than the first, it is a count."""
-
- def help_args(self):
- self.help_a()
-
- def help_a(self):
- print """a(rgs)
- Print the arguments of the current function."""
-
- def help_p(self):
- print """p expression
- Print the value of the expression."""
-
- def help_exec(self):
- print """(!) statement
- Execute the (one-line) statement in the context of
- the current stack frame.
- The exclamation point can be omitted unless the first word
- of the statement resembles a debugger command.
- To assign to a global variable you must always prefix the
- command with a 'global' command, e.g.:
- (Pdb) global list_options; list_options = ['-l']
- (Pdb)"""
-
- def help_quit(self):
- self.help_q()
-
- def help_q(self):
- print """q(uit) Quit from the debugger.
- The program being executed is aborted."""
-
- def help_whatis(self):
- print """whatis arg
- Prints the type of the argument."""
-
- def help_EOF(self):
- print """EOF
- Handles the receipt of EOF as a command."""
-
- def help_alias(self):
- print """alias [name [command [parameter parameter ...] ]]
- Creates an alias called 'name' the executes 'command'. The command
- must *not* be enclosed in quotes. Replaceable parameters are
- indicated by %1, %2, and so on, while %* is replaced by all the
- parameters. If no command is given, the current alias for name
- is shown. If no name is given, all aliases are listed.
-
- Aliases may be nested and can contain anything that can be
- legally typed at the pdb prompt. Note! You *can* override
- internal pdb commands with aliases! Those internal commands
- are then hidden until the alias is removed. Aliasing is recursively
- applied to the first word of the command line; all other words
- in the line are left alone.
-
- Some useful aliases (especially when placed in the .pdbrc file) are:
-
- #Print instance variables (usage "pi classInst")
- alias pi for k in %1.__dict__.keys(): print "%1.",k,"=",%1.__dict__[k]
-
- #Print instance variables in self
- alias ps pi self
- """
-
- def help_unalias(self):
- print """unalias name
- Deletes the specified alias."""
-
- def help_pdb(self):
- help()
-
- def lookupmodule(self, filename):
- """Helper function for break/clear parsing -- may be overridden."""
- root, ext = os.path.splitext(filename)
- if ext == '':
- filename = filename + '.py'
- if os.path.isabs(filename):
- return filename
- for dirname in sys.path:
- while os.path.islink(dirname):
- dirname = os.readlink(dirname)
- fullname = os.path.join(dirname, filename)
- if os.path.exists(fullname):
- return fullname
- return None
+
+ def __init__(self):
+ bdb.Bdb.__init__(self)
+ cmd.Cmd.__init__(self)
+ self.prompt = '(Pdb) '
+ self.aliases = {}
+ # Try to load readline if it exists
+ try:
+ import readline
+ except ImportError:
+ pass
+
+ # Read $HOME/.pdbrc and ./.pdbrc
+ self.rcLines = []
+ if os.environ.has_key('HOME'):
+ envHome = os.environ['HOME']
+ try:
+ rcFile = open(os.path.join(envHome, ".pdbrc"))
+ except IOError:
+ pass
+ else:
+ for line in rcFile.readlines():
+ self.rcLines.append(line)
+ rcFile.close()
+ try:
+ rcFile = open(".pdbrc")
+ except IOError:
+ pass
+ else:
+ for line in rcFile.readlines():
+ self.rcLines.append(line)
+ rcFile.close()
+
+ def reset(self):
+ bdb.Bdb.reset(self)
+ self.forget()
+
+ def forget(self):
+ self.lineno = None
+ self.stack = []
+ self.curindex = 0
+ self.curframe = None
+
+ def setup(self, f, t):
+ self.forget()
+ self.stack, self.curindex = self.get_stack(f, t)
+ self.curframe = self.stack[self.curindex][0]
+ self.execRcLines()
+
+ # Can be executed earlier than 'setup' if desired
+ def execRcLines(self):
+ if self.rcLines:
+ # Make local copy because of recursion
+ rcLines = self.rcLines
+ # executed only once
+ self.rcLines = []
+ for line in rcLines:
+ line = line[:-1]
+ if len (line) > 0 and line[0] != '#':
+ self.onecmd (line)
+
+ # Override Bdb methods (except user_call, for now)
+
+ def user_line(self, frame):
+ """This function is called when we stop or break at this line."""
+ self.interaction(frame, None)
+
+ def user_return(self, frame, return_value):
+ """This function is called when a return trap is set here."""
+ frame.f_locals['__return__'] = return_value
+ print '--Return--'
+ self.interaction(frame, None)
+
+ def user_exception(self, frame, (exc_type, exc_value, exc_traceback)):
+ """This function is called if an exception occurs,
+ but only if we are to stop at or just below this level."""
+ frame.f_locals['__exception__'] = exc_type, exc_value
+ if type(exc_type) == type(''):
+ exc_type_name = exc_type
+ else: exc_type_name = exc_type.__name__
+ print exc_type_name + ':', repr.repr(exc_value)
+ self.interaction(frame, exc_traceback)
+
+ # General interaction function
+
+ def interaction(self, frame, traceback):
+ self.setup(frame, traceback)
+ self.print_stack_entry(self.stack[self.curindex])
+ self.cmdloop()
+ self.forget()
+
+ def default(self, line):
+ if line[:1] == '!': line = line[1:]
+ locals = self.curframe.f_locals
+ globals = self.curframe.f_globals
+ try:
+ code = compile(line + '\n', '<stdin>', 'single')
+ exec code in globals, locals
+ except:
+ t, v = sys.exc_info()[:2]
+ if type(t) == type(''):
+ exc_type_name = t
+ else: exc_type_name = t.__name__
+ print '***', exc_type_name + ':', v
+
+ def precmd(self, line):
+ """Handle alias expansion and ';;' separator."""
+ if not line:
+ return line
+ args = string.split(line)
+ while self.aliases.has_key(args[0]):
+ line = self.aliases[args[0]]
+ ii = 1
+ for tmpArg in args[1:]:
+ line = string.replace(line, "%" + str(ii),
+ tmpArg)
+ ii = ii + 1
+ line = string.replace(line, "%*",
+ string.join(args[1:], ' '))
+ args = string.split(line)
+ # split into ';;' separated commands
+ # unless it's an alias command
+ if args[0] != 'alias':
+ marker = string.find(line, ';;')
+ if marker >= 0:
+ # queue up everything after marker
+ next = string.lstrip(line[marker+2:])
+ self.cmdqueue.append(next)
+ line = string.rstrip(line[:marker])
+ return line
+
+ # Command definitions, called by cmdloop()
+ # The argument is the remaining string on the command line
+ # Return true to exit from the command loop
+
+ do_h = cmd.Cmd.do_help
+
+ def do_EOF(self, arg):
+ return 0 # Don't die on EOF
+
+ def do_break(self, arg, temporary = 0):
+ # break [ ([filename:]lineno | function) [, "condition"] ]
+ if not arg:
+ if self.breaks: # There's at least one
+ print "Num Type Disp Enb Where"
+ for bp in bdb.Breakpoint.bpbynumber:
+ if bp:
+ bp.bpprint()
+ return
+ # parse arguments; comma has lowest precedence
+ # and cannot occur in filename
+ filename = None
+ lineno = None
+ cond = None
+ comma = string.find(arg, ',')
+ if comma > 0:
+ # parse stuff after comma: "condition"
+ cond = string.lstrip(arg[comma+1:])
+ arg = string.rstrip(arg[:comma])
+ # parse stuff before comma: [filename:]lineno | function
+ colon = string.rfind(arg, ':')
+ if colon >= 0:
+ filename = string.rstrip(arg[:colon])
+ f = self.lookupmodule(filename)
+ if not f:
+ print '*** ', `filename`,
+ print 'not found from sys.path'
+ return
+ else:
+ filename = f
+ arg = string.lstrip(arg[colon+1:])
+ try:
+ lineno = int(arg)
+ except ValueError, msg:
+ print '*** Bad lineno:', arg
+ return
+ else:
+ # no colon; can be lineno or function
+ try:
+ lineno = int(arg)
+ except ValueError:
+ try:
+ func = eval(arg,
+ self.curframe.f_globals,
+ self.curframe.f_locals)
+ except:
+ func = arg
+ try:
+ if hasattr(func, 'im_func'):
+ func = func.im_func
+ code = func.func_code
+ lineno = code.co_firstlineno
+ filename = code.co_filename
+ except:
+ # last thing to try
+ (ok, filename, ln) = self.lineinfo(arg)
+ if not ok:
+ print '*** The specified object',
+ print `arg`,
+ print 'is not a function'
+ print ('or was not found '
+ 'along sys.path.')
+ return
+ lineno = int(ln)
+ if not filename:
+ filename = self.defaultFile()
+ # Check for reasonable breakpoint
+ line = self.checkline(filename, lineno)
+ if line:
+ # now set the break point
+ err = self.set_break(filename, line, temporary, cond)
+ if err: print '***', err
+ else:
+ bp = self.get_breaks(filename, line)[-1]
+ print "Breakpoint %d at %s:%d" % (bp.number,
+ bp.file,
+ bp.line)
+
+ # To be overridden in derived debuggers
+ def defaultFile(self):
+ """Produce a reasonable default."""
+ filename = self.curframe.f_code.co_filename
+ if filename == '<string>' and mainpyfile:
+ filename = mainpyfile
+ return filename
+
+ do_b = do_break
+
+ def do_tbreak(self, arg):
+ self.do_break(arg, 1)
+
+ def lineinfo(self, identifier):
+ failed = (None, None, None)
+ # Input is identifier, may be in single quotes
+ idstring = string.split(identifier, "'")
+ if len(idstring) == 1:
+ # not in single quotes
+ id = string.strip(idstring[0])
+ elif len(idstring) == 3:
+ # quoted
+ id = string.strip(idstring[1])
+ else:
+ return failed
+ if id == '': return failed
+ parts = string.split(id, '.')
+ # Protection for derived debuggers
+ if parts[0] == 'self':
+ del parts[0]
+ if len(parts) == 0:
+ return failed
+ # Best first guess at file to look at
+ fname = self.defaultFile()
+ if len(parts) == 1:
+ item = parts[0]
+ else:
+ # More than one part.
+ # First is module, second is method/class
+ f = self.lookupmodule(parts[0])
+ if f:
+ fname = f
+ item = parts[1]
+ answer = find_function(item, fname)
+ return answer or failed
+
+ def checkline(self, filename, lineno):
+ """Return line number of first line at or after input
+ argument such that if the input points to a 'def', the
+ returned line number is the first
+ non-blank/non-comment line to follow. If the input
+ points to a blank or comment line, return 0. At end
+ of file, also return 0."""
+
+ line = linecache.getline(filename, lineno)
+ if not line:
+ print 'End of file'
+ return 0
+ line = string.strip(line)
+ # Don't allow setting breakpoint at a blank line
+ if ( not line or (line[0] == '#') or
+ (line[:3] == '"""') or line[:3] == "'''" ):
+ print '*** Blank or comment'
+ return 0
+ # When a file is read in and a breakpoint is at
+ # the 'def' statement, the system stops there at
+ # code parse time. We don't want that, so all breakpoints
+ # set at 'def' statements are moved one line onward
+ if line[:3] == 'def':
+ instr = ''
+ brackets = 0
+ while 1:
+ skipone = 0
+ for c in line:
+ if instr:
+ if skipone:
+ skipone = 0
+ elif c == '\\':
+ skipone = 1
+ elif c == instr:
+ instr = ''
+ elif c == '#':
+ break
+ elif c in ('"',"'"):
+ instr = c
+ elif c in ('(','{','['):
+ brackets = brackets + 1
+ elif c in (')','}',']'):
+ brackets = brackets - 1
+ lineno = lineno+1
+ line = linecache.getline(filename, lineno)
+ if not line:
+ print 'end of file'
+ return 0
+ line = string.strip(line)
+ if not line: continue # Blank line
+ if brackets <= 0 and line[0] not in ('#','"',"'"):
+ break
+ return lineno
+
+ def do_enable(self, arg):
+ args = string.split(arg)
+ for i in args:
+ bp = bdb.Breakpoint.bpbynumber[int(i)]
+ if bp:
+ bp.enable()
+
+ def do_disable(self, arg):
+ args = string.split(arg)
+ for i in args:
+ bp = bdb.Breakpoint.bpbynumber[int(i)]
+ if bp:
+ bp.disable()
+
+ def do_condition(self, arg):
+ # arg is breakpoint number and condition
+ args = string.split(arg, ' ', 1)
+ bpnum = int(string.strip(args[0]))
+ try:
+ cond = args[1]
+ except:
+ cond = None
+ bp = bdb.Breakpoint.bpbynumber[bpnum]
+ if bp:
+ bp.cond = cond
+ if not cond:
+ print 'Breakpoint', bpnum,
+ print 'is now unconditional.'
+
+ def do_ignore(self,arg):
+ """arg is bp number followed by ignore count."""
+ args = string.split(arg)
+ bpnum = int(string.strip(args[0]))
+ try:
+ count = int(string.strip(args[1]))
+ except:
+ count = 0
+ bp = bdb.Breakpoint.bpbynumber[bpnum]
+ if bp:
+ bp.ignore = count
+ if (count > 0):
+ reply = 'Will ignore next '
+ if (count > 1):
+ reply = reply + '%d crossings' % count
+ else:
+ reply = reply + '1 crossing'
+ print reply + ' of breakpoint %d.' % bpnum
+ else:
+ print 'Will stop next time breakpoint',
+ print bpnum, 'is reached.'
+
+ def do_clear(self, arg):
+ """Three possibilities, tried in this order:
+ clear -> clear all breaks, ask for confirmation
+ clear file:lineno -> clear all breaks at file:lineno
+ clear bpno bpno ... -> clear breakpoints by number"""
+ if not arg:
+ try:
+ reply = raw_input('Clear all breaks? ')
+ except EOFError:
+ reply = 'no'
+ reply = string.lower(string.strip(reply))
+ if reply in ('y', 'yes'):
+ self.clear_all_breaks()
+ return
+ if ':' in arg:
+ # Make sure it works for "clear C:\foo\bar.py:12"
+ i = string.rfind(arg, ':')
+ filename = arg[:i]
+ arg = arg[i+1:]
+ try:
+ lineno = int(arg)
+ except:
+ err = "Invalid line number (%s)" % arg
+ else:
+ err = self.clear_break(filename, lineno)
+ if err: print '***', err
+ return
+ numberlist = string.split(arg)
+ for i in numberlist:
+ err = self.clear_bpbynumber(i)
+ if err:
+ print '***', err
+ else:
+ print 'Deleted breakpoint %s ' % (i,)
+ do_cl = do_clear # 'c' is already an abbreviation for 'continue'
+
+ def do_where(self, arg):
+ self.print_stack_trace()
+ do_w = do_where
+
+ def do_up(self, arg):
+ if self.curindex == 0:
+ print '*** Oldest frame'
+ else:
+ self.curindex = self.curindex - 1
+ self.curframe = self.stack[self.curindex][0]
+ self.print_stack_entry(self.stack[self.curindex])
+ self.lineno = None
+ do_u = do_up
+
+ def do_down(self, arg):
+ if self.curindex + 1 == len(self.stack):
+ print '*** Newest frame'
+ else:
+ self.curindex = self.curindex + 1
+ self.curframe = self.stack[self.curindex][0]
+ self.print_stack_entry(self.stack[self.curindex])
+ self.lineno = None
+ do_d = do_down
+
+ def do_step(self, arg):
+ self.set_step()
+ return 1
+ do_s = do_step
+
+ def do_next(self, arg):
+ self.set_next(self.curframe)
+ return 1
+ do_n = do_next
+
+ def do_return(self, arg):
+ self.set_return(self.curframe)
+ return 1
+ do_r = do_return
+
+ def do_continue(self, arg):
+ self.set_continue()
+ return 1
+ do_c = do_cont = do_continue
+
+ def do_quit(self, arg):
+ self.set_quit()
+ return 1
+ do_q = do_quit
+
+ def do_args(self, arg):
+ f = self.curframe
+ co = f.f_code
+ dict = f.f_locals
+ n = co.co_argcount
+ if co.co_flags & 4: n = n+1
+ if co.co_flags & 8: n = n+1
+ for i in range(n):
+ name = co.co_varnames[i]
+ print name, '=',
+ if dict.has_key(name): print dict[name]
+ else: print "*** undefined ***"
+ do_a = do_args
+
+ def do_retval(self, arg):
+ if self.curframe.f_locals.has_key('__return__'):
+ print self.curframe.f_locals['__return__']
+ else:
+ print '*** Not yet returned!'
+ do_rv = do_retval
+
+ def do_p(self, arg):
+ try:
+ value = eval(arg, self.curframe.f_globals,
+ self.curframe.f_locals)
+ except:
+ t, v = sys.exc_info()[:2]
+ if type(t) == type(''):
+ exc_type_name = t
+ else: exc_type_name = t.__name__
+ print '***', exc_type_name + ':', `v`
+ return
+
+ print `value`
+
+ def do_list(self, arg):
+ self.lastcmd = 'list'
+ last = None
+ if arg:
+ try:
+ x = eval(arg, {}, {})
+ if type(x) == type(()):
+ first, last = x
+ first = int(first)
+ last = int(last)
+ if last < first:
+ # Assume it's a count
+ last = first + last
+ else:
+ first = max(1, int(x) - 5)
+ except:
+ print '*** Error in argument:', `arg`
+ return
+ elif self.lineno is None:
+ first = max(1, self.curframe.f_lineno - 5)
+ else:
+ first = self.lineno + 1
+ if last is None:
+ last = first + 10
+ filename = self.curframe.f_code.co_filename
+ breaklist = self.get_file_breaks(filename)
+ try:
+ for lineno in range(first, last+1):
+ line = linecache.getline(filename, lineno)
+ if not line:
+ print '[EOF]'
+ break
+ else:
+ s = string.rjust(`lineno`, 3)
+ if len(s) < 4: s = s + ' '
+ if lineno in breaklist: s = s + 'B'
+ else: s = s + ' '
+ if lineno == self.curframe.f_lineno:
+ s = s + '->'
+ print s + '\t' + line,
+ self.lineno = lineno
+ except KeyboardInterrupt:
+ pass
+ do_l = do_list
+
+ def do_whatis(self, arg):
+ try:
+ value = eval(arg, self.curframe.f_globals,
+ self.curframe.f_locals)
+ except:
+ t, v = sys.exc_info()[:2]
+ if type(t) == type(''):
+ exc_type_name = t
+ else: exc_type_name = t.__name__
+ print '***', exc_type_name + ':', `v`
+ return
+ code = None
+ # Is it a function?
+ try: code = value.func_code
+ except: pass
+ if code:
+ print 'Function', code.co_name
+ return
+ # Is it an instance method?
+ try: code = value.im_func.func_code
+ except: pass
+ if code:
+ print 'Method', code.co_name
+ return
+ # None of the above...
+ print type(value)
+
+ def do_alias(self, arg):
+ args = string.split (arg)
+ if len(args) == 0:
+ keys = self.aliases.keys()
+ keys.sort()
+ for alias in keys:
+ print "%s = %s" % (alias, self.aliases[alias])
+ return
+ if self.aliases.has_key(args[0]) and len (args) == 1:
+ print "%s = %s" % (args[0], self.aliases[args[0]])
+ else:
+ self.aliases[args[0]] = string.join(args[1:], ' ')
+
+ def do_unalias(self, arg):
+ args = string.split (arg)
+ if len(args) == 0: return
+ if self.aliases.has_key(args[0]):
+ del self.aliases[args[0]]
+
+ # Print a traceback starting at the top stack frame.
+ # The most recently entered frame is printed last;
+ # this is different from dbx and gdb, but consistent with
+ # the Python interpreter's stack trace.
+ # It is also consistent with the up/down commands (which are
+ # compatible with dbx and gdb: up moves towards 'main()'
+ # and down moves towards the most recent stack frame).
+
+ def print_stack_trace(self):
+ try:
+ for frame_lineno in self.stack:
+ self.print_stack_entry(frame_lineno)
+ except KeyboardInterrupt:
+ pass
+
+ def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix):
+ frame, lineno = frame_lineno
+ if frame is self.curframe:
+ print '>',
+ else:
+ print ' ',
+ print self.format_stack_entry(frame_lineno, prompt_prefix)
+
+
+ # Help methods (derived from pdb.doc)
+
+ def help_help(self):
+ self.help_h()
+
+ def help_h(self):
+ print """h(elp)
+Without argument, print the list of available commands.
+With a command name as argument, print help about that command
+"help pdb" pipes the full documentation file to the $PAGER
+"help exec" gives help on the ! command"""
+
+ def help_where(self):
+ self.help_w()
+
+ def help_w(self):
+ print """w(here)
+Print a stack trace, with the most recent frame at the bottom.
+An arrow indicates the "current frame", which determines the
+context of most commands."""
+
+ def help_down(self):
+ self.help_d()
+
+ def help_d(self):
+ print """d(own)
+Move the current frame one level down in the stack trace
+(to an older frame)."""
+
+ def help_up(self):
+ self.help_u()
+
+ def help_u(self):
+ print """u(p)
+Move the current frame one level up in the stack trace
+(to a newer frame)."""
+
+ def help_break(self):
+ self.help_b()
+
+ def help_b(self):
+ print """b(reak) ([file:]lineno | function) [, condition]
+With a line number argument, set a break there in the current
+file. With a function name, set a break at first executable line
+of that function. Without argument, list all breaks. If a second
+argument is present, it is a string specifying an expression
+which must evaluate to true before the breakpoint is honored.
+
+The line number may be prefixed with a filename and a colon,
+to specify a breakpoint in another file (probably one that
+hasn't been loaded yet). The file is searched for on sys.path;
+the .py suffix may be omitted."""
+
+ def help_clear(self):
+ self.help_cl()
+
+ def help_cl(self):
+ print "cl(ear) filename:lineno"
+ print """cl(ear) [bpnumber [bpnumber...]]
+With a space separated list of breakpoint numbers, clear
+those breakpoints. Without argument, clear all breaks (but
+first ask confirmation). With a filename:lineno argument,
+clear all breaks at that line in that file.
+
+Note that the argument is different from previous versions of
+the debugger (in python distributions 1.5.1 and before) where
+a linenumber was used instead of either filename:lineno or
+breakpoint numbers."""
+
+ def help_tbreak(self):
+ print """tbreak same arguments as break, but breakpoint is
+removed when first hit."""
+
+ def help_enable(self):
+ print """enable bpnumber [bpnumber ...]
+Enables the breakpoints given as a space separated list of
+bp numbers."""
+
+ def help_disable(self):
+ print """disable bpnumber [bpnumber ...]
+Disables the breakpoints given as a space separated list of
+bp numbers."""
+
+ def help_ignore(self):
+ print """ignore bpnumber count
+Sets the ignore count for the given breakpoint number. A breakpoint
+becomes active when the ignore count is zero. When non-zero, the
+count is decremented each time the breakpoint is reached and the
+breakpoint is not disabled and any associated condition evaluates
+to true."""
+
+ def help_condition(self):
+ print """condition bpnumber str_condition
+str_condition is a string specifying an expression which
+must evaluate to true before the breakpoint is honored.
+If str_condition is absent, any existing condition is removed;
+i.e., the breakpoint is made unconditional."""
+
+ def help_step(self):
+ self.help_s()
+
+ def help_s(self):
+ print """s(tep)
+Execute the current line, stop at the first possible occasion
+(either in a function that is called or in the current function)."""
+
+ def help_next(self):
+ self.help_n()
+
+ def help_n(self):
+ print """n(ext)
+Continue execution until the next line in the current function
+is reached or it returns."""
+
+ def help_return(self):
+ self.help_r()
+
+ def help_r(self):
+ print """r(eturn)
+Continue execution until the current function returns."""
+
+ def help_continue(self):
+ self.help_c()
+
+ def help_cont(self):
+ self.help_c()
+
+ def help_c(self):
+ print """c(ont(inue))
+Continue execution, only stop when a breakpoint is encountered."""
+
+ def help_list(self):
+ self.help_l()
+
+ def help_l(self):
+ print """l(ist) [first [,last]]
+List source code for the current file.
+Without arguments, list 11 lines around the current line
+or continue the previous listing.
+With one argument, list 11 lines starting at that line.
+With two arguments, list the given range;
+if the second argument is less than the first, it is a count."""
+
+ def help_args(self):
+ self.help_a()
+
+ def help_a(self):
+ print """a(rgs)
+Print the arguments of the current function."""
+
+ def help_p(self):
+ print """p expression
+Print the value of the expression."""
+
+ def help_exec(self):
+ print """(!) statement
+Execute the (one-line) statement in the context of
+the current stack frame.
+The exclamation point can be omitted unless the first word
+of the statement resembles a debugger command.
+To assign to a global variable you must always prefix the
+command with a 'global' command, e.g.:
+(Pdb) global list_options; list_options = ['-l']
+(Pdb)"""
+
+ def help_quit(self):
+ self.help_q()
+
+ def help_q(self):
+ print """q(uit) Quit from the debugger.
+The program being executed is aborted."""
+
+ def help_whatis(self):
+ print """whatis arg
+Prints the type of the argument."""
+
+ def help_EOF(self):
+ print """EOF
+Handles the receipt of EOF as a command."""
+
+ def help_alias(self):
+ print """alias [name [command [parameter parameter ...] ]]
+Creates an alias called 'name' that executes 'command'. The command
+must *not* be enclosed in quotes. Replaceable parameters are
+indicated by %1, %2, and so on, while %* is replaced by all the
+parameters. If no command is given, the current alias for name
+is shown. If no name is given, all aliases are listed.
+
+Aliases may be nested and can contain anything that can be
+legally typed at the pdb prompt. Note! You *can* override
+internal pdb commands with aliases! Those internal commands
+are then hidden until the alias is removed. Aliasing is recursively
+applied to the first word of the command line; all other words
+in the line are left alone.
+
+Some useful aliases (especially when placed in the .pdbrc file) are:
+
+#Print instance variables (usage "pi classInst")
+alias pi for k in %1.__dict__.keys(): print "%1.",k,"=",%1.__dict__[k]
+
+#Print instance variables in self
+alias ps pi self
+"""
+
+ def help_unalias(self):
+ print """unalias name
+Deletes the specified alias."""
+
+ def help_pdb(self):
+ help()
+
+ def lookupmodule(self, filename):
+ """Helper function for break/clear parsing -- may be overridden."""
+ root, ext = os.path.splitext(filename)
+ if ext == '':
+ filename = filename + '.py'
+ if os.path.isabs(filename):
+ return filename
+ for dirname in sys.path:
+ while os.path.islink(dirname):
+ dirname = os.readlink(dirname)
+ fullname = os.path.join(dirname, filename)
+ if os.path.exists(fullname):
+ return fullname
+ return None
# Simplified interface
def run(statement, globals=None, locals=None):
- Pdb().run(statement, globals, locals)
+ Pdb().run(statement, globals, locals)
def runeval(expression, globals=None, locals=None):
- return Pdb().runeval(expression, globals, locals)
+ return Pdb().runeval(expression, globals, locals)
def runctx(statement, globals, locals):
- # B/W compatibility
- run(statement, globals, locals)
+ # B/W compatibility
+ run(statement, globals, locals)
def runcall(*args):
- return apply(Pdb().runcall, args)
+ return apply(Pdb().runcall, args)
def set_trace():
- Pdb().set_trace()
+ Pdb().set_trace()
# Post-Mortem interface
def post_mortem(t):
- p = Pdb()
- p.reset()
- while t.tb_next is not None:
- t = t.tb_next
- p.interaction(t.tb_frame, t)
+ p = Pdb()
+ p.reset()
+ while t.tb_next is not None:
+ t = t.tb_next
+ p.interaction(t.tb_frame, t)
def pm():
- post_mortem(sys.last_traceback)
+ post_mortem(sys.last_traceback)
# Main program for testing
@@ -908,37 +908,37 @@ def pm():
TESTCMD = 'import x; x.main()'
def test():
- run(TESTCMD)
+ run(TESTCMD)
# print help
def help():
- for dirname in sys.path:
- fullname = os.path.join(dirname, 'pdb.doc')
- if os.path.exists(fullname):
- sts = os.system('${PAGER-more} '+fullname)
- if sts: print '*** Pager exit status:', sts
- break
- else:
- print 'Sorry, can\'t find the help file "pdb.doc"',
- print 'along the Python search path'
+ for dirname in sys.path:
+ fullname = os.path.join(dirname, 'pdb.doc')
+ if os.path.exists(fullname):
+ sts = os.system('${PAGER-more} '+fullname)
+ if sts: print '*** Pager exit status:', sts
+ break
+ else:
+ print 'Sorry, can\'t find the help file "pdb.doc"',
+ print 'along the Python search path'
mainmodule = ''
mainpyfile = ''
# When invoked as main program, invoke the debugger on a script
if __name__=='__main__':
- if not sys.argv[1:]:
- print "usage: pdb.py scriptfile [arg] ..."
- sys.exit(2)
+ if not sys.argv[1:]:
+ print "usage: pdb.py scriptfile [arg] ..."
+ sys.exit(2)
- mainpyfile = filename = sys.argv[1] # Get script filename
- if not os.path.exists(filename):
- print 'Error:', `filename`, 'does not exist'
- sys.exit(1)
- mainmodule = os.path.basename(filename)
- del sys.argv[0] # Hide "pdb.py" from argument list
+ mainpyfile = filename = sys.argv[1] # Get script filename
+ if not os.path.exists(filename):
+ print 'Error:', `filename`, 'does not exist'
+ sys.exit(1)
+ mainmodule = os.path.basename(filename)
+ del sys.argv[0] # Hide "pdb.py" from argument list
- # Insert script directory in front of module search path
- sys.path.insert(0, os.path.dirname(filename))
+ # Insert script directory in front of module search path
+ sys.path.insert(0, os.path.dirname(filename))
- run('execfile(' + `filename` + ')')
+ run('execfile(' + `filename` + ')')
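
The simplified interface re-indented at the end of the pdb.py diff above (run, runeval, runcall, set_trace, post_mortem, pm) is the normal way into the debugger. A minimal usage sketch, with a throwaway function name (buggy) invented purely for illustration:

    import pdb

    def buggy(n):
        total = 0
        for i in range(n):
            pdb.set_trace()          # drop into the (Pdb) prompt at this line
            total = total + i
        return total

    pdb.run('buggy(3)')              # debug the statement from its first line
    pdb.runcall(buggy, 3)            # debug a single function call
    # After an uncaught exception at the interactive prompt, pdb.pm() opens a
    # post-mortem session on sys.last_traceback, as post_mortem() above shows.

At the (Pdb) prompt, the precmd() hook shown earlier expands any aliases read from .pdbrc before the command is dispatched.
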
diff --git a/Lib/pickle.py b/Lib/pickle.py
index fb9448e..45fd0f1 100644
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -123,7 +123,7 @@ class Pickler:
return LONG_BINGET + s
return GET + `i` + '\n'
-
+
def save(self, object, pers_save = 0):
memo = self.memo
@@ -134,7 +134,7 @@ class Pickler:
return
d = id(object)
-
+
t = type(object)
if ((t is TupleType) and (len(object) == 0)):
@@ -179,14 +179,14 @@ class Pickler:
"tuple" % reduce
l = len(tup)
-
+
if ((l != 2) and (l != 3)):
raise PicklingError, "tuple returned by %s must contain " \
"only two or three elements" % reduce
callable = tup[0]
arg_tup = tup[1]
-
+
if (l > 2):
state = tup[2]
else:
@@ -196,7 +196,7 @@ class Pickler:
raise PicklingError, "Second element of tuple returned " \
"by %s must be a tuple" % reduce
- self.save_reduce(callable, arg_tup, state)
+ self.save_reduce(callable, arg_tup, state)
memo_len = len(memo)
self.write(self.put(memo_len))
memo[d] = (memo_len, object)
@@ -224,7 +224,7 @@ class Pickler:
save(callable)
save(arg_tup)
write(REDUCE)
-
+
if (state is not None):
save(state)
write(BUILD)
@@ -317,7 +317,7 @@ class Pickler:
if (self.bin):
write(POP_MARK + self.get(memo[d][0]))
return
-
+
write(POP * (len(object) + 1) + self.get(memo[d][0]))
return
@@ -352,7 +352,7 @@ class Pickler:
for element in object:
save(element)
-
+
if (not using_appends):
write(APPEND)
@@ -542,7 +542,7 @@ class Unpickler:
def load_binpersid(self):
stack = self.stack
-
+
pid = stack[-1]
del stack[-1]
@@ -568,7 +568,7 @@ class Unpickler:
def load_binint2(self):
self.append(mloads('i' + self.read(2) + '\000\000'))
dispatch[BININT2] = load_binint2
-
+
def load_long(self):
self.append(long(self.readline()[:-1], 0))
dispatch[LONG] = load_long
@@ -710,7 +710,7 @@ class Unpickler:
k = self.marker()
klass = stack[k + 1]
del stack[k + 1]
- args = tuple(stack[k + 1:])
+ args = tuple(stack[k + 1:])
del stack[k:]
instantiated = 0
if (not args and type(klass) is ClassType and
@@ -726,7 +726,7 @@ class Unpickler:
if not instantiated:
value = apply(klass, args)
self.append(value)
- dispatch[OBJ] = load_obj
+ dispatch[OBJ] = load_obj
def load_global(self):
module = self.readline()[:-1]
@@ -761,8 +761,8 @@ class Unpickler:
safe = None
if (not safe):
- raise UnpicklingError, "%s is not safe for " \
- "unpickling" % callable
+ raise UnpicklingError, "%s is not safe for " \
+ "unpickling" % callable
if arg_tup is None:
value = callable.__basicnew__()
@@ -829,7 +829,7 @@ class Unpickler:
del stack[mark:]
dispatch[APPENDS] = load_appends
-
+
def load_setitem(self):
stack = self.stack
value = stack[-1]
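
A detail worth knowing when reading the pickle.py hunks above: the memo bookkeeping they touch (memo[d] = (memo_len, object) and self.get(memo[d][0])) is what lets an object that appears more than once be written a single time and re-linked on load. A small illustrative check (the data values are arbitrary):

    import pickle

    shared = [1, 2, 3]
    data = (shared, shared)                  # two references to one list
    copy = pickle.loads(pickle.dumps(data))
    print copy[0] is copy[1]                 # prints 1: identity preserved via the memo
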
diff --git a/Lib/pipes.py b/Lib/pipes.py
index 3aa1bf1..77a57e8 100644
--- a/Lib/pipes.py
+++ b/Lib/pipes.py
@@ -69,229 +69,229 @@ import string
# Conversion step kinds
-FILEIN_FILEOUT = 'ff' # Must read & write real files
-STDIN_FILEOUT = '-f' # Must write a real file
-FILEIN_STDOUT = 'f-' # Must read a real file
-STDIN_STDOUT = '--' # Normal pipeline element
-SOURCE = '.-' # Must be first, writes stdout
-SINK = '-.' # Must be last, reads stdin
+FILEIN_FILEOUT = 'ff' # Must read & write real files
+STDIN_FILEOUT = '-f' # Must write a real file
+FILEIN_STDOUT = 'f-' # Must read a real file
+STDIN_STDOUT = '--' # Normal pipeline element
+SOURCE = '.-' # Must be first, writes stdout
+SINK = '-.' # Must be last, reads stdin
stepkinds = [FILEIN_FILEOUT, STDIN_FILEOUT, FILEIN_STDOUT, STDIN_STDOUT, \
- SOURCE, SINK]
+ SOURCE, SINK]
class Template:
- """Class representing a pipeline template."""
-
- def __init__(self):
- """Template() returns a fresh pipeline template."""
- self.debugging = 0
- self.reset()
-
- def __repr__(self):
- """t.__repr__() implements `t`."""
- return '<Template instance, steps=' + `self.steps` + '>'
-
- def reset(self):
- """t.reset() restores a pipeline template to its initial state."""
- self.steps = []
-
- def clone(self):
- """t.clone() returns a new pipeline template with identical
- initial state as the current one."""
- t = Template()
- t.steps = self.steps[:]
- t.debugging = self.debugging
- return t
-
- def debug(self, flag):
- """t.debug(flag) turns debugging on or off."""
- self.debugging = flag
-
- def append(self, cmd, kind):
- """t.append(cmd, kind) adds a new step at the end."""
- if type(cmd) is not type(''):
- raise TypeError, \
- 'Template.append: cmd must be a string'
- if kind not in stepkinds:
- raise ValueError, \
- 'Template.append: bad kind ' + `kind`
- if kind == SOURCE:
- raise ValueError, \
- 'Template.append: SOURCE can only be prepended'
- if self.steps and self.steps[-1][1] == SINK:
- raise ValueError, \
- 'Template.append: already ends with SINK'
- if kind[0] == 'f' and not re.search('\$IN\b', cmd):
- raise ValueError, \
- 'Template.append: missing $IN in cmd'
- if kind[1] == 'f' and not re.search('\$OUT\b', cmd):
- raise ValueError, \
- 'Template.append: missing $OUT in cmd'
- self.steps.append((cmd, kind))
-
- def prepend(self, cmd, kind):
- """t.prepend(cmd, kind) adds a new step at the front."""
- if type(cmd) is not type(''):
- raise TypeError, \
- 'Template.prepend: cmd must be a string'
- if kind not in stepkinds:
- raise ValueError, \
- 'Template.prepend: bad kind ' + `kind`
- if kind == SINK:
- raise ValueError, \
- 'Template.prepend: SINK can only be appended'
- if self.steps and self.steps[0][1] == SOURCE:
- raise ValueError, \
- 'Template.prepend: already begins with SOURCE'
- if kind[0] == 'f' and not re.search('\$IN\b', cmd):
- raise ValueError, \
- 'Template.prepend: missing $IN in cmd'
- if kind[1] == 'f' and not re.search('\$OUT\b', cmd):
- raise ValueError, \
- 'Template.prepend: missing $OUT in cmd'
- self.steps.insert(0, (cmd, kind))
-
- def open(self, file, rw):
- """t.open(file, rw) returns a pipe or file object open for
- reading or writing; the file is the other end of the pipeline."""
- if rw == 'r':
- return self.open_r(file)
- if rw == 'w':
- return self.open_w(file)
- raise ValueError, \
- 'Template.open: rw must be \'r\' or \'w\', not ' + `rw`
-
- def open_r(self, file):
- """t.open_r(file) and t.open_w(file) implement
- t.open(file, 'r') and t.open(file, 'w') respectively."""
- if not self.steps:
- return open(file, 'r')
- if self.steps[-1][1] == SINK:
- raise ValueError, \
-                      'Template.open_r: pipeline ends with SINK'
- cmd = self.makepipeline(file, '')
- return os.popen(cmd, 'r')
-
- def open_w(self, file):
- if not self.steps:
- return open(file, 'w')
- if self.steps[0][1] == SOURCE:
- raise ValueError, \
- 'Template.open_w: pipeline begins with SOURCE'
- cmd = self.makepipeline('', file)
- return os.popen(cmd, 'w')
-
- def copy(self, infile, outfile):
- return os.system(self.makepipeline(infile, outfile))
-
- def makepipeline(self, infile, outfile):
- cmd = makepipeline(infile, self.steps, outfile)
- if self.debugging:
- print cmd
- cmd = 'set -x; ' + cmd
- return cmd
+ """Class representing a pipeline template."""
+
+ def __init__(self):
+ """Template() returns a fresh pipeline template."""
+ self.debugging = 0
+ self.reset()
+
+ def __repr__(self):
+ """t.__repr__() implements `t`."""
+ return '<Template instance, steps=' + `self.steps` + '>'
+
+ def reset(self):
+ """t.reset() restores a pipeline template to its initial state."""
+ self.steps = []
+
+ def clone(self):
+ """t.clone() returns a new pipeline template with identical
+ initial state as the current one."""
+ t = Template()
+ t.steps = self.steps[:]
+ t.debugging = self.debugging
+ return t
+
+ def debug(self, flag):
+ """t.debug(flag) turns debugging on or off."""
+ self.debugging = flag
+
+ def append(self, cmd, kind):
+ """t.append(cmd, kind) adds a new step at the end."""
+ if type(cmd) is not type(''):
+ raise TypeError, \
+ 'Template.append: cmd must be a string'
+ if kind not in stepkinds:
+ raise ValueError, \
+ 'Template.append: bad kind ' + `kind`
+ if kind == SOURCE:
+ raise ValueError, \
+ 'Template.append: SOURCE can only be prepended'
+ if self.steps and self.steps[-1][1] == SINK:
+ raise ValueError, \
+ 'Template.append: already ends with SINK'
+ if kind[0] == 'f' and not re.search('\$IN\b', cmd):
+ raise ValueError, \
+ 'Template.append: missing $IN in cmd'
+ if kind[1] == 'f' and not re.search('\$OUT\b', cmd):
+ raise ValueError, \
+ 'Template.append: missing $OUT in cmd'
+ self.steps.append((cmd, kind))
+
+ def prepend(self, cmd, kind):
+ """t.prepend(cmd, kind) adds a new step at the front."""
+ if type(cmd) is not type(''):
+ raise TypeError, \
+ 'Template.prepend: cmd must be a string'
+ if kind not in stepkinds:
+ raise ValueError, \
+ 'Template.prepend: bad kind ' + `kind`
+ if kind == SINK:
+ raise ValueError, \
+ 'Template.prepend: SINK can only be appended'
+ if self.steps and self.steps[0][1] == SOURCE:
+ raise ValueError, \
+ 'Template.prepend: already begins with SOURCE'
+ if kind[0] == 'f' and not re.search('\$IN\b', cmd):
+ raise ValueError, \
+ 'Template.prepend: missing $IN in cmd'
+ if kind[1] == 'f' and not re.search('\$OUT\b', cmd):
+ raise ValueError, \
+ 'Template.prepend: missing $OUT in cmd'
+ self.steps.insert(0, (cmd, kind))
+
+ def open(self, file, rw):
+ """t.open(file, rw) returns a pipe or file object open for
+ reading or writing; the file is the other end of the pipeline."""
+ if rw == 'r':
+ return self.open_r(file)
+ if rw == 'w':
+ return self.open_w(file)
+ raise ValueError, \
+ 'Template.open: rw must be \'r\' or \'w\', not ' + `rw`
+
+ def open_r(self, file):
+ """t.open_r(file) and t.open_w(file) implement
+ t.open(file, 'r') and t.open(file, 'w') respectively."""
+ if not self.steps:
+ return open(file, 'r')
+ if self.steps[-1][1] == SINK:
+ raise ValueError, \
+                  'Template.open_r: pipeline ends with SINK'
+ cmd = self.makepipeline(file, '')
+ return os.popen(cmd, 'r')
+
+ def open_w(self, file):
+ if not self.steps:
+ return open(file, 'w')
+ if self.steps[0][1] == SOURCE:
+ raise ValueError, \
+ 'Template.open_w: pipeline begins with SOURCE'
+ cmd = self.makepipeline('', file)
+ return os.popen(cmd, 'w')
+
+ def copy(self, infile, outfile):
+ return os.system(self.makepipeline(infile, outfile))
+
+ def makepipeline(self, infile, outfile):
+ cmd = makepipeline(infile, self.steps, outfile)
+ if self.debugging:
+ print cmd
+ cmd = 'set -x; ' + cmd
+ return cmd
def makepipeline(infile, steps, outfile):
- # Build a list with for each command:
- # [input filename or '', command string, kind, output filename or '']
-
- list = []
- for cmd, kind in steps:
- list.append(['', cmd, kind, ''])
- #
- # Make sure there is at least one step
- #
- if not list:
- list.append(['', 'cat', '--', ''])
- #
- # Take care of the input and output ends
- #
- [cmd, kind] = list[0][1:3]
- if kind[0] == 'f' and not infile:
- list.insert(0, ['', 'cat', '--', ''])
- list[0][0] = infile
- #
- [cmd, kind] = list[-1][1:3]
- if kind[1] == 'f' and not outfile:
- list.append(['', 'cat', '--', ''])
- list[-1][-1] = outfile
- #
- # Invent temporary files to connect stages that need files
- #
- garbage = []
- for i in range(1, len(list)):
- lkind = list[i-1][2]
- rkind = list[i][2]
- if lkind[1] == 'f' or rkind[0] == 'f':
- temp = tempfile.mktemp()
- garbage.append(temp)
- list[i-1][-1] = list[i][0] = temp
- #
- for item in list:
- [inf, cmd, kind, outf] = item
- if kind[1] == 'f':
- cmd = 'OUT=' + quote(outf) + '; ' + cmd
- if kind[0] == 'f':
- cmd = 'IN=' + quote(inf) + '; ' + cmd
- if kind[0] == '-' and inf:
- cmd = cmd + ' <' + quote(inf)
- if kind[1] == '-' and outf:
- cmd = cmd + ' >' + quote(outf)
- item[1] = cmd
- #
- cmdlist = list[0][1]
- for item in list[1:]:
- [cmd, kind] = item[1:3]
- if item[0] == '':
- if 'f' in kind:
- cmd = '{ ' + cmd + '; }'
- cmdlist = cmdlist + ' |\n' + cmd
- else:
- cmdlist = cmdlist + '\n' + cmd
- #
- if garbage:
- rmcmd = 'rm -f'
- for file in garbage:
- rmcmd = rmcmd + ' ' + quote(file)
- trapcmd = 'trap ' + quote(rmcmd + '; exit') + ' 1 2 3 13 14 15'
- cmdlist = trapcmd + '\n' + cmdlist + '\n' + rmcmd
- #
- return cmdlist
+ # Build a list with for each command:
+ # [input filename or '', command string, kind, output filename or '']
+
+ list = []
+ for cmd, kind in steps:
+ list.append(['', cmd, kind, ''])
+ #
+ # Make sure there is at least one step
+ #
+ if not list:
+ list.append(['', 'cat', '--', ''])
+ #
+ # Take care of the input and output ends
+ #
+ [cmd, kind] = list[0][1:3]
+ if kind[0] == 'f' and not infile:
+ list.insert(0, ['', 'cat', '--', ''])
+ list[0][0] = infile
+ #
+ [cmd, kind] = list[-1][1:3]
+ if kind[1] == 'f' and not outfile:
+ list.append(['', 'cat', '--', ''])
+ list[-1][-1] = outfile
+ #
+ # Invent temporary files to connect stages that need files
+ #
+ garbage = []
+ for i in range(1, len(list)):
+ lkind = list[i-1][2]
+ rkind = list[i][2]
+ if lkind[1] == 'f' or rkind[0] == 'f':
+ temp = tempfile.mktemp()
+ garbage.append(temp)
+ list[i-1][-1] = list[i][0] = temp
+ #
+ for item in list:
+ [inf, cmd, kind, outf] = item
+ if kind[1] == 'f':
+ cmd = 'OUT=' + quote(outf) + '; ' + cmd
+ if kind[0] == 'f':
+ cmd = 'IN=' + quote(inf) + '; ' + cmd
+ if kind[0] == '-' and inf:
+ cmd = cmd + ' <' + quote(inf)
+ if kind[1] == '-' and outf:
+ cmd = cmd + ' >' + quote(outf)
+ item[1] = cmd
+ #
+ cmdlist = list[0][1]
+ for item in list[1:]:
+ [cmd, kind] = item[1:3]
+ if item[0] == '':
+ if 'f' in kind:
+ cmd = '{ ' + cmd + '; }'
+ cmdlist = cmdlist + ' |\n' + cmd
+ else:
+ cmdlist = cmdlist + '\n' + cmd
+ #
+ if garbage:
+ rmcmd = 'rm -f'
+ for file in garbage:
+ rmcmd = rmcmd + ' ' + quote(file)
+ trapcmd = 'trap ' + quote(rmcmd + '; exit') + ' 1 2 3 13 14 15'
+ cmdlist = trapcmd + '\n' + cmdlist + '\n' + rmcmd
+ #
+ return cmdlist
# Reliably quote a string as a single argument for /bin/sh
-_safechars = string.letters + string.digits + '!@%_-+=:,./' # Safe unquoted
-_funnychars = '"`$\\' # Unsafe inside "double quotes"
+_safechars = string.letters + string.digits + '!@%_-+=:,./' # Safe unquoted
+_funnychars = '"`$\\' # Unsafe inside "double quotes"
def quote(file):
- for c in file:
- if c not in _safechars:
- break
- else:
- return file
- if '\'' not in file:
- return '\'' + file + '\''
- res = ''
- for c in file:
- if c in _funnychars:
- c = '\\' + c
- res = res + c
- return '"' + res + '"'
+ for c in file:
+ if c not in _safechars:
+ break
+ else:
+ return file
+ if '\'' not in file:
+ return '\'' + file + '\''
+ res = ''
+ for c in file:
+ if c in _funnychars:
+ c = '\\' + c
+ res = res + c
+ return '"' + res + '"'
# Small test program and example
def test():
- print 'Testing...'
- t = Template()
- t.append('togif $IN $OUT', 'ff')
- t.append('giftoppm', '--')
- t.append('ppmtogif >$OUT', '-f')
- t.append('fromgif $IN $OUT', 'ff')
- t.debug(1)
- FILE = '/usr/local/images/rgb/rogues/guido.rgb'
- t.copy(FILE, '@temp')
- print 'Done.'
+ print 'Testing...'
+ t = Template()
+ t.append('togif $IN $OUT', 'ff')
+ t.append('giftoppm', '--')
+ t.append('ppmtogif >$OUT', '-f')
+ t.append('fromgif $IN $OUT', 'ff')
+ t.debug(1)
+ FILE = '/usr/local/images/rgb/rogues/guido.rgb'
+ t.copy(FILE, '@temp')
+ print 'Done.'
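
For readers who have not used the pipes module that the diff above re-indents, the Template API is easiest to see in action. A short sketch using the plain STDIN_STDOUT ('--') kind; the output path is invented for illustration:

    import pipes

    t = pipes.Template()
    t.append('tr a-z A-Z', '--')         # ordinary filter: reads stdin, writes stdout
    f = t.open('/tmp/upper.txt', 'w')    # the named file is the far end of the pipeline
    f.write('hello, world\n')
    f.close()
    # /tmp/upper.txt should now contain "HELLO, WORLD"

Steps of kind 'ff', 'f-' or '-f' must mention $IN and/or $OUT, which is exactly what the re.search checks in append() and prepend() enforce.
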
diff --git a/Lib/poplib.py b/Lib/poplib.py
index 5a3813c..2f55f74 100644
--- a/Lib/poplib.py
+++ b/Lib/poplib.py
@@ -32,290 +32,290 @@ CRLF = CR+LF
class POP3:
- """This class supports both the minimal and optional command sets.
- Arguments can be strings or integers (where appropriate)
-        (e.g.: retr(1) and retr('1') both work equally well).
-
- Minimal Command Set:
- USER name user(name)
- PASS string pass_(string)
- STAT stat()
- LIST [msg] list(msg = None)
- RETR msg retr(msg)
- DELE msg dele(msg)
- NOOP noop()
- RSET rset()
- QUIT quit()
-
- Optional Commands (some servers support these):
- RPOP name rpop(name)
- APOP name digest apop(name, digest)
- TOP msg n top(msg, n)
- UIDL [msg] uidl(msg = None)
-
- Raises one exception: 'error_proto'.
+ """This class supports both the minimal and optional command sets.
+ Arguments can be strings or integers (where appropriate)
+    (e.g.: retr(1) and retr('1') both work equally well).
+
+ Minimal Command Set:
+ USER name user(name)
+ PASS string pass_(string)
+ STAT stat()
+ LIST [msg] list(msg = None)
+ RETR msg retr(msg)
+ DELE msg dele(msg)
+ NOOP noop()
+ RSET rset()
+ QUIT quit()
+
+ Optional Commands (some servers support these):
+ RPOP name rpop(name)
+ APOP name digest apop(name, digest)
+ TOP msg n top(msg, n)
+ UIDL [msg] uidl(msg = None)
+
+ Raises one exception: 'error_proto'.
- Instantiate with:
- POP3(hostname, port=110)
+ Instantiate with:
+ POP3(hostname, port=110)
- NB: the POP protocol locks the mailbox from user
- authorization until QUIT, so be sure to get in, suck
- the messages, and quit, each time you access the
- mailbox.
+ NB: the POP protocol locks the mailbox from user
+ authorization until QUIT, so be sure to get in, suck
+ the messages, and quit, each time you access the
+ mailbox.
- POP is a line-based protocol, which means large mail
- messages consume lots of python cycles reading them
- line-by-line.
+ POP is a line-based protocol, which means large mail
+ messages consume lots of python cycles reading them
+ line-by-line.
- If it's available on your mail server, use IMAP4
- instead, it doesn't suffer from the two problems
- above.
- """
+ If it's available on your mail server, use IMAP4
+ instead, it doesn't suffer from the two problems
+ above.
+ """
- def __init__(self, host, port = POP3_PORT):
- self.host = host
- self.port = port
- self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- self.sock.connect((self.host, self.port))
- self.file = self.sock.makefile('rb')
- self._debugging = 0
- self.welcome = self._getresp()
+ def __init__(self, host, port = POP3_PORT):
+ self.host = host
+ self.port = port
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.sock.connect((self.host, self.port))
+ self.file = self.sock.makefile('rb')
+ self._debugging = 0
+ self.welcome = self._getresp()
- def _putline(self, line):
- #if self._debugging > 1: print '*put*', `line`
- self.sock.send('%s%s' % (line, CRLF))
+ def _putline(self, line):
+ #if self._debugging > 1: print '*put*', `line`
+ self.sock.send('%s%s' % (line, CRLF))
- # Internal: send one command to the server (through _putline())
+ # Internal: send one command to the server (through _putline())
- def _putcmd(self, line):
- #if self._debugging: print '*cmd*', `line`
- self._putline(line)
+ def _putcmd(self, line):
+ #if self._debugging: print '*cmd*', `line`
+ self._putline(line)
- # Internal: return one line from the server, stripping CRLF.
- # This is where all the CPU time of this module is consumed.
- # Raise error_proto('-ERR EOF') if the connection is closed.
+ # Internal: return one line from the server, stripping CRLF.
+ # This is where all the CPU time of this module is consumed.
+ # Raise error_proto('-ERR EOF') if the connection is closed.
- def _getline(self):
- line = self.file.readline()
- #if self._debugging > 1: print '*get*', `line`
- if not line: raise error_proto('-ERR EOF')
- octets = len(line)
- # server can send any combination of CR & LF
- # however, 'readline()' returns lines ending in LF
- # so only possibilities are ...LF, ...CRLF, CR...LF
- if line[-2:] == CRLF:
- return line[:-2], octets
- if line[0] == CR:
- return line[1:-1], octets
- return line[:-1], octets
+ def _getline(self):
+ line = self.file.readline()
+ #if self._debugging > 1: print '*get*', `line`
+ if not line: raise error_proto('-ERR EOF')
+ octets = len(line)
+ # server can send any combination of CR & LF
+ # however, 'readline()' returns lines ending in LF
+ # so only possibilities are ...LF, ...CRLF, CR...LF
+ if line[-2:] == CRLF:
+ return line[:-2], octets
+ if line[0] == CR:
+ return line[1:-1], octets
+ return line[:-1], octets
- # Internal: get a response from the server.
- # Raise 'error_proto' if the response doesn't start with '+'.
+ # Internal: get a response from the server.
+ # Raise 'error_proto' if the response doesn't start with '+'.
- def _getresp(self):
- resp, o = self._getline()
- #if self._debugging > 1: print '*resp*', `resp`
- c = resp[:1]
- if c != '+':
- raise error_proto(resp)
- return resp
+ def _getresp(self):
+ resp, o = self._getline()
+ #if self._debugging > 1: print '*resp*', `resp`
+ c = resp[:1]
+ if c != '+':
+ raise error_proto(resp)
+ return resp
- # Internal: get a response plus following text from the server.
+ # Internal: get a response plus following text from the server.
- def _getlongresp(self):
- resp = self._getresp()
- list = []; octets = 0
- line, o = self._getline()
- while line != '.':
- if line[:2] == '..':
- o = o-1
- line = line[1:]
- octets = octets + o
- list.append(line)
- line, o = self._getline()
- return resp, list, octets
+ def _getlongresp(self):
+ resp = self._getresp()
+ list = []; octets = 0
+ line, o = self._getline()
+ while line != '.':
+ if line[:2] == '..':
+ o = o-1
+ line = line[1:]
+ octets = octets + o
+ list.append(line)
+ line, o = self._getline()
+ return resp, list, octets
- # Internal: send a command and get the response
+ # Internal: send a command and get the response
- def _shortcmd(self, line):
- self._putcmd(line)
- return self._getresp()
+ def _shortcmd(self, line):
+ self._putcmd(line)
+ return self._getresp()
- # Internal: send a command and get the response plus following text
+ # Internal: send a command and get the response plus following text
- def _longcmd(self, line):
- self._putcmd(line)
- return self._getlongresp()
+ def _longcmd(self, line):
+ self._putcmd(line)
+ return self._getlongresp()
- # These can be useful:
+ # These can be useful:
- def getwelcome(self):
- return self.welcome
+ def getwelcome(self):
+ return self.welcome
- def set_debuglevel(self, level):
- self._debugging = level
+ def set_debuglevel(self, level):
+ self._debugging = level
- # Here are all the POP commands:
+ # Here are all the POP commands:
- def user(self, user):
- """Send user name, return response
-
- (should indicate password required).
- """
- return self._shortcmd('USER %s' % user)
+ def user(self, user):
+ """Send user name, return response
+ (should indicate password required).
+ """
+ return self._shortcmd('USER %s' % user)
- def pass_(self, pswd):
- """Send password, return response
-
- (response includes message count, mailbox size).
- NB: mailbox is locked by server from here to 'quit()'
- """
- return self._shortcmd('PASS %s' % pswd)
+ def pass_(self, pswd):
+ """Send password, return response
+ (response includes message count, mailbox size).
- def stat(self):
- """Get mailbox status.
-
- Result is tuple of 2 ints (message count, mailbox size)
- """
- retval = self._shortcmd('STAT')
- rets = string.split(retval)
- #if self._debugging: print '*stat*', `rets`
- numMessages = string.atoi(rets[1])
- sizeMessages = string.atoi(rets[2])
- return (numMessages, sizeMessages)
+ NB: mailbox is locked by server from here to 'quit()'
+ """
+ return self._shortcmd('PASS %s' % pswd)
- def list(self, which=None):
- """Request listing, return result.
+ def stat(self):
+ """Get mailbox status.
- Result without a message number argument is in form
- ['response', ['mesg_num octets', ...]].
+ Result is tuple of 2 ints (message count, mailbox size)
+ """
+ retval = self._shortcmd('STAT')
+ rets = string.split(retval)
+ #if self._debugging: print '*stat*', `rets`
+ numMessages = string.atoi(rets[1])
+ sizeMessages = string.atoi(rets[2])
+ return (numMessages, sizeMessages)
- Result when a message number argument is given is a
- single response: the "scan listing" for that message.
- """
- if which:
- return self._shortcmd('LIST %s' % which)
- return self._longcmd('LIST')
+ def list(self, which=None):
+ """Request listing, return result.
- def retr(self, which):
- """Retrieve whole message number 'which'.
+ Result without a message number argument is in form
+ ['response', ['mesg_num octets', ...]].
- Result is in form ['response', ['line', ...], octets].
- """
- return self._longcmd('RETR %s' % which)
+ Result when a message number argument is given is a
+ single response: the "scan listing" for that message.
+ """
+ if which:
+ return self._shortcmd('LIST %s' % which)
+ return self._longcmd('LIST')
- def dele(self, which):
- """Delete message number 'which'.
+ def retr(self, which):
+ """Retrieve whole message number 'which'.
- Result is 'response'.
- """
- return self._shortcmd('DELE %s' % which)
+ Result is in form ['response', ['line', ...], octets].
+ """
+ return self._longcmd('RETR %s' % which)
- def noop(self):
- """Does nothing.
-
- One supposes the response indicates the server is alive.
- """
- return self._shortcmd('NOOP')
+ def dele(self, which):
+ """Delete message number 'which'.
+ Result is 'response'.
+ """
+ return self._shortcmd('DELE %s' % which)
- def rset(self):
- """Not sure what this does."""
- return self._shortcmd('RSET')
+ def noop(self):
+ """Does nothing.
- def quit(self):
- """Signoff: commit changes on server, unlock mailbox, close connection."""
- try:
- resp = self._shortcmd('QUIT')
- except error_proto, val:
- resp = val
- self.file.close()
- self.sock.close()
- del self.file, self.sock
- return resp
+ One supposes the response indicates the server is alive.
+ """
+ return self._shortcmd('NOOP')
- #__del__ = quit
+ def rset(self):
+ """Not sure what this does."""
+ return self._shortcmd('RSET')
- # optional commands:
- def rpop(self, user):
- """Not sure what this does."""
- return self._shortcmd('RPOP %s' % user)
+ def quit(self):
+ """Signoff: commit changes on server, unlock mailbox, close connection."""
+ try:
+ resp = self._shortcmd('QUIT')
+ except error_proto, val:
+ resp = val
+ self.file.close()
+ self.sock.close()
+ del self.file, self.sock
+ return resp
+ #__del__ = quit
- timestamp = regex.compile('\+OK.*\(<[^>]+>\)')
- def apop(self, user, secret):
- """Authorisation
-
- - only possible if server has supplied a timestamp in initial greeting.
+ # optional commands:
- Args:
- user - mailbox user;
- secret - secret shared between client and server.
+ def rpop(self, user):
+ """Not sure what this does."""
+ return self._shortcmd('RPOP %s' % user)
- NB: mailbox is locked by server from here to 'quit()'
- """
- if self.timestamp.match(self.welcome) <= 0:
- raise error_proto('-ERR APOP not supported by server')
- import md5
- digest = md5.new(self.timestamp.group(1)+secret).digest()
- digest = string.join(map(lambda x:'%02x'%ord(x), digest), '')
- return self._shortcmd('APOP %s %s' % (user, digest))
+ timestamp = regex.compile('\+OK.*\(<[^>]+>\)')
- def top(self, which, howmuch):
- """Retrieve message header of message number 'which'
- and first 'howmuch' lines of message body.
+ def apop(self, user, secret):
+ """Authorisation
- Result is in form ['response', ['line', ...], octets].
- """
- return self._longcmd('TOP %s %s' % (which, howmuch))
+ - only possible if server has supplied a timestamp in initial greeting.
+ Args:
+ user - mailbox user;
+ secret - secret shared between client and server.
- def uidl(self, which=None):
- """Return message digest (unique id) list.
+ NB: mailbox is locked by server from here to 'quit()'
+ """
+ if self.timestamp.match(self.welcome) <= 0:
+ raise error_proto('-ERR APOP not supported by server')
+ import md5
+ digest = md5.new(self.timestamp.group(1)+secret).digest()
+ digest = string.join(map(lambda x:'%02x'%ord(x), digest), '')
+ return self._shortcmd('APOP %s %s' % (user, digest))
+
+
+ def top(self, which, howmuch):
+ """Retrieve message header of message number 'which'
+ and first 'howmuch' lines of message body.
+
+ Result is in form ['response', ['line', ...], octets].
+ """
+ return self._longcmd('TOP %s %s' % (which, howmuch))
+
+
+ def uidl(self, which=None):
+ """Return message digest (unique id) list.
+
+ If 'which', result contains unique id for that message
+ in the form 'response mesgnum uid', otherwise result is
+ the list ['response', ['mesgnum uid', ...], octets]
+ """
+ if which:
+ return self._shortcmd('UIDL %s' % which)
+ return self._longcmd('UIDL')
- If 'which', result contains unique id for that message
- in the form 'response mesgnum uid', otherwise result is
- the list ['response', ['mesgnum uid', ...], octets]
- """
- if which:
- return self._shortcmd('UIDL %s' % which)
- return self._longcmd('UIDL')
-
if __name__ == "__main__":
- a = POP3(TESTSERVER)
- print a.getwelcome()
- a.user(TESTACCOUNT)
- a.pass_(TESTPASSWORD)
- a.list()
- (numMsgs, totalSize) = a.stat()
- for i in range(1, numMsgs + 1):
- (header, msg, octets) = a.retr(i)
- print "Message ", `i`, ':'
- for line in msg:
- print ' ' + line
- print '-----------------------'
- a.quit()
+ a = POP3(TESTSERVER)
+ print a.getwelcome()
+ a.user(TESTACCOUNT)
+ a.pass_(TESTPASSWORD)
+ a.list()
+ (numMsgs, totalSize) = a.stat()
+ for i in range(1, numMsgs + 1):
+ (header, msg, octets) = a.retr(i)
+ print "Message ", `i`, ':'
+ for line in msg:
+ print ' ' + line
+ print '-----------------------'
+ a.quit()
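
One easy-to-miss detail in the poplib diff above is the byte-stuffing reversal inside _getlongresp(): a multi-line POP3 response ends with a line holding a single '.', so the server doubles the leading dot of any message line that genuinely starts with one, and the client strips one dot back off while correcting the octet count. A standalone sketch of that rule (the function name is invented for illustration):

    def unstuff(lines):
        # Undo POP3 byte-stuffing: a received line beginning with '..'
        # stands for a message line beginning with a single '.'.
        out = []
        for line in lines:
            if line[:2] == '..':
                line = line[1:]
            out.append(line)
        return out

    print unstuff(['Subject: hi', '..hidden line'])
    # ['Subject: hi', '.hidden line']
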
diff --git a/Lib/posixfile.py b/Lib/posixfile.py
index 23f2c85..58c4b4f 100644
--- a/Lib/posixfile.py
+++ b/Lib/posixfile.py
@@ -129,7 +129,7 @@ class _posixfile_:
l_flags = fcntl.fcntl(file.fileno(), FCNTL.F_SETFL, l_flags)
- if 'c' in which:
+ if 'c' in which:
arg = ('!' not in which) # 0 is don't, 1 is do close on exec
l_flags = fcntl.fcntl(file.fileno(), FCNTL.F_SETFD, arg)
@@ -142,7 +142,7 @@ class _posixfile_:
if FCNTL.O_NDELAY & l_flags: which = which + 'n'
if FCNTL.O_SYNC & l_flags: which = which + 's'
return which
-
+
def lock(self, how, *args):
import struct, fcntl, FCNTL
@@ -176,7 +176,7 @@ class _posixfile_:
'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5',
'bsdos2', 'bsdos3', 'bsdos4'):
flock = struct.pack('lxxxxlxxxxlhh', \
- l_start, l_len, os.getpid(), l_type, l_whence)
+ l_start, l_len, os.getpid(), l_type, l_whence)
elif sys.platform in ['aix3', 'aix4']:
flock = struct.pack('hhlllii', \
l_type, l_whence, l_start, l_len, 0, 0, 0)
diff --git a/Lib/posixpath.py b/Lib/posixpath.py
index 32850b4..fd870b7 100644
--- a/Lib/posixpath.py
+++ b/Lib/posixpath.py
@@ -55,7 +55,7 @@ def join(a, *p):
# Trailing '/'es are stripped from head unless it is the root.
def split(p):
- """Split a pathname. Returns tuple "(head, tail)" where "tail" is
+ """Split a pathname. Returns tuple "(head, tail)" where "tail" is
everything after the final slash. Either part may be empty."""
i = p.rfind('/') + 1
head, tail = p[:i], p[i:]
@@ -93,7 +93,7 @@ def splitext(p):
# path. Useful on DOS/Windows/NT; on Unix, the drive is always empty.
def splitdrive(p):
- """Split a pathname into drive and path. On Posix, drive is always
+ """Split a pathname into drive and path. On Posix, drive is always
empty."""
return '', p
@@ -220,7 +220,7 @@ def sameopenfile(fp1, fp2):
def samestat(s1, s2):
"""Test whether two stat buffers reference the same file"""
return s1[stat.ST_INO] == s2[stat.ST_INO] and \
- s1[stat.ST_DEV] == s2[stat.ST_DEV]
+ s1[stat.ST_DEV] == s2[stat.ST_DEV]
# Is a path a mount point?
@@ -253,7 +253,7 @@ def ismount(path):
# or to impose a different order of visiting.
def walk(top, func, arg):
- """walk(top,func,arg) calls func(arg, d, files) for each directory "d"
+ """walk(top,func,arg) calls func(arg, d, files) for each directory "d"
in the tree rooted at "top" (including "top" itself). "files" is a list
of all the files and subdirs in directory "d".
"""
@@ -263,10 +263,10 @@ def walk(top, func, arg):
return
func(arg, top, names)
for name in names:
- name = join(top, name)
- st = os.lstat(name)
- if stat.S_ISDIR(st[stat.ST_MODE]):
- walk(name, func, arg)
+ name = join(top, name)
+ st = os.lstat(name)
+ if stat.S_ISDIR(st[stat.ST_MODE]):
+ walk(name, func, arg)
# Expand paths beginning with '~' or '~user'.
@@ -279,7 +279,7 @@ def walk(top, func, arg):
# variable expansion.)
def expanduser(path):
- """Expand ~ and ~user constructions. If user or $HOME is unknown,
+ """Expand ~ and ~user constructions. If user or $HOME is unknown,
do nothing."""
if path[:1] != '~':
return path
@@ -349,7 +349,7 @@ def normpath(path):
for comp in comps:
if comp in ('', '.'):
continue
- if (comp != '..' or (not initial_slash and not new_comps) or
+ if (comp != '..' or (not initial_slash and not new_comps) or
(new_comps and new_comps[-1] == '..')):
new_comps.append(comp)
elif new_comps:
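The posixpath changes above are whitespace cleanup in the docstrings and bodies of split(), splitdrive(), samestat(), walk(), expanduser() and normpath(). A small illustration of the walk() visitor protocol those docstrings describe, plus the other helpers; '/tmp' and the sample paths are arbitrary:

    import posixpath

    def visit(arg, dirname, names):
        # called once per directory with (arg, dirname, list of entries)
        print dirname, '->', len(names), 'entries'

    posixpath.walk('/tmp', visit, None)
    print posixpath.expanduser('~/notes.txt')   # '~' expansion, as described above
    print posixpath.normpath('a/./b/../c')      # collapses to 'a/c'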
diff --git a/Lib/pre.py b/Lib/pre.py
index 9dedb9c..adc1ddf 100644
--- a/Lib/pre.py
+++ b/Lib/pre.py
@@ -44,7 +44,7 @@ below. If the ordinary character is not on the list, then the
resulting RE will match the second character.
\\number Matches the contents of the group of the same number.
\\A Matches only at the start of the string.
- \\Z Matches only at the end of the string.
+ \\Z Matches only at the end of the string.
\\b Matches the empty string, but only at the start or end of a word.
\\B Matches the empty string, but not at the start or end of a word.
\\d Matches any decimal digit; equivalent to the set [0-9].
@@ -55,7 +55,7 @@ resulting RE will match the second character.
With LOCALE, it will match the set [0-9_] plus characters defined
as letters for the current locale.
\\W Matches the complement of \\w.
- \\\\ Matches a literal backslash.
+ \\\\ Matches a literal backslash.
This module exports the following functions:
match Match a regular expression pattern to the beginning of a string.
@@ -100,8 +100,8 @@ from pcre import *
I = IGNORECASE
L = LOCALE
M = MULTILINE
-S = DOTALL
-X = VERBOSE
+S = DOTALL
+X = VERBOSE
#
@@ -125,7 +125,7 @@ def _cachecompile(pattern, flags=0):
def match(pattern, string, flags=0):
"""match (pattern, string[, flags]) -> MatchObject or None
-
+
If zero or more characters at the beginning of string match the
regular expression pattern, return a corresponding MatchObject
instance. Return None if the string does not match the pattern;
@@ -135,12 +135,12 @@ def match(pattern, string, flags=0):
search() instead.
"""
-
+
return _cachecompile(pattern, flags).match(string)
-
+
def search(pattern, string, flags=0):
"""search (pattern, string[, flags]) -> MatchObject or None
-
+
Scan through string looking for a location where the regular
expression pattern produces a match, and return a corresponding
MatchObject instance. Return None if no position in the string
@@ -149,10 +149,10 @@ def search(pattern, string, flags=0):
"""
return _cachecompile(pattern, flags).search(string)
-
+
def sub(pattern, repl, string, count=0):
"""sub(pattern, repl, string[, count=0]) -> string
-
+
Return the string obtained by replacing the leftmost
non-overlapping occurrences of pattern in string by the
replacement repl. If the pattern isn't found, string is returned
@@ -177,7 +177,7 @@ def sub(pattern, repl, string, count=0):
def subn(pattern, repl, string, count=0):
"""subn(pattern, repl, string[, count=0]) -> (string, num substitutions)
-
+
Perform the same operation as sub(), but return a tuple
(new_string, number_of_subs_made).
@@ -185,10 +185,10 @@ def subn(pattern, repl, string, count=0):
if type(pattern) == type(''):
pattern = _cachecompile(pattern)
return pattern.subn(repl, string, count)
-
+
def split(pattern, string, maxsplit=0):
"""split(pattern, string[, maxsplit=0]) -> list of strings
-
+
Split string by the occurrences of pattern. If capturing
parentheses are used in pattern, then the text of all groups in
the pattern are also returned as part of the resulting list. If
@@ -203,7 +203,7 @@ def split(pattern, string, maxsplit=0):
def findall(pattern, string):
"""findall(pattern, string) -> list
-
+
Return a list of all non-overlapping matches of pattern in
string. If one or more groups are present in the pattern, return a
list of groups; this will be a list of tuples if the pattern has
@@ -216,7 +216,7 @@ def findall(pattern, string):
def escape(pattern):
"""escape(string) -> string
-
+
Return string with all non-alphanumerics backslashed; this is
useful if you want to match an arbitrary literal string that may
have regular expression metacharacters in it.
@@ -242,7 +242,7 @@ def compile(pattern, flags=0):
groupindex={}
code=pcre_compile(pattern, flags, groupindex)
return RegexObject(pattern, flags, code, groupindex)
-
+
#
# Class definitions
@@ -258,18 +258,18 @@ class RegexObject:
subn Same as sub, but also return the number of substitutions made.
split Split a string by the occurrences of the pattern.
findall Find all occurrences of the pattern in a string.
-
+
"""
def __init__(self, pattern, flags, code, groupindex):
- self.code = code
+ self.code = code
self.flags = flags
self.pattern = pattern
self.groupindex = groupindex
def search(self, string, pos=0, endpos=None):
"""search(string[, pos][, endpos]) -> MatchObject or None
-
+
Scan through string looking for a location where this regular
expression produces a match, and return a corresponding
MatchObject instance. Return None if no position in the string
@@ -277,24 +277,24 @@ class RegexObject:
a zero-length match at some point in the string. The optional
pos and endpos parameters have the same meaning as for the
match() method.
-
+
"""
- if endpos is None or endpos>len(string):
+ if endpos is None or endpos>len(string):
endpos=len(string)
if endpos<pos: endpos=pos
regs = self.code.match(string, pos, endpos, 0)
if regs is None:
return None
self._num_regs=len(regs)
-
+
return MatchObject(self,
string,
pos, endpos,
regs)
-
+
def match(self, string, pos=0, endpos=None):
"""match(string[, pos][, endpos]) -> MatchObject or None
-
+
If zero or more characters at the beginning of string match
this regular expression, return a corresponding MatchObject
instance. Return None if the string does not match the
@@ -316,7 +316,7 @@ class RegexObject:
searched for a match.
"""
- if endpos is None or endpos>len(string):
+ if endpos is None or endpos>len(string):
endpos=len(string)
if endpos<pos: endpos=pos
regs = self.code.match(string, pos, endpos, ANCHORED)
@@ -327,23 +327,23 @@ class RegexObject:
string,
pos, endpos,
regs)
-
+
def sub(self, repl, string, count=0):
"""sub(repl, string[, count=0]) -> string
-
+
Return the string obtained by replacing the leftmost
non-overlapping occurrences of the compiled pattern in string
by the replacement repl. If the pattern isn't found, string is
returned unchanged.
Identical to the sub() function, using the compiled pattern.
-
+
"""
return self.subn(repl, string, count)[0]
-
- def subn(self, repl, source, count=0):
+
+ def subn(self, repl, source, count=0):
"""subn(repl, string[, count=0]) -> tuple
-
+
Perform the same operation as sub(), but return a tuple
(new_string, number_of_subs_made).
@@ -399,17 +399,17 @@ class RegexObject:
n = n + 1
append(source[pos:])
return (string.join(results, ''), n)
-
+
def split(self, source, maxsplit=0):
"""split(source[, maxsplit=0]) -> list of strings
-
+
Split string by the occurrences of the compiled pattern. If
capturing parentheses are used in the pattern, then the text
of all groups in the pattern are also returned as part of the
resulting list. If maxsplit is nonzero, at most maxsplit
splits occur, and the remainder of the string is returned as
the final element of the list.
-
+
"""
if maxsplit < 0:
raise error, "negative split count"
@@ -449,7 +449,7 @@ class RegexObject:
def findall(self, source):
"""findall(source) -> list
-
+
Return a list of all non-overlapping matches of the compiled
pattern in string. If one or more groups are present in the
pattern, return a list of groups; this will be a list of
@@ -487,7 +487,7 @@ class RegexObject:
def __getinitargs__(self):
return (None,None,None,None) # any 4 elements, to work around
# problems with the
- # pickle/cPickle modules not yet
+ # pickle/cPickle modules not yet
# ignoring the __init__ function
def __getstate__(self):
return self.pattern, self.flags, self.groupindex
@@ -517,13 +517,13 @@ class MatchObject:
def __init__(self, re, string, pos, endpos, regs):
self.re = re
self.string = string
- self.pos = pos
+ self.pos = pos
self.endpos = endpos
self.regs = regs
-
+
def start(self, g = 0):
"""start([group=0]) -> int or None
-
+
Return the index of the start of the substring matched by
group; group defaults to zero (meaning the whole matched
substring). Return -1 if group exists but did not contribute
@@ -536,10 +536,10 @@ class MatchObject:
except (KeyError, TypeError):
raise IndexError, 'group %s is undefined' % `g`
return self.regs[g][0]
-
+
def end(self, g = 0):
"""end([group=0]) -> int or None
-
+
Return the indices of the end of the substring matched by
group; group defaults to zero (meaning the whole matched
substring). Return -1 if group exists but did not contribute
@@ -552,10 +552,10 @@ class MatchObject:
except (KeyError, TypeError):
raise IndexError, 'group %s is undefined' % `g`
return self.regs[g][1]
-
+
def span(self, g = 0):
"""span([group=0]) -> tuple
-
+
Return the 2-tuple (m.start(group), m.end(group)). Note that
if group did not contribute to the match, this is (-1,
-1). Group defaults to zero (meaning the whole matched
@@ -568,10 +568,10 @@ class MatchObject:
except (KeyError, TypeError):
raise IndexError, 'group %s is undefined' % `g`
return self.regs[g]
-
+
def groups(self, default=None):
"""groups([default=None]) -> tuple
-
+
Return a tuple containing all the subgroups of the match, from
1 up to however many groups are in the pattern. The default
argument is used for groups that did not participate in the
@@ -589,7 +589,7 @@ class MatchObject:
def group(self, *groups):
"""group([group1, group2, ...]) -> string or tuple
-
+
Return one or more subgroups of the match. If there is a
single argument, the result is a single string; if there are
multiple arguments, the result is a tuple with one item per
@@ -636,7 +636,7 @@ class MatchObject:
def groupdict(self, default=None):
"""groupdict([default=None]) -> dictionary
-
+
Return a dictionary containing all the named subgroups of the
match, keyed by the subgroup name. The default argument is
used for groups that did not participate in the match.
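pre mirrors the familiar re-style interface (match, search, sub, split, findall, escape and the RegexObject/MatchObject methods are all visible above), so the reindented docstrings can be exercised like this; the patterns and strings are arbitrary examples:

    import pre

    m = pre.match(r'(\d+)-(\d+)', '12-34')
    if m:
        print m.group(1), m.group(2)           # 12 34
        print m.span(2)                        # (3, 5)

    print pre.sub(r'\s+', ' ', 'collapse   runs   of   whitespace')
    print pre.split(r',\s*', 'a, b,c')         # ['a', 'b', 'c']
    print pre.findall(r'\w+', 'one two')       # ['one', 'two']
    print pre.escape('1+1=2')                  # non-alphanumerics get backslashed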
diff --git a/Lib/profile.py b/Lib/profile.py
index feaf287..c32b3f8 100755
--- a/Lib/profile.py
+++ b/Lib/profile.py
@@ -11,7 +11,7 @@
# Copyright 1994, by InfoSeek Corporation, all rights reserved.
# Written by James Roskind
-#
+#
# Permission to use, copy, modify, and distribute this Python software
# and its associated documentation for any purpose (subject to the
# restriction in the following sentence) without fee is hereby granted,
@@ -24,7 +24,7 @@
# to remain in Python, compiled Python, or other languages (such as C)
# wherein the modified or derived code is exclusively imported into a
# Python module.
-#
+#
# INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY
@@ -41,12 +41,12 @@ import time
import marshal
-# Sample timer for use with
+# Sample timer for use with
#i_count = 0
#def integer_timer():
-# global i_count
-# i_count = i_count + 1
-# return i_count
+# global i_count
+# i_count = i_count + 1
+# return i_count
#itimes = integer_timer # replace with C coded timer returning integers
#**************************************************************************
@@ -57,515 +57,515 @@ import marshal
# simplified user interface
def run(statement, *args):
- prof = Profile()
- try:
- prof = prof.run(statement)
- except SystemExit:
- pass
- if args:
- prof.dump_stats(args[0])
- else:
- return prof.print_stats()
+ prof = Profile()
+ try:
+ prof = prof.run(statement)
+ except SystemExit:
+ pass
+ if args:
+ prof.dump_stats(args[0])
+ else:
+ return prof.print_stats()
# print help
def help():
- for dirname in sys.path:
- fullname = os.path.join(dirname, 'profile.doc')
- if os.path.exists(fullname):
- sts = os.system('${PAGER-more} '+fullname)
- if sts: print '*** Pager exit status:', sts
- break
- else:
- print 'Sorry, can\'t find the help file "profile.doc"',
- print 'along the Python search path'
+ for dirname in sys.path:
+ fullname = os.path.join(dirname, 'profile.doc')
+ if os.path.exists(fullname):
+ sts = os.system('${PAGER-more} '+fullname)
+ if sts: print '*** Pager exit status:', sts
+ break
+ else:
+ print 'Sorry, can\'t find the help file "profile.doc"',
+ print 'along the Python search path'
class Profile:
- """Profiler class.
-
- self.cur is always a tuple. Each such tuple corresponds to a stack
- frame that is currently active (self.cur[-2]). The following are the
- definitions of its members. We use this external "parallel stack" to
- avoid contaminating the program that we are profiling. (old profiler
- used to write into the frame's local dictionary!!) Derived classes
- can change the definition of some entries, as long as they leave
- [-2:] intact.
-
- [ 0] = Time that needs to be charged to the parent frame's function.
- It is used so that a function call will not have to access the
- timing data for the parent frame.
- [ 1] = Total time spent in this frame's function, excluding time in
- subfunctions
- [ 2] = Cumulative time spent in this frame's function, including time in
- all subfunctions to this frame.
- [-3] = Name of the function that corresponds to this frame.
- [-2] = Actual frame that we correspond to (used to sync exception handling)
- [-1] = Our parent 6-tuple (corresponds to frame.f_back)
-
- Timing data for each function is stored as a 5-tuple in the dictionary
- self.timings[]. The index is always the name stored in self.cur[4].
- The following are the definitions of the members:
-
- [0] = The number of times this function was called, not counting direct
- or indirect recursion,
- [1] = Number of times this function appears on the stack, minus one
- [2] = Total time spent internal to this function
- [3] = Cumulative time that this function was present on the stack. In
- non-recursive functions, this is the total execution time from start
- to finish of each invocation of a function, including time spent in
- all subfunctions.
- [4] = A dictionary indicating for each function name, the number of times
- it was called by us.
- """
-
- def __init__(self, timer=None):
- self.timings = {}
- self.cur = None
- self.cmd = ""
-
- self.dispatch = { \
- 'call' : self.trace_dispatch_call, \
- 'return' : self.trace_dispatch_return, \
- 'exception': self.trace_dispatch_exception, \
- }
-
- if not timer:
- if os.name == 'mac':
- import MacOS
- self.timer = MacOS.GetTicks
- self.dispatcher = self.trace_dispatch_mac
- self.get_time = self.get_time_mac
- elif hasattr(time, 'clock'):
- self.timer = time.clock
- self.dispatcher = self.trace_dispatch_i
- elif hasattr(os, 'times'):
- self.timer = os.times
- self.dispatcher = self.trace_dispatch
- else:
- self.timer = time.time
- self.dispatcher = self.trace_dispatch_i
- else:
- self.timer = timer
- t = self.timer() # test out timer function
- try:
- if len(t) == 2:
- self.dispatcher = self.trace_dispatch
- else:
- self.dispatcher = self.trace_dispatch_l
- except TypeError:
- self.dispatcher = self.trace_dispatch_i
- self.t = self.get_time()
- self.simulate_call('profiler')
-
-
- def get_time(self): # slow simulation of method to acquire time
- t = self.timer()
- if type(t) == type(()) or type(t) == type([]):
- t = reduce(lambda x,y: x+y, t, 0)
- return t
-
- def get_time_mac(self):
- return self.timer()/60.0
-
- # Heavily optimized dispatch routine for os.times() timer
-
- def trace_dispatch(self, frame, event, arg):
- t = self.timer()
- t = t[0] + t[1] - self.t # No Calibration constant
- # t = t[0] + t[1] - self.t - .00053 # Calibration constant
-
- if self.dispatch[event](frame,t):
- t = self.timer()
- self.t = t[0] + t[1]
- else:
- r = self.timer()
- self.t = r[0] + r[1] - t # put back unrecorded delta
- return
-
-
-
- # Dispatch routine for best timer program (return = scalar integer)
-
- def trace_dispatch_i(self, frame, event, arg):
- t = self.timer() - self.t # - 1 # Integer calibration constant
- if self.dispatch[event](frame,t):
- self.t = self.timer()
- else:
- self.t = self.timer() - t # put back unrecorded delta
- return
-
- # Dispatch routine for macintosh (timer returns time in ticks of 1/60th second)
-
- def trace_dispatch_mac(self, frame, event, arg):
- t = self.timer()/60.0 - self.t # - 1 # Integer calibration constant
- if self.dispatch[event](frame,t):
- self.t = self.timer()/60.0
- else:
- self.t = self.timer()/60.0 - t # put back unrecorded delta
- return
-
-
- # SLOW generic dispatch routine for timer returning lists of numbers
-
- def trace_dispatch_l(self, frame, event, arg):
- t = self.get_time() - self.t
-
- if self.dispatch[event](frame,t):
- self.t = self.get_time()
- else:
- self.t = self.get_time()-t # put back unrecorded delta
- return
-
-
- def trace_dispatch_exception(self, frame, t):
- rt, rtt, rct, rfn, rframe, rcur = self.cur
- if (not rframe is frame) and rcur:
- return self.trace_dispatch_return(rframe, t)
- return 0
-
-
- def trace_dispatch_call(self, frame, t):
- fcode = frame.f_code
- fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
- self.cur = (t, 0, 0, fn, frame, self.cur)
- if self.timings.has_key(fn):
- cc, ns, tt, ct, callers = self.timings[fn]
- self.timings[fn] = cc, ns + 1, tt, ct, callers
- else:
- self.timings[fn] = 0, 0, 0, 0, {}
- return 1
-
- def trace_dispatch_return(self, frame, t):
- # if not frame is self.cur[-2]: raise "Bad return", self.cur[3]
-
- # Prefix "r" means part of the Returning or exiting frame
- # Prefix "p" means part of the Previous or older frame
-
- rt, rtt, rct, rfn, frame, rcur = self.cur
- rtt = rtt + t
- sft = rtt + rct
-
- pt, ptt, pct, pfn, pframe, pcur = rcur
- self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
-
- cc, ns, tt, ct, callers = self.timings[rfn]
- if not ns:
- ct = ct + sft
- cc = cc + 1
- if callers.has_key(pfn):
- callers[pfn] = callers[pfn] + 1 # hack: gather more
- # stats such as the amount of time added to ct courtesy
- # of this specific call, and the contribution to cc
- # courtesy of this call.
- else:
- callers[pfn] = 1
- self.timings[rfn] = cc, ns - 1, tt+rtt, ct, callers
-
- return 1
-
- # The next few functions play with self.cmd. By carefully preloading
- # our parallel stack, we can force the profiled result to include
- # an arbitrary string as the name of the calling function.
- # We use self.cmd as that string, and the resulting stats look
- # very nice :-).
-
- def set_cmd(self, cmd):
- if self.cur[-1]: return # already set
- self.cmd = cmd
- self.simulate_call(cmd)
-
- class fake_code:
- def __init__(self, filename, line, name):
- self.co_filename = filename
- self.co_line = line
- self.co_name = name
- self.co_firstlineno = 0
-
- def __repr__(self):
- return repr((self.co_filename, self.co_line, self.co_name))
-
- class fake_frame:
- def __init__(self, code, prior):
- self.f_code = code
- self.f_back = prior
-
- def simulate_call(self, name):
- code = self.fake_code('profile', 0, name)
- if self.cur:
- pframe = self.cur[-2]
- else:
- pframe = None
- frame = self.fake_frame(code, pframe)
- a = self.dispatch['call'](frame, 0)
- return
-
- # collect stats from pending stack, including getting final
- # timings for self.cmd frame.
-
- def simulate_cmd_complete(self):
- t = self.get_time() - self.t
- while self.cur[-1]:
- # We *can* cause assertion errors here if
- # dispatch_trace_return checks for a frame match!
- a = self.dispatch['return'](self.cur[-2], t)
- t = 0
- self.t = self.get_time() - t
-
-
- def print_stats(self):
- import pstats
- pstats.Stats(self).strip_dirs().sort_stats(-1). \
- print_stats()
-
- def dump_stats(self, file):
- f = open(file, 'wb')
- self.create_stats()
- marshal.dump(self.stats, f)
- f.close()
-
- def create_stats(self):
- self.simulate_cmd_complete()
- self.snapshot_stats()
-
- def snapshot_stats(self):
- self.stats = {}
- for func in self.timings.keys():
- cc, ns, tt, ct, callers = self.timings[func]
- callers = callers.copy()
- nc = 0
- for func_caller in callers.keys():
- nc = nc + callers[func_caller]
- self.stats[func] = cc, nc, tt, ct, callers
-
-
- # The following two methods can be called by clients to use
- # a profiler to profile a statement, given as a string.
-
- def run(self, cmd):
- import __main__
- dict = __main__.__dict__
- return self.runctx(cmd, dict, dict)
-
- def runctx(self, cmd, globals, locals):
- self.set_cmd(cmd)
- sys.setprofile(self.dispatcher)
- try:
- exec cmd in globals, locals
- finally:
- sys.setprofile(None)
- return self
-
- # This method is more useful to profile a single function call.
- def runcall(self, func, *args):
- self.set_cmd(`func`)
- sys.setprofile(self.dispatcher)
- try:
- return apply(func, args)
- finally:
- sys.setprofile(None)
-
-
- #******************************************************************
- # The following calculates the overhead for using a profiler. The
- # problem is that it takes a fair amount of time for the profiler
- # to stop the stopwatch (from the time it receives an event).
- # Similarly, there is a delay from the time that the profiler
- # re-starts the stopwatch before the user's code really gets to
- # continue. The following code tries to measure the difference on
- # a per-event basis. The result can then be placed in the
- # Profile.dispatch_event() routine for the given platform. Note
- # that this difference is only significant if there are a lot of
- # events, and relatively little user code per event. For example,
- # code with small functions will typically benefit from having the
- # profiler calibrated for the current platform. This *could* be
- # done on the fly during init() time, but it is not worth the
- # effort. Also note that if too large a value is specified, then
- # execution time on some functions will actually appear as a
- # negative number. It is *normal* for some functions (with very
- # low call counts) to have such negative stats, even if the
- # calibration figure is "correct."
- #
- # One alternative to profile-time calibration adjustments (i.e.,
- # adding in the magic little delta during each event) is to track
- # more carefully the number of events (and cumulatively, the number
- # of events during sub functions) that are seen. If this were
- # done, then the arithmetic could be done after the fact (i.e., at
- # display time). Currently, we track only call/return events.
- # These values can be deduced by examining the callees and callers
- # vectors for each function. Hence we *can* almost correct the
- # internal time figure at print time (note that we currently don't
- # track exception event processing counts). Unfortunately, there
- # is currently no similar information for cumulative sub-function
- # time. It would not be hard to "get all this info" at profiler
- # time. Specifically, we would have to extend the tuples to keep
- # counts of this in each frame, and then extend the defs of timing
- # tuples to include the significant two figures. I'm a bit fearful
- # that this additional feature will slow the heavily optimized
- # event/time ratio (i.e., the profiler would run slower, for a very
- # low "value added" feature.)
- #
- # Plugging in the calibration constant doesn't slow down the
- # profiler very much, and the accuracy goes way up.
- #**************************************************************
-
- def calibrate(self, m):
- # Modified by Tim Peters
- n = m
- s = self.get_time()
- while n:
- self.simple()
- n = n - 1
- f = self.get_time()
- my_simple = f - s
- #print "Simple =", my_simple,
-
- n = m
- s = self.get_time()
- while n:
- self.instrumented()
- n = n - 1
- f = self.get_time()
- my_inst = f - s
- # print "Instrumented =", my_inst
- avg_cost = (my_inst - my_simple)/m
- #print "Delta/call =", avg_cost, "(profiler fixup constant)"
- return avg_cost
-
- # simulate a program with no profiler activity
- def simple(self):
- a = 1
- pass
-
- # simulate a program with call/return event processing
- def instrumented(self):
- a = 1
- self.profiler_simulation(a, a, a)
-
- # simulate an event processing activity (from user's perspective)
- def profiler_simulation(self, x, y, z):
- t = self.timer()
- ## t = t[0] + t[1]
- self.ut = t
+ """Profiler class.
+
+ self.cur is always a tuple. Each such tuple corresponds to a stack
+ frame that is currently active (self.cur[-2]). The following are the
+ definitions of its members. We use this external "parallel stack" to
+ avoid contaminating the program that we are profiling. (old profiler
+ used to write into the frame's local dictionary!!) Derived classes
+ can change the definition of some entries, as long as they leave
+ [-2:] intact.
+
+ [ 0] = Time that needs to be charged to the parent frame's function.
+ It is used so that a function call will not have to access the
+ timing data for the parent frame.
+ [ 1] = Total time spent in this frame's function, excluding time in
+ subfunctions
+ [ 2] = Cumulative time spent in this frame's function, including time in
+ all subfunctions to this frame.
+ [-3] = Name of the function that corresponds to this frame.
+ [-2] = Actual frame that we correspond to (used to sync exception handling)
+ [-1] = Our parent 6-tuple (corresponds to frame.f_back)
+
+ Timing data for each function is stored as a 5-tuple in the dictionary
+ self.timings[]. The index is always the name stored in self.cur[4].
+ The following are the definitions of the members:
+
+ [0] = The number of times this function was called, not counting direct
+ or indirect recursion,
+ [1] = Number of times this function appears on the stack, minus one
+ [2] = Total time spent internal to this function
+ [3] = Cumulative time that this function was present on the stack. In
+ non-recursive functions, this is the total execution time from start
+ to finish of each invocation of a function, including time spent in
+ all subfunctions.
+ [4] = A dictionary indicating for each function name, the number of times
+ it was called by us.
+ """
+
+ def __init__(self, timer=None):
+ self.timings = {}
+ self.cur = None
+ self.cmd = ""
+
+ self.dispatch = { \
+ 'call' : self.trace_dispatch_call, \
+ 'return' : self.trace_dispatch_return, \
+ 'exception': self.trace_dispatch_exception, \
+ }
+
+ if not timer:
+ if os.name == 'mac':
+ import MacOS
+ self.timer = MacOS.GetTicks
+ self.dispatcher = self.trace_dispatch_mac
+ self.get_time = self.get_time_mac
+ elif hasattr(time, 'clock'):
+ self.timer = time.clock
+ self.dispatcher = self.trace_dispatch_i
+ elif hasattr(os, 'times'):
+ self.timer = os.times
+ self.dispatcher = self.trace_dispatch
+ else:
+ self.timer = time.time
+ self.dispatcher = self.trace_dispatch_i
+ else:
+ self.timer = timer
+ t = self.timer() # test out timer function
+ try:
+ if len(t) == 2:
+ self.dispatcher = self.trace_dispatch
+ else:
+ self.dispatcher = self.trace_dispatch_l
+ except TypeError:
+ self.dispatcher = self.trace_dispatch_i
+ self.t = self.get_time()
+ self.simulate_call('profiler')
+
+
+ def get_time(self): # slow simulation of method to acquire time
+ t = self.timer()
+ if type(t) == type(()) or type(t) == type([]):
+ t = reduce(lambda x,y: x+y, t, 0)
+ return t
+
+ def get_time_mac(self):
+ return self.timer()/60.0
+
+ # Heavily optimized dispatch routine for os.times() timer
+
+ def trace_dispatch(self, frame, event, arg):
+ t = self.timer()
+ t = t[0] + t[1] - self.t # No Calibration constant
+ # t = t[0] + t[1] - self.t - .00053 # Calibration constant
+
+ if self.dispatch[event](frame,t):
+ t = self.timer()
+ self.t = t[0] + t[1]
+ else:
+ r = self.timer()
+ self.t = r[0] + r[1] - t # put back unrecorded delta
+ return
+
+
+
+ # Dispatch routine for best timer program (return = scalar integer)
+
+ def trace_dispatch_i(self, frame, event, arg):
+ t = self.timer() - self.t # - 1 # Integer calibration constant
+ if self.dispatch[event](frame,t):
+ self.t = self.timer()
+ else:
+ self.t = self.timer() - t # put back unrecorded delta
+ return
+
+ # Dispatch routine for macintosh (timer returns time in ticks of 1/60th second)
+
+ def trace_dispatch_mac(self, frame, event, arg):
+ t = self.timer()/60.0 - self.t # - 1 # Integer calibration constant
+ if self.dispatch[event](frame,t):
+ self.t = self.timer()/60.0
+ else:
+ self.t = self.timer()/60.0 - t # put back unrecorded delta
+ return
+
+
+ # SLOW generic dispatch routine for timer returning lists of numbers
+
+ def trace_dispatch_l(self, frame, event, arg):
+ t = self.get_time() - self.t
+
+ if self.dispatch[event](frame,t):
+ self.t = self.get_time()
+ else:
+ self.t = self.get_time()-t # put back unrecorded delta
+ return
+
+
+ def trace_dispatch_exception(self, frame, t):
+ rt, rtt, rct, rfn, rframe, rcur = self.cur
+ if (not rframe is frame) and rcur:
+ return self.trace_dispatch_return(rframe, t)
+ return 0
+
+
+ def trace_dispatch_call(self, frame, t):
+ fcode = frame.f_code
+ fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
+ self.cur = (t, 0, 0, fn, frame, self.cur)
+ if self.timings.has_key(fn):
+ cc, ns, tt, ct, callers = self.timings[fn]
+ self.timings[fn] = cc, ns + 1, tt, ct, callers
+ else:
+ self.timings[fn] = 0, 0, 0, 0, {}
+ return 1
+
+ def trace_dispatch_return(self, frame, t):
+ # if not frame is self.cur[-2]: raise "Bad return", self.cur[3]
+
+ # Prefix "r" means part of the Returning or exiting frame
+ # Prefix "p" means part of the Previous or older frame
+
+ rt, rtt, rct, rfn, frame, rcur = self.cur
+ rtt = rtt + t
+ sft = rtt + rct
+
+ pt, ptt, pct, pfn, pframe, pcur = rcur
+ self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
+
+ cc, ns, tt, ct, callers = self.timings[rfn]
+ if not ns:
+ ct = ct + sft
+ cc = cc + 1
+ if callers.has_key(pfn):
+ callers[pfn] = callers[pfn] + 1 # hack: gather more
+ # stats such as the amount of time added to ct courtesy
+ # of this specific call, and the contribution to cc
+ # courtesy of this call.
+ else:
+ callers[pfn] = 1
+ self.timings[rfn] = cc, ns - 1, tt+rtt, ct, callers
+
+ return 1
+
+ # The next few functions play with self.cmd. By carefully preloading
+ # our parallel stack, we can force the profiled result to include
+ # an arbitrary string as the name of the calling function.
+ # We use self.cmd as that string, and the resulting stats look
+ # very nice :-).
+
+ def set_cmd(self, cmd):
+ if self.cur[-1]: return # already set
+ self.cmd = cmd
+ self.simulate_call(cmd)
+
+ class fake_code:
+ def __init__(self, filename, line, name):
+ self.co_filename = filename
+ self.co_line = line
+ self.co_name = name
+ self.co_firstlineno = 0
+
+ def __repr__(self):
+ return repr((self.co_filename, self.co_line, self.co_name))
+
+ class fake_frame:
+ def __init__(self, code, prior):
+ self.f_code = code
+ self.f_back = prior
+
+ def simulate_call(self, name):
+ code = self.fake_code('profile', 0, name)
+ if self.cur:
+ pframe = self.cur[-2]
+ else:
+ pframe = None
+ frame = self.fake_frame(code, pframe)
+ a = self.dispatch['call'](frame, 0)
+ return
+
+ # collect stats from pending stack, including getting final
+ # timings for self.cmd frame.
+
+ def simulate_cmd_complete(self):
+ t = self.get_time() - self.t
+ while self.cur[-1]:
+ # We *can* cause assertion errors here if
+ # dispatch_trace_return checks for a frame match!
+ a = self.dispatch['return'](self.cur[-2], t)
+ t = 0
+ self.t = self.get_time() - t
+
+
+ def print_stats(self):
+ import pstats
+ pstats.Stats(self).strip_dirs().sort_stats(-1). \
+ print_stats()
+
+ def dump_stats(self, file):
+ f = open(file, 'wb')
+ self.create_stats()
+ marshal.dump(self.stats, f)
+ f.close()
+
+ def create_stats(self):
+ self.simulate_cmd_complete()
+ self.snapshot_stats()
+
+ def snapshot_stats(self):
+ self.stats = {}
+ for func in self.timings.keys():
+ cc, ns, tt, ct, callers = self.timings[func]
+ callers = callers.copy()
+ nc = 0
+ for func_caller in callers.keys():
+ nc = nc + callers[func_caller]
+ self.stats[func] = cc, nc, tt, ct, callers
+
+
+ # The following two methods can be called by clients to use
+ # a profiler to profile a statement, given as a string.
+
+ def run(self, cmd):
+ import __main__
+ dict = __main__.__dict__
+ return self.runctx(cmd, dict, dict)
+
+ def runctx(self, cmd, globals, locals):
+ self.set_cmd(cmd)
+ sys.setprofile(self.dispatcher)
+ try:
+ exec cmd in globals, locals
+ finally:
+ sys.setprofile(None)
+ return self
+
+ # This method is more useful to profile a single function call.
+ def runcall(self, func, *args):
+ self.set_cmd(`func`)
+ sys.setprofile(self.dispatcher)
+ try:
+ return apply(func, args)
+ finally:
+ sys.setprofile(None)
+
+
+ #******************************************************************
+ # The following calculates the overhead for using a profiler. The
+ # problem is that it takes a fair amount of time for the profiler
+ # to stop the stopwatch (from the time it receives an event).
+ # Similarly, there is a delay from the time that the profiler
+ # re-starts the stopwatch before the user's code really gets to
+ # continue. The following code tries to measure the difference on
+ # a per-event basis. The result can then be placed in the
+ # Profile.dispatch_event() routine for the given platform. Note
+ # that this difference is only significant if there are a lot of
+ # events, and relatively little user code per event. For example,
+ # code with small functions will typically benefit from having the
+ # profiler calibrated for the current platform. This *could* be
+ # done on the fly during init() time, but it is not worth the
+ # effort. Also note that if too large a value is specified, then
+ # execution time on some functions will actually appear as a
+ # negative number. It is *normal* for some functions (with very
+ # low call counts) to have such negative stats, even if the
+ # calibration figure is "correct."
+ #
+ # One alternative to profile-time calibration adjustments (i.e.,
+ # adding in the magic little delta during each event) is to track
+ # more carefully the number of events (and cumulatively, the number
+ # of events during sub functions) that are seen. If this were
+ # done, then the arithmetic could be done after the fact (i.e., at
+ # display time). Currently, we track only call/return events.
+ # These values can be deduced by examining the callees and callers
+ # vectors for each function. Hence we *can* almost correct the
+ # internal time figure at print time (note that we currently don't
+ # track exception event processing counts). Unfortunately, there
+ # is currently no similar information for cumulative sub-function
+ # time. It would not be hard to "get all this info" at profiler
+ # time. Specifically, we would have to extend the tuples to keep
+ # counts of this in each frame, and then extend the defs of timing
+ # tuples to include the significant two figures. I'm a bit fearful
+ # that this additional feature will slow the heavily optimized
+ # event/time ratio (i.e., the profiler would run slower, for a very
+ # low "value added" feature.)
+ #
+ # Plugging in the calibration constant doesn't slow down the
+ # profiler very much, and the accuracy goes way up.
+ #**************************************************************
+
+ def calibrate(self, m):
+ # Modified by Tim Peters
+ n = m
+ s = self.get_time()
+ while n:
+ self.simple()
+ n = n - 1
+ f = self.get_time()
+ my_simple = f - s
+ #print "Simple =", my_simple,
+
+ n = m
+ s = self.get_time()
+ while n:
+ self.instrumented()
+ n = n - 1
+ f = self.get_time()
+ my_inst = f - s
+ # print "Instrumented =", my_inst
+ avg_cost = (my_inst - my_simple)/m
+ #print "Delta/call =", avg_cost, "(profiler fixup constant)"
+ return avg_cost
+
+ # simulate a program with no profiler activity
+ def simple(self):
+ a = 1
+ pass
+
+ # simulate a program with call/return event processing
+ def instrumented(self):
+ a = 1
+ self.profiler_simulation(a, a, a)
+
+ # simulate an event processing activity (from user's perspective)
+ def profiler_simulation(self, x, y, z):
+ t = self.timer()
+ ## t = t[0] + t[1]
+ self.ut = t
class OldProfile(Profile):
- """A derived profiler that simulates the old style profile, providing
- errant results on recursive functions. The reason for the usefulness of
- this profiler is that it runs faster (i.e., less overhead). It still
- creates all the caller stats, and is quite useful when there is *no*
- recursion in the user's code.
-
- This code also shows how easy it is to create a modified profiler.
- """
-
- def trace_dispatch_exception(self, frame, t):
- rt, rtt, rct, rfn, rframe, rcur = self.cur
- if rcur and not rframe is frame:
- return self.trace_dispatch_return(rframe, t)
- return 0
-
- def trace_dispatch_call(self, frame, t):
- fn = `frame.f_code`
-
- self.cur = (t, 0, 0, fn, frame, self.cur)
- if self.timings.has_key(fn):
- tt, ct, callers = self.timings[fn]
- self.timings[fn] = tt, ct, callers
- else:
- self.timings[fn] = 0, 0, {}
- return 1
-
- def trace_dispatch_return(self, frame, t):
- rt, rtt, rct, rfn, frame, rcur = self.cur
- rtt = rtt + t
- sft = rtt + rct
-
- pt, ptt, pct, pfn, pframe, pcur = rcur
- self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
-
- tt, ct, callers = self.timings[rfn]
- if callers.has_key(pfn):
- callers[pfn] = callers[pfn] + 1
- else:
- callers[pfn] = 1
- self.timings[rfn] = tt+rtt, ct + sft, callers
-
- return 1
-
-
- def snapshot_stats(self):
- self.stats = {}
- for func in self.timings.keys():
- tt, ct, callers = self.timings[func]
- callers = callers.copy()
- nc = 0
- for func_caller in callers.keys():
- nc = nc + callers[func_caller]
- self.stats[func] = nc, nc, tt, ct, callers
-
-
+ """A derived profiler that simulates the old style profile, providing
+ errant results on recursive functions. The reason for the usefulness of
+ this profiler is that it runs faster (i.e., less overhead). It still
+ creates all the caller stats, and is quite useful when there is *no*
+ recursion in the user's code.
+
+ This code also shows how easy it is to create a modified profiler.
+ """
+
+ def trace_dispatch_exception(self, frame, t):
+ rt, rtt, rct, rfn, rframe, rcur = self.cur
+ if rcur and not rframe is frame:
+ return self.trace_dispatch_return(rframe, t)
+ return 0
+
+ def trace_dispatch_call(self, frame, t):
+ fn = `frame.f_code`
+
+ self.cur = (t, 0, 0, fn, frame, self.cur)
+ if self.timings.has_key(fn):
+ tt, ct, callers = self.timings[fn]
+ self.timings[fn] = tt, ct, callers
+ else:
+ self.timings[fn] = 0, 0, {}
+ return 1
+
+ def trace_dispatch_return(self, frame, t):
+ rt, rtt, rct, rfn, frame, rcur = self.cur
+ rtt = rtt + t
+ sft = rtt + rct
+
+ pt, ptt, pct, pfn, pframe, pcur = rcur
+ self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
+
+ tt, ct, callers = self.timings[rfn]
+ if callers.has_key(pfn):
+ callers[pfn] = callers[pfn] + 1
+ else:
+ callers[pfn] = 1
+ self.timings[rfn] = tt+rtt, ct + sft, callers
+
+ return 1
+
+
+ def snapshot_stats(self):
+ self.stats = {}
+ for func in self.timings.keys():
+ tt, ct, callers = self.timings[func]
+ callers = callers.copy()
+ nc = 0
+ for func_caller in callers.keys():
+ nc = nc + callers[func_caller]
+ self.stats[func] = nc, nc, tt, ct, callers
+
+
class HotProfile(Profile):
- """The fastest derived profile example. It does not calculate
- caller-callee relationships, and does not calculate cumulative
- time under a function. It only calculates time spent in a
- function, so it runs very quickly due to its very low overhead.
- """
+ """The fastest derived profile example. It does not calculate
+ caller-callee relationships, and does not calculate cumulative
+ time under a function. It only calculates time spent in a
+ function, so it runs very quickly due to its very low overhead.
+ """
+
+ def trace_dispatch_exception(self, frame, t):
+ rt, rtt, rfn, rframe, rcur = self.cur
+ if rcur and not rframe is frame:
+ return self.trace_dispatch_return(rframe, t)
+ return 0
- def trace_dispatch_exception(self, frame, t):
- rt, rtt, rfn, rframe, rcur = self.cur
- if rcur and not rframe is frame:
- return self.trace_dispatch_return(rframe, t)
- return 0
+ def trace_dispatch_call(self, frame, t):
+ self.cur = (t, 0, frame, self.cur)
+ return 1
- def trace_dispatch_call(self, frame, t):
- self.cur = (t, 0, frame, self.cur)
- return 1
+ def trace_dispatch_return(self, frame, t):
+ rt, rtt, frame, rcur = self.cur
- def trace_dispatch_return(self, frame, t):
- rt, rtt, frame, rcur = self.cur
+ rfn = `frame.f_code`
- rfn = `frame.f_code`
+ pt, ptt, pframe, pcur = rcur
+ self.cur = pt, ptt+rt, pframe, pcur
- pt, ptt, pframe, pcur = rcur
- self.cur = pt, ptt+rt, pframe, pcur
+ if self.timings.has_key(rfn):
+ nc, tt = self.timings[rfn]
+ self.timings[rfn] = nc + 1, rt + rtt + tt
+ else:
+ self.timings[rfn] = 1, rt + rtt
- if self.timings.has_key(rfn):
- nc, tt = self.timings[rfn]
- self.timings[rfn] = nc + 1, rt + rtt + tt
- else:
- self.timings[rfn] = 1, rt + rtt
+ return 1
- return 1
+ def snapshot_stats(self):
+ self.stats = {}
+ for func in self.timings.keys():
+ nc, tt = self.timings[func]
+ self.stats[func] = nc, nc, tt, 0, {}
- def snapshot_stats(self):
- self.stats = {}
- for func in self.timings.keys():
- nc, tt = self.timings[func]
- self.stats[func] = nc, nc, tt, 0, {}
-
#****************************************************************************
def Stats(*args):
- print 'Report generating functions are in the "pstats" module\a'
+ print 'Report generating functions are in the "pstats" module\a'
# When invoked as main program, invoke the profiler on a script
if __name__ == '__main__':
- import sys
- import os
- if not sys.argv[1:]:
- print "usage: profile.py scriptfile [arg] ..."
- sys.exit(2)
+ import sys
+ import os
+ if not sys.argv[1:]:
+ print "usage: profile.py scriptfile [arg] ..."
+ sys.exit(2)
- filename = sys.argv[1] # Get script filename
+ filename = sys.argv[1] # Get script filename
- del sys.argv[0] # Hide "profile.py" from argument list
+ del sys.argv[0] # Hide "profile.py" from argument list
- # Insert script directory in front of module search path
- sys.path.insert(0, os.path.dirname(filename))
+ # Insert script directory in front of module search path
+ sys.path.insert(0, os.path.dirname(filename))
- run('execfile(' + `filename` + ')')
+ run('execfile(' + `filename` + ')')
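A minimal sketch of the entry points rewritten above: run() profiles a statement (printing a report, or dumping marshalled stats when a filename is given) and Profile.runcall() profiles a single call. fib() is just a toy workload and 'fib.prof' an arbitrary output name:

    import profile

    def fib(n):
        if n < 2:
            return n
        return fib(n - 1) + fib(n - 2)

    profile.run('fib(15)')               # statement runs in __main__'s namespace
    profile.run('fib(15)', 'fib.prof')   # dump stats for later use with pstats

    p = profile.Profile()
    p.runcall(fib, 10)
    p.create_stats()
    for func, (cc, nc, tt, ct, callers) in p.stats.items():
        print func, nc, tt, ct           # (file, line, name) -> call counts and times

The dumped 'fib.prof' file is what pstats.Stats() in the next diff reads back via marshal.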
diff --git a/Lib/pstats.py b/Lib/pstats.py
index 558962e..9923b95 100644
--- a/Lib/pstats.py
+++ b/Lib/pstats.py
@@ -9,7 +9,7 @@
# Copyright 1994, by InfoSeek Corporation, all rights reserved.
# Written by James Roskind
-#
+#
# Permission to use, copy, modify, and distribute this Python software
# and its associated documentation for any purpose (subject to the
# restriction in the following sentence) without fee is hereby granted,
@@ -22,7 +22,7 @@
# to remain in Python, compiled Python, or other languages (such as C)
# wherein the modified or derived code is exclusively imported into a
# Python module.
-#
+#
# INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY
@@ -41,486 +41,485 @@ import re
import fpformat
class Stats:
- """This class is used for creating reports from data generated by the
- Profile class. It is a "friend" of that class, and imports data either
- by direct access to members of Profile class, or by reading in a dictionary
- that was emitted (via marshal) from the Profile class.
-
- The big change from the previous Profiler (in terms of raw functionality)
- is that an "add()" method has been provided to combine Stats from
- several distinct profile runs. Both the constructor and the add()
- method now take arbitrarily many file names as arguments.
-
- All the print methods now take an argument that indicates how many lines
- to print. If the arg is a floating point number between 0 and 1.0, then
- it is taken as a decimal percentage of the available lines to be printed
- (e.g., .1 means print 10% of all available lines). If it is an integer,
- it is taken to mean the number of lines of data that you wish to have
- printed.
-
- The sort_stats() method now processes some additional options (i.e., in
- addition to the old -1, 0, 1, or 2). It takes an arbitrary number of quoted
- strings to select the sort order. For example sort_stats('time', 'name')
- sorts on the major key of "internal function time", and on the minor
- key of 'the name of the function'. Look at the two tables in sort_stats()
- and get_sort_arg_defs(self) for more examples.
-
- All methods now return "self", so you can string together commands like:
- Stats('foo', 'goo').strip_dirs().sort_stats('calls').\
- print_stats(5).print_callers(5)
- """
-
- def __init__(self, *args):
- if not len(args):
- arg = None
- else:
- arg = args[0]
- args = args[1:]
- self.init(arg)
- apply(self.add, args).ignore()
-
- def init(self, arg):
- self.all_callees = None # calc only if needed
- self.files = []
- self.fcn_list = None
- self.total_tt = 0
- self.total_calls = 0
- self.prim_calls = 0
- self.max_name_len = 0
- self.top_level = {}
- self.stats = {}
- self.sort_arg_dict = {}
- self.load_stats(arg)
- trouble = 1
- try:
- self.get_top_level_stats()
- trouble = 0
- finally:
- if trouble:
- print "Invalid timing data",
- if self.files: print self.files[-1],
- print
-
-
- def load_stats(self, arg):
- if not arg: self.stats = {}
- elif type(arg) == type(""):
- f = open(arg, 'rb')
- self.stats = marshal.load(f)
- f.close()
- try:
- file_stats = os.stat(arg)
- arg = time.ctime(file_stats[8]) + " " + arg
- except: # in case this is not unix
- pass
- self.files = [ arg ]
- elif hasattr(arg, 'create_stats'):
- arg.create_stats()
- self.stats = arg.stats
- arg.stats = {}
- if not self.stats:
- raise TypeError, "Cannot create or construct a " \
- + `self.__class__` \
- + " object from '" + `arg` + "'"
- return
-
- def get_top_level_stats(self):
- for func in self.stats.keys():
- cc, nc, tt, ct, callers = self.stats[func]
- self.total_calls = self.total_calls + nc
- self.prim_calls = self.prim_calls + cc
- self.total_tt = self.total_tt + tt
- if callers.has_key(("jprofile", 0, "profiler")):
- self.top_level[func] = None
- if len(func_std_string(func)) > self.max_name_len:
- self.max_name_len = len(func_std_string(func))
-
- def add(self, *arg_list):
- if not arg_list: return self
- if len(arg_list) > 1: apply(self.add, arg_list[1:])
- other = arg_list[0]
- if type(self) != type(other) or \
- self.__class__ != other.__class__:
- other = Stats(other)
- self.files = self.files + other.files
- self.total_calls = self.total_calls + other.total_calls
- self.prim_calls = self.prim_calls + other.prim_calls
- self.total_tt = self.total_tt + other.total_tt
- for func in other.top_level.keys():
- self.top_level[func] = None
-
- if self.max_name_len < other.max_name_len:
- self.max_name_len = other.max_name_len
-
- self.fcn_list = None
-
- for func in other.stats.keys():
- if self.stats.has_key(func):
- old_func_stat = self.stats[func]
- else:
- old_func_stat = (0, 0, 0, 0, {},)
- self.stats[func] = add_func_stats(old_func_stat, \
- other.stats[func])
- return self
-
-
-
- # list the tuple indices and directions for sorting,
- # along with some printable description
- sort_arg_dict_default = {\
- "calls" : (((1,-1), ), "call count"),\
- "cumulative": (((3,-1), ), "cumulative time"),\
- "file" : (((4, 1), ), "file name"),\
- "line" : (((5, 1), ), "line number"),\
- "module" : (((4, 1), ), "file name"),\
- "name" : (((6, 1), ), "function name"),\
- "nfl" : (((6, 1),(4, 1),(5, 1),), "name/file/line"), \
- "pcalls" : (((0,-1), ), "call count"),\
- "stdname" : (((7, 1), ), "standard name"),\
- "time" : (((2,-1), ), "internal time"),\
- }
-
- def get_sort_arg_defs(self):
- """Expand all abbreviations that are unique."""
- if not self.sort_arg_dict:
- self.sort_arg_dict = dict = {}
- std_list = dict.keys()
- bad_list = {}
- for word in self.sort_arg_dict_default.keys():
- fragment = word
- while fragment:
- if not fragment:
- break
- if dict.has_key(fragment):
- bad_list[fragment] = 0
- break
- dict[fragment] = self. \
- sort_arg_dict_default[word]
- fragment = fragment[:-1]
- for word in bad_list.keys():
- del dict[word]
- return self.sort_arg_dict
-
-
- def sort_stats(self, *field):
- if not field:
- self.fcn_list = 0
- return self
- if len(field) == 1 and type(field[0]) == type(1):
- # Be compatible with old profiler
- field = [ {-1: "stdname", \
- 0:"calls", \
- 1:"time", \
- 2: "cumulative" } [ field[0] ] ]
-
- sort_arg_defs = self.get_sort_arg_defs()
- sort_tuple = ()
- self.sort_type = ""
- connector = ""
- for word in field:
- sort_tuple = sort_tuple + sort_arg_defs[word][0]
- self.sort_type = self.sort_type + connector + \
- sort_arg_defs[word][1]
- connector = ", "
-
- stats_list = []
- for func in self.stats.keys():
- cc, nc, tt, ct, callers = self.stats[func]
- stats_list.append((cc, nc, tt, ct) + func_split(func) \
- + (func_std_string(func), func,) )
-
- stats_list.sort(TupleComp(sort_tuple).compare)
-
- self.fcn_list = fcn_list = []
- for tuple in stats_list:
- fcn_list.append(tuple[-1])
- return self
-
-
- def reverse_order(self):
- if self.fcn_list: self.fcn_list.reverse()
- return self
-
- def strip_dirs(self):
- oldstats = self.stats
- self.stats = newstats = {}
- max_name_len = 0
- for func in oldstats.keys():
- cc, nc, tt, ct, callers = oldstats[func]
- newfunc = func_strip_path(func)
- if len(func_std_string(newfunc)) > max_name_len:
- max_name_len = len(func_std_string(newfunc))
- newcallers = {}
- for func2 in callers.keys():
- newcallers[func_strip_path(func2)] = \
- callers[func2]
-
- if newstats.has_key(newfunc):
- newstats[newfunc] = add_func_stats( \
- newstats[newfunc],\
- (cc, nc, tt, ct, newcallers))
- else:
- newstats[newfunc] = (cc, nc, tt, ct, newcallers)
- old_top = self.top_level
- self.top_level = new_top = {}
- for func in old_top.keys():
- new_top[func_strip_path(func)] = None
-
- self.max_name_len = max_name_len
-
- self.fcn_list = None
- self.all_callees = None
- return self
-
-
-
- def calc_callees(self):
- if self.all_callees: return
- self.all_callees = all_callees = {}
- for func in self.stats.keys():
- if not all_callees.has_key(func):
- all_callees[func] = {}
- cc, nc, tt, ct, callers = self.stats[func]
- for func2 in callers.keys():
- if not all_callees.has_key(func2):
- all_callees[func2] = {}
- all_callees[func2][func] = callers[func2]
- return
-
- #******************************************************************
- # The following functions support actual printing of reports
- #******************************************************************
-
- # Optional "amount" is either a line count, or a percentage of lines.
-
- def eval_print_amount(self, sel, list, msg):
- new_list = list
- if type(sel) == type(""):
- new_list = []
- for func in list:
- if re.search(sel, func_std_string(func)):
- new_list.append(func)
- else:
- count = len(list)
- if type(sel) == type(1.0) and 0.0 <= sel < 1.0:
- count = int (count * sel + .5)
- new_list = list[:count]
- elif type(sel) == type(1) and 0 <= sel < count:
- count = sel
- new_list = list[:count]
- if len(list) != len(new_list):
- msg = msg + " List reduced from " + `len(list)` \
- + " to " + `len(new_list)` + \
- " due to restriction <" + `sel` + ">\n"
-
- return new_list, msg
-
-
-
- def get_print_list(self, sel_list):
- width = self.max_name_len
- if self.fcn_list:
- list = self.fcn_list[:]
- msg = " Ordered by: " + self.sort_type + '\n'
- else:
- list = self.stats.keys()
- msg = " Random listing order was used\n"
-
- for selection in sel_list:
- list,msg = self.eval_print_amount(selection, list, msg)
-
- count = len(list)
-
- if not list:
- return 0, list
- print msg
- if count < len(self.stats):
- width = 0
- for func in list:
- if len(func_std_string(func)) > width:
- width = len(func_std_string(func))
- return width+2, list
-
- def print_stats(self, *amount):
- for filename in self.files:
- print filename
- if self.files: print
- indent = " "
- for func in self.top_level.keys():
- print indent, func_get_function_name(func)
-
- print indent, self.total_calls, "function calls",
- if self.total_calls != self.prim_calls:
- print "(" + `self.prim_calls`, "primitive calls)",
- print "in", fpformat.fix(self.total_tt, 3), "CPU seconds"
- print
- width, list = self.get_print_list(amount)
- if list:
- self.print_title()
- for func in list:
- self.print_line(func)
- print
- print
- return self
-
-
- def print_callees(self, *amount):
- width, list = self.get_print_list(amount)
- if list:
- self.calc_callees()
-
- self.print_call_heading(width, "called...")
- for func in list:
- if self.all_callees.has_key(func):
- self.print_call_line(width, \
- func, self.all_callees[func])
- else:
- self.print_call_line(width, func, {})
- print
- print
- return self
-
- def print_callers(self, *amount):
- width, list = self.get_print_list(amount)
- if list:
- self.print_call_heading(width, "was called by...")
- for func in list:
- cc, nc, tt, ct, callers = self.stats[func]
- self.print_call_line(width, func, callers)
- print
- print
- return self
-
- def print_call_heading(self, name_size, column_title):
- print string.ljust("Function ", name_size) + column_title
-
-
- def print_call_line(self, name_size, source, call_dict):
- print string.ljust(func_std_string(source), name_size),
- if not call_dict:
- print "--"
- return
- clist = call_dict.keys()
- clist.sort()
- name_size = name_size + 1
- indent = ""
- for func in clist:
- name = func_std_string(func)
- print indent*name_size + name + '(' \
- + `call_dict[func]`+')', \
- f8(self.stats[func][3])
- indent = " "
-
-
-
- def print_title(self):
- print string.rjust('ncalls', 9),
- print string.rjust('tottime', 8),
- print string.rjust('percall', 8),
- print string.rjust('cumtime', 8),
- print string.rjust('percall', 8),
- print 'filename:lineno(function)'
-
-
- def print_line(self, func): # hack : should print percentages
- cc, nc, tt, ct, callers = self.stats[func]
- c = `nc`
- if nc != cc:
- c = c + '/' + `cc`
- print string.rjust(c, 9),
- print f8(tt),
- if nc == 0:
- print ' '*8,
- else:
- print f8(tt/nc),
- print f8(ct),
- if cc == 0:
- print ' '*8,
- else:
- print f8(ct/cc),
- print func_std_string(func)
-
-
- def ignore(self):
- pass # has no return value, so use at end of line :-)
+ """This class is used for creating reports from data generated by the
+ Profile class. It is a "friend" of that class, and imports data either
+ by direct access to members of Profile class, or by reading in a dictionary
+ that was emitted (via marshal) from the Profile class.
+
+ The big change from the previous Profiler (in terms of raw functionality)
+ is that an "add()" method has been provided to combine Stats from
+ several distinct profile runs. Both the constructor and the add()
+ method now take arbitrarily many file names as arguments.
+
+ All the print methods now take an argument that indicates how many lines
+ to print. If the arg is a floating point number between 0 and 1.0, then
+ it is taken as a decimal percentage of the available lines to be printed
+ (e.g., .1 means print 10% of all available lines). If it is an integer,
+ it is taken to mean the number of lines of data that you wish to have
+ printed.
+
+ The sort_stats() method now processes some additional options (i.e., in
+ addition to the old -1, 0, 1, or 2). It takes an arbitrary number of quoted
+ strings to select the sort order. For example sort_stats('time', 'name')
+ sorts on the major key of "internal function time", and on the minor
+ key of 'the name of the function'. Look at the two tables in sort_stats()
+ and get_sort_arg_defs(self) for more examples.
+
+ All methods now return "self", so you can string together commands like:
+ Stats('foo', 'goo').strip_dirs().sort_stats('calls').\
+ print_stats(5).print_callers(5)
+ """
+
+ def __init__(self, *args):
+ if not len(args):
+ arg = None
+ else:
+ arg = args[0]
+ args = args[1:]
+ self.init(arg)
+ apply(self.add, args).ignore()
+
+ def init(self, arg):
+ self.all_callees = None # calc only if needed
+ self.files = []
+ self.fcn_list = None
+ self.total_tt = 0
+ self.total_calls = 0
+ self.prim_calls = 0
+ self.max_name_len = 0
+ self.top_level = {}
+ self.stats = {}
+ self.sort_arg_dict = {}
+ self.load_stats(arg)
+ trouble = 1
+ try:
+ self.get_top_level_stats()
+ trouble = 0
+ finally:
+ if trouble:
+ print "Invalid timing data",
+ if self.files: print self.files[-1],
+ print
+
+
+ def load_stats(self, arg):
+ if not arg: self.stats = {}
+ elif type(arg) == type(""):
+ f = open(arg, 'rb')
+ self.stats = marshal.load(f)
+ f.close()
+ try:
+ file_stats = os.stat(arg)
+ arg = time.ctime(file_stats[8]) + " " + arg
+ except: # in case this is not unix
+ pass
+ self.files = [ arg ]
+ elif hasattr(arg, 'create_stats'):
+ arg.create_stats()
+ self.stats = arg.stats
+ arg.stats = {}
+ if not self.stats:
+ raise TypeError, "Cannot create or construct a " \
+ + `self.__class__` \
+ + " object from '" + `arg` + "'"
+ return
+
+ def get_top_level_stats(self):
+ for func in self.stats.keys():
+ cc, nc, tt, ct, callers = self.stats[func]
+ self.total_calls = self.total_calls + nc
+ self.prim_calls = self.prim_calls + cc
+ self.total_tt = self.total_tt + tt
+ if callers.has_key(("jprofile", 0, "profiler")):
+ self.top_level[func] = None
+ if len(func_std_string(func)) > self.max_name_len:
+ self.max_name_len = len(func_std_string(func))
+
+ def add(self, *arg_list):
+ if not arg_list: return self
+ if len(arg_list) > 1: apply(self.add, arg_list[1:])
+ other = arg_list[0]
+ if type(self) != type(other) or \
+ self.__class__ != other.__class__:
+ other = Stats(other)
+ self.files = self.files + other.files
+ self.total_calls = self.total_calls + other.total_calls
+ self.prim_calls = self.prim_calls + other.prim_calls
+ self.total_tt = self.total_tt + other.total_tt
+ for func in other.top_level.keys():
+ self.top_level[func] = None
+
+ if self.max_name_len < other.max_name_len:
+ self.max_name_len = other.max_name_len
+
+ self.fcn_list = None
+
+ for func in other.stats.keys():
+ if self.stats.has_key(func):
+ old_func_stat = self.stats[func]
+ else:
+ old_func_stat = (0, 0, 0, 0, {},)
+ self.stats[func] = add_func_stats(old_func_stat, \
+ other.stats[func])
+ return self
+
+
+
+ # list the tuple indices and directions for sorting,
+ # along with some printable description
+ sort_arg_dict_default = {\
+ "calls" : (((1,-1), ), "call count"),\
+ "cumulative": (((3,-1), ), "cumulative time"),\
+ "file" : (((4, 1), ), "file name"),\
+ "line" : (((5, 1), ), "line number"),\
+ "module" : (((4, 1), ), "file name"),\
+ "name" : (((6, 1), ), "function name"),\
+ "nfl" : (((6, 1),(4, 1),(5, 1),), "name/file/line"), \
+ "pcalls" : (((0,-1), ), "call count"),\
+ "stdname" : (((7, 1), ), "standard name"),\
+ "time" : (((2,-1), ), "internal time"),\
+ }
+
+ def get_sort_arg_defs(self):
+ """Expand all abbreviations that are unique."""
+ if not self.sort_arg_dict:
+ self.sort_arg_dict = dict = {}
+ std_list = dict.keys()
+ bad_list = {}
+ for word in self.sort_arg_dict_default.keys():
+ fragment = word
+ while fragment:
+ if not fragment:
+ break
+ if dict.has_key(fragment):
+ bad_list[fragment] = 0
+ break
+ dict[fragment] = self. \
+ sort_arg_dict_default[word]
+ fragment = fragment[:-1]
+ for word in bad_list.keys():
+ del dict[word]
+ return self.sort_arg_dict
+
+
+ def sort_stats(self, *field):
+ if not field:
+ self.fcn_list = 0
+ return self
+ if len(field) == 1 and type(field[0]) == type(1):
+ # Be compatible with old profiler
+ field = [ {-1: "stdname", \
+ 0:"calls", \
+ 1:"time", \
+ 2: "cumulative" } [ field[0] ] ]
+
+ sort_arg_defs = self.get_sort_arg_defs()
+ sort_tuple = ()
+ self.sort_type = ""
+ connector = ""
+ for word in field:
+ sort_tuple = sort_tuple + sort_arg_defs[word][0]
+ self.sort_type = self.sort_type + connector + \
+ sort_arg_defs[word][1]
+ connector = ", "
+
+ stats_list = []
+ for func in self.stats.keys():
+ cc, nc, tt, ct, callers = self.stats[func]
+ stats_list.append((cc, nc, tt, ct) + func_split(func) \
+ + (func_std_string(func), func,) )
+
+ stats_list.sort(TupleComp(sort_tuple).compare)
+
+ self.fcn_list = fcn_list = []
+ for tuple in stats_list:
+ fcn_list.append(tuple[-1])
+ return self
+
+
+ def reverse_order(self):
+ if self.fcn_list: self.fcn_list.reverse()
+ return self
+
+ def strip_dirs(self):
+ oldstats = self.stats
+ self.stats = newstats = {}
+ max_name_len = 0
+ for func in oldstats.keys():
+ cc, nc, tt, ct, callers = oldstats[func]
+ newfunc = func_strip_path(func)
+ if len(func_std_string(newfunc)) > max_name_len:
+ max_name_len = len(func_std_string(newfunc))
+ newcallers = {}
+ for func2 in callers.keys():
+ newcallers[func_strip_path(func2)] = \
+ callers[func2]
+
+ if newstats.has_key(newfunc):
+ newstats[newfunc] = add_func_stats( \
+ newstats[newfunc],\
+ (cc, nc, tt, ct, newcallers))
+ else:
+ newstats[newfunc] = (cc, nc, tt, ct, newcallers)
+ old_top = self.top_level
+ self.top_level = new_top = {}
+ for func in old_top.keys():
+ new_top[func_strip_path(func)] = None
+
+ self.max_name_len = max_name_len
+
+ self.fcn_list = None
+ self.all_callees = None
+ return self
+
+
+
+ def calc_callees(self):
+ if self.all_callees: return
+ self.all_callees = all_callees = {}
+ for func in self.stats.keys():
+ if not all_callees.has_key(func):
+ all_callees[func] = {}
+ cc, nc, tt, ct, callers = self.stats[func]
+ for func2 in callers.keys():
+ if not all_callees.has_key(func2):
+ all_callees[func2] = {}
+ all_callees[func2][func] = callers[func2]
+ return
+
+ #******************************************************************
+ # The following functions support actual printing of reports
+ #******************************************************************
+
+ # Optional "amount" is either a line count, or a percentage of lines.
+
+ def eval_print_amount(self, sel, list, msg):
+ new_list = list
+ if type(sel) == type(""):
+ new_list = []
+ for func in list:
+ if re.search(sel, func_std_string(func)):
+ new_list.append(func)
+ else:
+ count = len(list)
+ if type(sel) == type(1.0) and 0.0 <= sel < 1.0:
+ count = int (count * sel + .5)
+ new_list = list[:count]
+ elif type(sel) == type(1) and 0 <= sel < count:
+ count = sel
+ new_list = list[:count]
+ if len(list) != len(new_list):
+ msg = msg + " List reduced from " + `len(list)` \
+ + " to " + `len(new_list)` + \
+ " due to restriction <" + `sel` + ">\n"
+
+ return new_list, msg
+
+
+
+ def get_print_list(self, sel_list):
+ width = self.max_name_len
+ if self.fcn_list:
+ list = self.fcn_list[:]
+ msg = " Ordered by: " + self.sort_type + '\n'
+ else:
+ list = self.stats.keys()
+ msg = " Random listing order was used\n"
+
+ for selection in sel_list:
+ list,msg = self.eval_print_amount(selection, list, msg)
+
+ count = len(list)
+
+ if not list:
+ return 0, list
+ print msg
+ if count < len(self.stats):
+ width = 0
+ for func in list:
+ if len(func_std_string(func)) > width:
+ width = len(func_std_string(func))
+ return width+2, list
+
+ def print_stats(self, *amount):
+ for filename in self.files:
+ print filename
+ if self.files: print
+ indent = " "
+ for func in self.top_level.keys():
+ print indent, func_get_function_name(func)
+
+ print indent, self.total_calls, "function calls",
+ if self.total_calls != self.prim_calls:
+ print "(" + `self.prim_calls`, "primitive calls)",
+ print "in", fpformat.fix(self.total_tt, 3), "CPU seconds"
+ print
+ width, list = self.get_print_list(amount)
+ if list:
+ self.print_title()
+ for func in list:
+ self.print_line(func)
+ print
+ print
+ return self
+
+
+ def print_callees(self, *amount):
+ width, list = self.get_print_list(amount)
+ if list:
+ self.calc_callees()
+
+ self.print_call_heading(width, "called...")
+ for func in list:
+ if self.all_callees.has_key(func):
+ self.print_call_line(width, \
+ func, self.all_callees[func])
+ else:
+ self.print_call_line(width, func, {})
+ print
+ print
+ return self
+
+ def print_callers(self, *amount):
+ width, list = self.get_print_list(amount)
+ if list:
+ self.print_call_heading(width, "was called by...")
+ for func in list:
+ cc, nc, tt, ct, callers = self.stats[func]
+ self.print_call_line(width, func, callers)
+ print
+ print
+ return self
+
+ def print_call_heading(self, name_size, column_title):
+ print string.ljust("Function ", name_size) + column_title
+
+
+ def print_call_line(self, name_size, source, call_dict):
+ print string.ljust(func_std_string(source), name_size),
+ if not call_dict:
+ print "--"
+ return
+ clist = call_dict.keys()
+ clist.sort()
+ name_size = name_size + 1
+ indent = ""
+ for func in clist:
+ name = func_std_string(func)
+ print indent*name_size + name + '(' \
+ + `call_dict[func]`+')', \
+ f8(self.stats[func][3])
+ indent = " "
+
+
+
+ def print_title(self):
+ print string.rjust('ncalls', 9),
+ print string.rjust('tottime', 8),
+ print string.rjust('percall', 8),
+ print string.rjust('cumtime', 8),
+ print string.rjust('percall', 8),
+ print 'filename:lineno(function)'
+
+
+ def print_line(self, func): # hack : should print percentages
+ cc, nc, tt, ct, callers = self.stats[func]
+ c = `nc`
+ if nc != cc:
+ c = c + '/' + `cc`
+ print string.rjust(c, 9),
+ print f8(tt),
+ if nc == 0:
+ print ' '*8,
+ else:
+ print f8(tt/nc),
+ print f8(ct),
+ if cc == 0:
+ print ' '*8,
+ else:
+ print f8(ct/cc),
+ print func_std_string(func)
+
+
+ def ignore(self):
+ pass # has no return value, so use at end of line :-)
class TupleComp:
- """This class provides a generic function for comparing any two tuples.
- Each instance records a list of tuple-indices (from most significant
-    to least significant), and sort direction (ascending or descending) for
-    each tuple-index. The compare functions can then be used as the function
-    argument to the system sort() function when a list of tuples needs to be
-    sorted in the instance's order."""
-
- def __init__(self, comp_select_list):
- self.comp_select_list = comp_select_list
-
- def compare (self, left, right):
- for index, direction in self.comp_select_list:
- l = left[index]
- r = right[index]
- if l < r:
- return -direction
- if l > r:
- return direction
- return 0
-
-
+ """This class provides a generic function for comparing any two tuples.
+ Each instance records a list of tuple-indices (from most significant
+    to least significant), and sort direction (ascending or descending) for
+    each tuple-index. The compare functions can then be used as the function
+    argument to the system sort() function when a list of tuples needs to be
+    sorted in the instance's order."""
+
+ def __init__(self, comp_select_list):
+ self.comp_select_list = comp_select_list
+
+ def compare (self, left, right):
+ for index, direction in self.comp_select_list:
+ l = left[index]
+ r = right[index]
+ if l < r:
+ return -direction
+ if l > r:
+ return direction
+ return 0
+
+
#**************************************************************************
def func_strip_path(func_name):
- file, line, name = func_name
- return os.path.basename(file), line, name
+ file, line, name = func_name
+ return os.path.basename(file), line, name
def func_get_function_name(func):
- return func[2]
+ return func[2]
def func_std_string(func_name): # match what old profile produced
- file, line, name = func_name
- return file + ":" + `line` + "(" + name + ")"
+ file, line, name = func_name
+ return file + ":" + `line` + "(" + name + ")"
def func_split(func_name):
- return func_name
+ return func_name
#**************************************************************************
# The following functions combine statistics for pairs of functions.
# The bulk of the processing involves correctly handling "call" lists,
-# such as callers and callees.
+# such as callers and callees.
#**************************************************************************
def add_func_stats(target, source):
- """Add together all the stats for two profile entries."""
- cc, nc, tt, ct, callers = source
- t_cc, t_nc, t_tt, t_ct, t_callers = target
- return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct, \
- add_callers(t_callers, callers))
+ """Add together all the stats for two profile entries."""
+ cc, nc, tt, ct, callers = source
+ t_cc, t_nc, t_tt, t_ct, t_callers = target
+ return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct, \
+ add_callers(t_callers, callers))
def add_callers(target, source):
- """Combine two caller lists in a single list."""
- new_callers = {}
- for func in target.keys():
- new_callers[func] = target[func]
- for func in source.keys():
- if new_callers.has_key(func):
- new_callers[func] = source[func] + new_callers[func]
- else:
- new_callers[func] = source[func]
- return new_callers
+ """Combine two caller lists in a single list."""
+ new_callers = {}
+ for func in target.keys():
+ new_callers[func] = target[func]
+ for func in source.keys():
+ if new_callers.has_key(func):
+ new_callers[func] = source[func] + new_callers[func]
+ else:
+ new_callers[func] = source[func]
+ return new_callers
def count_calls(callers):
- """Sum the caller statistics to get total number of calls received."""
- nc = 0
- for func in callers.keys():
- nc = nc + callers[func]
- return nc
+ """Sum the caller statistics to get total number of calls received."""
+ nc = 0
+ for func in callers.keys():
+ nc = nc + callers[func]
+ return nc
#**************************************************************************
# The following functions support printing of reports
#**************************************************************************
def f8(x):
- return string.rjust(fpformat.fix(x, 3), 8)
-
+ return string.rjust(fpformat.fix(x, 3), 8)
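
For readers of the pstats changes above: the Stats docstring describes an API that loads profile dump files, merges them, sorts the entries, and prints reports. A minimal usage sketch, not part of this patch; the dump file names are hypothetical and assumed to have been produced by profile.run(statement, filename):

    import pstats

    s = pstats.Stats('run1.prof')        # load one profile dump
    s.add('run2.prof')                   # fold in the stats from a second run
    s.strip_dirs()                       # drop leading path components from file names
    s.sort_stats('cumulative', 'name')   # major key: cumulative time, minor key: function name
    s.print_stats(.25)                   # a float in [0, 1) prints that fraction of the lines
    s.print_callers(10)                  # an integer prints at most that many lines

Since every method returns self, the same sequence can also be written as one chained expression, as the docstring's own example shows.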
diff --git a/Lib/pty.py b/Lib/pty.py
index 12c9093..ef2113b 100644
--- a/Lib/pty.py
+++ b/Lib/pty.py
@@ -1,9 +1,9 @@
"""Pseudo terminal utilities."""
# Bugs: No signal handling. Doesn't set slave termios and window size.
-# Only tested on Linux.
-# See: W. Richard Stevens. 1992. Advanced Programming in the
-# UNIX Environment. Chapter 19.
+# Only tested on Linux.
+# See: W. Richard Stevens. 1992. Advanced Programming in the
+# UNIX Environment. Chapter 19.
# Author: Steen Lumholt -- with additions by Guido.
from select import select
@@ -17,133 +17,133 @@ STDERR_FILENO = 2
CHILD = 0
def openpty():
- """openpty() -> (master_fd, slave_fd)
- Open a pty master/slave pair, using os.openpty() if possible."""
+ """openpty() -> (master_fd, slave_fd)
+ Open a pty master/slave pair, using os.openpty() if possible."""
- try:
- return os.openpty()
- except (AttributeError, OSError):
- pass
- master_fd, slave_name = _open_terminal()
- slave_fd = slave_open(slave_name)
- return master_fd, slave_fd
+ try:
+ return os.openpty()
+ except (AttributeError, OSError):
+ pass
+ master_fd, slave_name = _open_terminal()
+ slave_fd = slave_open(slave_name)
+ return master_fd, slave_fd
def master_open():
- """master_open() -> (master_fd, slave_name)
- Open a pty master and return the fd, and the filename of the slave end.
- Deprecated, use openpty() instead."""
+ """master_open() -> (master_fd, slave_name)
+ Open a pty master and return the fd, and the filename of the slave end.
+ Deprecated, use openpty() instead."""
- try:
- master_fd, slave_fd = os.openpty()
- except (AttributeError, OSError):
- pass
- else:
- slave_name = os.ttyname(slave_fd)
- os.close(slave_fd)
- return master_fd, slave_name
+ try:
+ master_fd, slave_fd = os.openpty()
+ except (AttributeError, OSError):
+ pass
+ else:
+ slave_name = os.ttyname(slave_fd)
+ os.close(slave_fd)
+ return master_fd, slave_name
- return _open_terminal()
+ return _open_terminal()
def _open_terminal():
- """Open pty master and return (master_fd, tty_name).
- SGI and generic BSD version, for when openpty() fails."""
- try:
- import sgi
- except ImportError:
- pass
- else:
- try:
- tty_name, master_fd = sgi._getpty(FCNTL.O_RDWR, 0666, 0)
- except IOError, msg:
- raise os.error, msg
- return master_fd, tty_name
- for x in 'pqrstuvwxyzPQRST':
- for y in '0123456789abcdef':
- pty_name = '/dev/pty' + x + y
- try:
- fd = os.open(pty_name, FCNTL.O_RDWR)
- except os.error:
- continue
- return (fd, '/dev/tty' + x + y)
- raise os.error, 'out of pty devices'
+ """Open pty master and return (master_fd, tty_name).
+ SGI and generic BSD version, for when openpty() fails."""
+ try:
+ import sgi
+ except ImportError:
+ pass
+ else:
+ try:
+ tty_name, master_fd = sgi._getpty(FCNTL.O_RDWR, 0666, 0)
+ except IOError, msg:
+ raise os.error, msg
+ return master_fd, tty_name
+ for x in 'pqrstuvwxyzPQRST':
+ for y in '0123456789abcdef':
+ pty_name = '/dev/pty' + x + y
+ try:
+ fd = os.open(pty_name, FCNTL.O_RDWR)
+ except os.error:
+ continue
+ return (fd, '/dev/tty' + x + y)
+ raise os.error, 'out of pty devices'
def slave_open(tty_name):
- """slave_open(tty_name) -> slave_fd
- Open the pty slave and acquire the controlling terminal, returning
-    the opened file descriptor.
- Deprecated, use openpty() instead."""
+ """slave_open(tty_name) -> slave_fd
+ Open the pty slave and acquire the controlling terminal, returning
+    the opened file descriptor.
+ Deprecated, use openpty() instead."""
- return os.open(tty_name, FCNTL.O_RDWR)
+ return os.open(tty_name, FCNTL.O_RDWR)
def fork():
- """fork() -> (pid, master_fd)
- Fork and make the child a session leader with a controlling terminal."""
-
- try:
- pid, fd = os.forkpty()
- except (AttributeError, OSError):
- pass
- else:
- if pid == CHILD:
- try:
- os.setsid()
- except OSError:
- # os.forkpty() already set us session leader
- pass
- return pid, fd
-
- master_fd, slave_fd = openpty()
- pid = os.fork()
- if pid == CHILD:
- # Establish a new session.
- os.setsid()
- os.close(master_fd)
-
- # Slave becomes stdin/stdout/stderr of child.
- os.dup2(slave_fd, STDIN_FILENO)
- os.dup2(slave_fd, STDOUT_FILENO)
- os.dup2(slave_fd, STDERR_FILENO)
- if (slave_fd > STDERR_FILENO):
- os.close (slave_fd)
-
- # Parent and child process.
- return pid, master_fd
+ """fork() -> (pid, master_fd)
+ Fork and make the child a session leader with a controlling terminal."""
+
+ try:
+ pid, fd = os.forkpty()
+ except (AttributeError, OSError):
+ pass
+ else:
+ if pid == CHILD:
+ try:
+ os.setsid()
+ except OSError:
+ # os.forkpty() already set us session leader
+ pass
+ return pid, fd
+
+ master_fd, slave_fd = openpty()
+ pid = os.fork()
+ if pid == CHILD:
+ # Establish a new session.
+ os.setsid()
+ os.close(master_fd)
+
+ # Slave becomes stdin/stdout/stderr of child.
+ os.dup2(slave_fd, STDIN_FILENO)
+ os.dup2(slave_fd, STDOUT_FILENO)
+ os.dup2(slave_fd, STDERR_FILENO)
+ if (slave_fd > STDERR_FILENO):
+ os.close (slave_fd)
+
+ # Parent and child process.
+ return pid, master_fd
def _writen(fd, data):
- """Write all the data to a descriptor."""
- while data != '':
- n = os.write(fd, data)
- data = data[n:]
+ """Write all the data to a descriptor."""
+ while data != '':
+ n = os.write(fd, data)
+ data = data[n:]
def _read(fd):
- """Default read function."""
- return os.read(fd, 1024)
+ """Default read function."""
+ return os.read(fd, 1024)
def _copy(master_fd, master_read=_read, stdin_read=_read):
- """Parent copy loop.
- Copies
- pty master -> standard output (master_read)
- standard input -> pty master (stdin_read)"""
- while 1:
- rfds, wfds, xfds = select(
- [master_fd, STDIN_FILENO], [], [])
- if master_fd in rfds:
- data = master_read(master_fd)
- os.write(STDOUT_FILENO, data)
- if STDIN_FILENO in rfds:
- data = stdin_read(STDIN_FILENO)
- _writen(master_fd, data)
+ """Parent copy loop.
+ Copies
+ pty master -> standard output (master_read)
+ standard input -> pty master (stdin_read)"""
+ while 1:
+ rfds, wfds, xfds = select(
+ [master_fd, STDIN_FILENO], [], [])
+ if master_fd in rfds:
+ data = master_read(master_fd)
+ os.write(STDOUT_FILENO, data)
+ if STDIN_FILENO in rfds:
+ data = stdin_read(STDIN_FILENO)
+ _writen(master_fd, data)
def spawn(argv, master_read=_read, stdin_read=_read):
- """Create a spawned process."""
- if type(argv) == type(''):
- argv = (argv,)
- pid, master_fd = fork()
- if pid == CHILD:
- apply(os.execlp, (argv[0],) + argv)
- mode = tty.tcgetattr(STDIN_FILENO)
- tty.setraw(STDIN_FILENO)
- try:
- _copy(master_fd, master_read, stdin_read)
- except:
- tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
+ """Create a spawned process."""
+ if type(argv) == type(''):
+ argv = (argv,)
+ pid, master_fd = fork()
+ if pid == CHILD:
+ apply(os.execlp, (argv[0],) + argv)
+ mode = tty.tcgetattr(STDIN_FILENO)
+ tty.setraw(STDIN_FILENO)
+ try:
+ _copy(master_fd, master_read, stdin_read)
+ except:
+ tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
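
As background for the pty changes above, a small sketch of the fork() interface described in its docstring, not part of this patch: the child gets the pty slave as its standard streams, while the parent reads the child's output through the master descriptor. Output passes through the tty line discipline, so newlines typically come back as '\r\n'.

    import os, pty

    pid, master_fd = pty.fork()
    if pid == 0:
        # Child: stdin/stdout/stderr are attached to the pty slave.
        os.execlp('echo', 'echo', 'hello from the pty')
    else:
        # Parent: the master end sees whatever the child writes to its terminal.
        print os.read(master_fd, 1024)
        os.waitpid(pid, 0)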
diff --git a/Lib/pyclbr.py b/Lib/pyclbr.py
index 74b7ff7..43bd32c 100644
--- a/Lib/pyclbr.py
+++ b/Lib/pyclbr.py
@@ -4,7 +4,7 @@ Parse enough of a Python file to recognize class and method
definitions and to find out the superclasses of a class.
The interface consists of a single function:
- readmodule(module, path)
+ readmodule(module, path)
module is the name of a Python module, path is an optional list of
directories where the module is to be searched. If present, path is
prepended to the system search path sys.path.
@@ -15,11 +15,11 @@ are class instances of the class Class defined here.
A class is described by the class Class in this module. Instances
of this class have the following instance variables:
- name -- the name of the class
- super -- a list of super classes (Class instances)
- methods -- a dictionary of methods
- file -- the file in which the class was defined
- lineno -- the line in the file on which the class statement occurred
+ name -- the name of the class
+ super -- a list of super classes (Class instances)
+ methods -- a dictionary of methods
+ file -- the file in which the class was defined
+ lineno -- the line in the file on which the class statement occurred
The dictionary of methods uses the method names as keys and the line
numbers on which the method was defined as values.
If the name of a super class is not recognized, the corresponding
@@ -64,52 +64,52 @@ TABWIDTH = 8
_getnext = re.compile(r"""
(?P<String>
\""" [^"\\]* (?:
- (?: \\. | "(?!"") )
- [^"\\]*
- )*
+ (?: \\. | "(?!"") )
+ [^"\\]*
+ )*
\"""
| ''' [^'\\]* (?:
- (?: \\. | '(?!'') )
- [^'\\]*
- )*
- '''
+ (?: \\. | '(?!'') )
+ [^'\\]*
+ )*
+ '''
)
| (?P<Method>
- ^
- (?P<MethodIndent> [ \t]* )
- def [ \t]+
- (?P<MethodName> [a-zA-Z_] \w* )
- [ \t]* \(
+ ^
+ (?P<MethodIndent> [ \t]* )
+ def [ \t]+
+ (?P<MethodName> [a-zA-Z_] \w* )
+ [ \t]* \(
)
| (?P<Class>
- ^
- (?P<ClassIndent> [ \t]* )
- class [ \t]+
- (?P<ClassName> [a-zA-Z_] \w* )
- [ \t]*
- (?P<ClassSupers> \( [^)\n]* \) )?
- [ \t]* :
+ ^
+ (?P<ClassIndent> [ \t]* )
+ class [ \t]+
+ (?P<ClassName> [a-zA-Z_] \w* )
+ [ \t]*
+ (?P<ClassSupers> \( [^)\n]* \) )?
+ [ \t]* :
)
| (?P<Import>
- ^ import [ \t]+
- (?P<ImportList> [^#;\n]+ )
+ ^ import [ \t]+
+ (?P<ImportList> [^#;\n]+ )
)
| (?P<ImportFrom>
- ^ from [ \t]+
- (?P<ImportFromPath>
- [a-zA-Z_] \w*
- (?:
- [ \t]* \. [ \t]* [a-zA-Z_] \w*
- )*
- )
- [ \t]+
- import [ \t]+
- (?P<ImportFromList> [^#;\n]+ )
+ ^ from [ \t]+
+ (?P<ImportFromPath>
+ [a-zA-Z_] \w*
+ (?:
+ [ \t]* \. [ \t]* [a-zA-Z_] \w*
+ )*
+ )
+ [ \t]+
+ import [ \t]+
+ (?P<ImportFromList> [^#;\n]+ )
)
""", re.VERBOSE | re.DOTALL | re.MULTILINE).search
@@ -117,220 +117,220 @@ _modules = {} # cache of modules we've seen
# each Python class is represented by an instance of this class
class Class:
- '''Class to represent a Python class.'''
- def __init__(self, module, name, super, file, lineno):
- self.module = module
- self.name = name
- if super is None:
- super = []
- self.super = super
- self.methods = {}
- self.file = file
- self.lineno = lineno
-
- def _addmethod(self, name, lineno):
- self.methods[name] = lineno
+ '''Class to represent a Python class.'''
+ def __init__(self, module, name, super, file, lineno):
+ self.module = module
+ self.name = name
+ if super is None:
+ super = []
+ self.super = super
+ self.methods = {}
+ self.file = file
+ self.lineno = lineno
+
+ def _addmethod(self, name, lineno):
+ self.methods[name] = lineno
class Function(Class):
- '''Class to represent a top-level Python function'''
- def __init__(self, module, name, file, lineno):
- Class.__init__(self, module, name, None, file, lineno)
- def _addmethod(self, name, lineno):
- assert 0, "Function._addmethod() shouldn't be called"
+ '''Class to represent a top-level Python function'''
+ def __init__(self, module, name, file, lineno):
+ Class.__init__(self, module, name, None, file, lineno)
+ def _addmethod(self, name, lineno):
+ assert 0, "Function._addmethod() shouldn't be called"
def readmodule(module, path=[], inpackage=0):
- '''Backwards compatible interface.
+ '''Backwards compatible interface.
- Like readmodule_ex() but strips Function objects from the
- resulting dictionary.'''
+ Like readmodule_ex() but strips Function objects from the
+ resulting dictionary.'''
- dict = readmodule_ex(module, path, inpackage)
- res = {}
- for key, value in dict.items():
- if not isinstance(value, Function):
- res[key] = value
- return res
+ dict = readmodule_ex(module, path, inpackage)
+ res = {}
+ for key, value in dict.items():
+ if not isinstance(value, Function):
+ res[key] = value
+ return res
def readmodule_ex(module, path=[], inpackage=0):
- '''Read a module file and return a dictionary of classes.
-
- Search for MODULE in PATH and sys.path, read and parse the
- module and return a dictionary with one entry for each class
- found in the module.'''
-
- dict = {}
-
- i = string.rfind(module, '.')
- if i >= 0:
- # Dotted module name
- package = string.strip(module[:i])
- submodule = string.strip(module[i+1:])
- parent = readmodule(package, path, inpackage)
- child = readmodule(submodule, parent['__path__'], 1)
- return child
-
- if _modules.has_key(module):
- # we've seen this module before...
- return _modules[module]
- if module in sys.builtin_module_names:
- # this is a built-in module
- _modules[module] = dict
- return dict
-
- # search the path for the module
- f = None
- if inpackage:
- try:
- f, file, (suff, mode, type) = \
- imp.find_module(module, path)
- except ImportError:
- f = None
- if f is None:
- fullpath = list(path) + sys.path
- f, file, (suff, mode, type) = imp.find_module(module, fullpath)
- if type == imp.PKG_DIRECTORY:
- dict['__path__'] = [file]
- _modules[module] = dict
- path = [file] + path
- f, file, (suff, mode, type) = \
- imp.find_module('__init__', [file])
- if type != imp.PY_SOURCE:
- # not Python source, can't do anything with this module
- f.close()
- _modules[module] = dict
- return dict
-
- _modules[module] = dict
- imports = []
- classstack = [] # stack of (class, indent) pairs
- src = f.read()
- f.close()
-
- # To avoid having to stop the regexp at each newline, instead
- # when we need a line number we simply string.count the number of
- # newlines in the string since the last time we did this; i.e.,
- # lineno = lineno + \
- # string.count(src, '\n', last_lineno_pos, here)
- # last_lineno_pos = here
- countnl = string.count
- lineno, last_lineno_pos = 1, 0
- i = 0
- while 1:
- m = _getnext(src, i)
- if not m:
- break
- start, i = m.span()
-
- if m.start("Method") >= 0:
- # found a method definition or function
- thisindent = _indent(m.group("MethodIndent"))
- meth_name = m.group("MethodName")
- lineno = lineno + \
- countnl(src, '\n',
- last_lineno_pos, start)
- last_lineno_pos = start
- # close all classes indented at least as much
- while classstack and \
- classstack[-1][1] >= thisindent:
- del classstack[-1]
- if classstack:
- # it's a class method
- cur_class = classstack[-1][0]
- cur_class._addmethod(meth_name, lineno)
- else:
- # it's a function
- f = Function(module, meth_name,
- file, lineno)
- dict[meth_name] = f
-
- elif m.start("String") >= 0:
- pass
-
- elif m.start("Class") >= 0:
- # we found a class definition
- thisindent = _indent(m.group("ClassIndent"))
- # close all classes indented at least as much
- while classstack and \
- classstack[-1][1] >= thisindent:
- del classstack[-1]
- lineno = lineno + \
- countnl(src, '\n', last_lineno_pos, start)
- last_lineno_pos = start
- class_name = m.group("ClassName")
- inherit = m.group("ClassSupers")
- if inherit:
- # the class inherits from other classes
- inherit = string.strip(inherit[1:-1])
- names = []
- for n in string.splitfields(inherit, ','):
- n = string.strip(n)
- if dict.has_key(n):
- # we know this super class
- n = dict[n]
- else:
- c = string.splitfields(n, '.')
- if len(c) > 1:
- # super class
- # is of the
- # form module.class:
- # look in
- # module for class
- m = c[-2]
- c = c[-1]
- if _modules.has_key(m):
- d = _modules[m]
- if d.has_key(c):
- n = d[c]
- names.append(n)
- inherit = names
- # remember this class
- cur_class = Class(module, class_name, inherit,
- file, lineno)
- dict[class_name] = cur_class
- classstack.append((cur_class, thisindent))
-
- elif m.start("Import") >= 0:
- # import module
- for n in string.split(m.group("ImportList"), ','):
- n = string.strip(n)
- try:
- # recursively read the imported module
- d = readmodule(n, path, inpackage)
- except:
- ##print 'module', n, 'not found'
- pass
-
- elif m.start("ImportFrom") >= 0:
- # from module import stuff
- mod = m.group("ImportFromPath")
- names = string.split(m.group("ImportFromList"), ',')
- try:
- # recursively read the imported module
- d = readmodule(mod, path, inpackage)
- except:
- ##print 'module', mod, 'not found'
- continue
- # add any classes that were defined in the
- # imported module to our name space if they
- # were mentioned in the list
- for n in names:
- n = string.strip(n)
- if d.has_key(n):
- dict[n] = d[n]
- elif n == '*':
- # only add a name if not
- # already there (to mimic what
- # Python does internally)
- # also don't add names that
- # start with _
- for n in d.keys():
- if n[0] != '_' and \
- not dict.has_key(n):
- dict[n] = d[n]
- else:
- assert 0, "regexp _getnext found something unexpected"
-
- return dict
+ '''Read a module file and return a dictionary of classes.
+
+ Search for MODULE in PATH and sys.path, read and parse the
+ module and return a dictionary with one entry for each class
+ found in the module.'''
+
+ dict = {}
+
+ i = string.rfind(module, '.')
+ if i >= 0:
+ # Dotted module name
+ package = string.strip(module[:i])
+ submodule = string.strip(module[i+1:])
+ parent = readmodule(package, path, inpackage)
+ child = readmodule(submodule, parent['__path__'], 1)
+ return child
+
+ if _modules.has_key(module):
+ # we've seen this module before...
+ return _modules[module]
+ if module in sys.builtin_module_names:
+ # this is a built-in module
+ _modules[module] = dict
+ return dict
+
+ # search the path for the module
+ f = None
+ if inpackage:
+ try:
+ f, file, (suff, mode, type) = \
+ imp.find_module(module, path)
+ except ImportError:
+ f = None
+ if f is None:
+ fullpath = list(path) + sys.path
+ f, file, (suff, mode, type) = imp.find_module(module, fullpath)
+ if type == imp.PKG_DIRECTORY:
+ dict['__path__'] = [file]
+ _modules[module] = dict
+ path = [file] + path
+ f, file, (suff, mode, type) = \
+ imp.find_module('__init__', [file])
+ if type != imp.PY_SOURCE:
+ # not Python source, can't do anything with this module
+ f.close()
+ _modules[module] = dict
+ return dict
+
+ _modules[module] = dict
+ imports = []
+ classstack = [] # stack of (class, indent) pairs
+ src = f.read()
+ f.close()
+
+ # To avoid having to stop the regexp at each newline, instead
+ # when we need a line number we simply string.count the number of
+ # newlines in the string since the last time we did this; i.e.,
+ # lineno = lineno + \
+ # string.count(src, '\n', last_lineno_pos, here)
+ # last_lineno_pos = here
+ countnl = string.count
+ lineno, last_lineno_pos = 1, 0
+ i = 0
+ while 1:
+ m = _getnext(src, i)
+ if not m:
+ break
+ start, i = m.span()
+
+ if m.start("Method") >= 0:
+ # found a method definition or function
+ thisindent = _indent(m.group("MethodIndent"))
+ meth_name = m.group("MethodName")
+ lineno = lineno + \
+ countnl(src, '\n',
+ last_lineno_pos, start)
+ last_lineno_pos = start
+ # close all classes indented at least as much
+ while classstack and \
+ classstack[-1][1] >= thisindent:
+ del classstack[-1]
+ if classstack:
+ # it's a class method
+ cur_class = classstack[-1][0]
+ cur_class._addmethod(meth_name, lineno)
+ else:
+ # it's a function
+ f = Function(module, meth_name,
+ file, lineno)
+ dict[meth_name] = f
+
+ elif m.start("String") >= 0:
+ pass
+
+ elif m.start("Class") >= 0:
+ # we found a class definition
+ thisindent = _indent(m.group("ClassIndent"))
+ # close all classes indented at least as much
+ while classstack and \
+ classstack[-1][1] >= thisindent:
+ del classstack[-1]
+ lineno = lineno + \
+ countnl(src, '\n', last_lineno_pos, start)
+ last_lineno_pos = start
+ class_name = m.group("ClassName")
+ inherit = m.group("ClassSupers")
+ if inherit:
+ # the class inherits from other classes
+ inherit = string.strip(inherit[1:-1])
+ names = []
+ for n in string.splitfields(inherit, ','):
+ n = string.strip(n)
+ if dict.has_key(n):
+ # we know this super class
+ n = dict[n]
+ else:
+ c = string.splitfields(n, '.')
+ if len(c) > 1:
+ # super class
+ # is of the
+ # form module.class:
+ # look in
+ # module for class
+ m = c[-2]
+ c = c[-1]
+ if _modules.has_key(m):
+ d = _modules[m]
+ if d.has_key(c):
+ n = d[c]
+ names.append(n)
+ inherit = names
+ # remember this class
+ cur_class = Class(module, class_name, inherit,
+ file, lineno)
+ dict[class_name] = cur_class
+ classstack.append((cur_class, thisindent))
+
+ elif m.start("Import") >= 0:
+ # import module
+ for n in string.split(m.group("ImportList"), ','):
+ n = string.strip(n)
+ try:
+ # recursively read the imported module
+ d = readmodule(n, path, inpackage)
+ except:
+ ##print 'module', n, 'not found'
+ pass
+
+ elif m.start("ImportFrom") >= 0:
+ # from module import stuff
+ mod = m.group("ImportFromPath")
+ names = string.split(m.group("ImportFromList"), ',')
+ try:
+ # recursively read the imported module
+ d = readmodule(mod, path, inpackage)
+ except:
+ ##print 'module', mod, 'not found'
+ continue
+ # add any classes that were defined in the
+ # imported module to our name space if they
+ # were mentioned in the list
+ for n in names:
+ n = string.strip(n)
+ if d.has_key(n):
+ dict[n] = d[n]
+ elif n == '*':
+ # only add a name if not
+ # already there (to mimic what
+ # Python does internally)
+ # also don't add names that
+ # start with _
+ for n in d.keys():
+ if n[0] != '_' and \
+ not dict.has_key(n):
+ dict[n] = d[n]
+ else:
+ assert 0, "regexp _getnext found something unexpected"
+
+ return dict
def _indent(ws, _expandtabs=string.expandtabs):
- return len(_expandtabs(ws, TABWIDTH))
+ return len(_expandtabs(ws, TABWIDTH))
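
To illustrate the pyclbr interface documented above, a minimal sketch, not part of this patch ('pipes' is just an arbitrary stdlib module to scan): readmodule() returns a dictionary mapping class names to Class instances, each carrying the file, line number, superclasses, and a methods dictionary as described in the module docstring.

    import pyclbr

    classes = pyclbr.readmodule('pipes')
    for name, cls in classes.items():
        print name, 'defined in', cls.file, 'at line', cls.lineno
        for meth, lineno in cls.methods.items():
            print '    %s at line %d' % (meth, lineno)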
diff --git a/Lib/quopri.py b/Lib/quopri.py
index 80fc6f9..5bb11f6 100755
--- a/Lib/quopri.py
+++ b/Lib/quopri.py
@@ -26,7 +26,7 @@ def encode(input, output, quotetabs):
'input' and 'output' are files with readline() and write() methods.
The 'quotetabs' flag indicates whether tabs should be quoted.
- """
+ """
while 1:
line = input.readline()
if not line: