-rw-r--r--  Doc/lib/liblogging.tex              |  40
-rw-r--r--  Doc/lib/libsmtplib.tex              |   2
-rw-r--r--  Doc/lib/libsocket.tex               |   8
-rw-r--r--  Doc/lib/libtest.tex                 |   2
-rw-r--r--  Lib/BaseHTTPServer.py               |   4
-rw-r--r--  Lib/ctypes/__init__.py              |  30
-rw-r--r--  Lib/ctypes/test/test_win32.py       |  24
-rw-r--r--  Lib/ctypes/util.py                  |  78
-rw-r--r--  Lib/ctypes/wintypes.py              |  12
-rw-r--r--  Lib/idlelib/EditorWindow.py         |   2
-rw-r--r--  Lib/logging/__init__.py             |  27
-rw-r--r--  Lib/logging/handlers.py             |  49
-rwxr-xr-x  Lib/platform.py                     | 370
-rwxr-xr-x  Lib/pydoc.py                        |  19
-rw-r--r--  Lib/sqlite3/dbapi2.py               |   2
-rw-r--r--  Lib/sqlite3/test/factory.py         |  23
-rw-r--r--  Lib/sqlite3/test/regression.py      |  10
-rw-r--r--  Lib/sqlite3/test/types.py           |  27
-rw-r--r--  Lib/subprocess.py                   |   2
-rw-r--r--  Lib/test/test___all__.py            |   1
-rw-r--r--  Lib/test/test_optparse.py           |  12
-rw-r--r--  Lib/test/test_set.py                |  11
-rw-r--r--  Lib/test/test_subprocess.py         |   2
-rw-r--r--  Makefile.pre.in                     |   3
-rw-r--r--  Modules/_ctypes/_ctypes.c           |   2
-rw-r--r--  Modules/_ctypes/libffi_msvc/ffi.c   |   3
-rw-r--r--  Modules/_sqlite/cache.c             |  58
-rw-r--r--  Modules/_sqlite/cache.h             |  30
-rw-r--r--  Modules/_sqlite/connection.c        | 260
-rw-r--r--  Modules/_sqlite/connection.h        |  31
-rw-r--r--  Modules/_sqlite/cursor.c            | 220
-rw-r--r--  Modules/_sqlite/cursor.h            |  34
-rw-r--r--  Modules/_sqlite/microprotocols.c    |  10
-rw-r--r--  Modules/_sqlite/microprotocols.h    |   2
-rw-r--r--  Modules/_sqlite/module.c            | 119
-rw-r--r--  Modules/_sqlite/module.h            |  28
-rw-r--r--  Modules/_sqlite/prepare_protocol.c  |  20
-rw-r--r--  Modules/_sqlite/prepare_protocol.h  |  10
-rw-r--r--  Modules/_sqlite/row.c               |  75
-rw-r--r--  Modules/_sqlite/row.h               |   6
-rw-r--r--  Modules/_sqlite/statement.c         |  54
-rw-r--r--  Modules/_sqlite/statement.h         |  22
-rw-r--r--  Modules/_sqlite/util.c              |  19
-rw-r--r--  Modules/_sqlite/util.h              |   4
-rw-r--r--  Objects/intobject.c                 |   5
-rw-r--r--  Objects/setobject.c                 |   4
-rw-r--r--  Python/Python-ast.c                 |   2
-rwxr-xr-x  Tools/pybench/pybench.py            |  70
48 files changed, 1190 insertions(+), 658 deletions(-)
diff --git a/Doc/lib/liblogging.tex b/Doc/lib/liblogging.tex
index b97854d..4bb0595 100644
--- a/Doc/lib/liblogging.tex
+++ b/Doc/lib/liblogging.tex
@@ -989,10 +989,11 @@ The \class{FileHandler} class, located in the core \module{logging}
package, sends logging output to a disk file. It inherits the output
functionality from \class{StreamHandler}.
-\begin{classdesc}{FileHandler}{filename\optional{, mode}}
+\begin{classdesc}{FileHandler}{filename\optional{, mode\optional{, encoding}}}
Returns a new instance of the \class{FileHandler} class. The specified
file is opened and used as the stream for logging. If \var{mode} is
-not specified, \constant{'a'} is used. By default, the file grows
+not specified, \constant{'a'} is used. If \var{encoding} is not \var{None},
+it is used to open the file with that encoding. By default, the file grows
indefinitely.
\end{classdesc}
@@ -1004,6 +1005,41 @@ Closes the file.
Outputs the record to the file.
\end{methoddesc}
+\subsubsection{WatchedFileHandler}
+
+\versionadded{2.6}
+The \class{WatchedFileHandler} class, located in the \module{logging.handlers}
+module, is a \class{FileHandler} which watches the file it is logging to.
+If the file changes, it is closed and reopened using the file name.
+
+A file change can happen because of usage of programs such as \var{newsyslog}
+and \var{logrotate} which perform log file rotation. This handler, intended
+for use under Unix/Linux, watches the file to see if it has changed since the
+last emit. (A file is deemed to have changed if its device or inode have
+changed.) If the file has changed, the old file stream is closed, and the file
+opened to get a new stream.
+
+This handler is not appropriate for use under Windows, because under Windows
+open log files cannot be moved or renamed - logging opens the files with
+exclusive locks - and so there is no need for such a handler. Furthermore,
+\var{ST_INO} is not supported under Windows; \function{stat()} always returns
+zero for this value.
+
+\begin{classdesc}{WatchedFileHandler}{filename\optional{,mode\optional{,
+ encoding}}}
+Returns a new instance of the \class{WatchedFileHandler} class. The specified
+file is opened and used as the stream for logging. If \var{mode} is
+not specified, \constant{'a'} is used. If \var{encoding} is not \var{None},
+it is used to open the file with that encoding. By default, the file grows
+indefinitely.
+\end{classdesc}
+
+\begin{methoddesc}{emit}{record}
+Outputs the record to the file, but first checks to see if the file has
+changed. If it has, the existing stream is flushed and closed and the file
+opened again, before outputting the record to the file.
+\end{methoddesc}
+
\subsubsection{RotatingFileHandler}
The \class{RotatingFileHandler} class, located in the \module{logging.handlers}
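
Example (not part of the patch): the new encoding argument and the WatchedFileHandler described above can be exercised as follows. A minimal sketch, assuming a 2.6-era interpreter that includes this change; the log file path is illustrative.

    import logging
    import logging.handlers

    # FileHandler now takes an optional encoding as its third argument.
    fh = logging.FileHandler('/tmp/app.log', 'a', 'utf-8')

    # WatchedFileHandler reopens the file if newsyslog/logrotate moves it.
    wh = logging.handlers.WatchedFileHandler('/tmp/app.log')

    logger = logging.getLogger('example')
    logger.addHandler(wh)
    logger.warning('this record lands in the new file after a rotation')
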
diff --git a/Doc/lib/libsmtplib.tex b/Doc/lib/libsmtplib.tex
index 962383f..a873a9d 100644
--- a/Doc/lib/libsmtplib.tex
+++ b/Doc/lib/libsmtplib.tex
@@ -185,7 +185,7 @@ or may raise the following exceptions:
The server didn't reply properly to the \samp{HELO} greeting.
\item[\exception{SMTPAuthenticationError}]
The server didn't accept the username/password combination.
- \item[\exception{SMTPError}]
+ \item[\exception{SMTPException}]
No suitable authentication method was found.
\end{description}
\end{methoddesc}
diff --git a/Doc/lib/libsocket.tex b/Doc/lib/libsocket.tex
index 69877d3..1a231d3 100644
--- a/Doc/lib/libsocket.tex
+++ b/Doc/lib/libsocket.tex
@@ -331,25 +331,25 @@ Availability: \UNIX.
\end{funcdesc}
\begin{funcdesc}{ntohl}{x}
-Convert 32-bit integers from network to host byte order. On machines
+Convert 32-bit positive integers from network to host byte order. On machines
where the host byte order is the same as network byte order, this is a
no-op; otherwise, it performs a 4-byte swap operation.
\end{funcdesc}
\begin{funcdesc}{ntohs}{x}
-Convert 16-bit integers from network to host byte order. On machines
+Convert 16-bit positive integers from network to host byte order. On machines
where the host byte order is the same as network byte order, this is a
no-op; otherwise, it performs a 2-byte swap operation.
\end{funcdesc}
\begin{funcdesc}{htonl}{x}
-Convert 32-bit integers from host to network byte order. On machines
+Convert 32-bit positive integers from host to network byte order. On machines
where the host byte order is the same as network byte order, this is a
no-op; otherwise, it performs a 4-byte swap operation.
\end{funcdesc}
\begin{funcdesc}{htons}{x}
-Convert 16-bit integers from host to network byte order. On machines
+Convert 16-bit positive integers from host to network byte order. On machines
where the host byte order is the same as network byte order, this is a
no-op; otherwise, it performs a 2-byte swap operation.
\end{funcdesc}
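
Example (not part of the patch): the functions documented above only deal with positive integers; a round trip through the host/network conversions is always an identity, while a single conversion swaps bytes only on little-endian hosts.

    import socket

    # Round trips are identity operations regardless of endianness.
    assert socket.ntohs(socket.htons(0x1234)) == 0x1234
    assert socket.ntohl(socket.htonl(0x12345678)) == 0x12345678

    # A single conversion swaps bytes on little-endian machines only.
    print(socket.htons(1))   # 256 on little-endian hosts, 1 on big-endian
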
diff --git a/Doc/lib/libtest.tex b/Doc/lib/libtest.tex
index f89c707..f30b49b 100644
--- a/Doc/lib/libtest.tex
+++ b/Doc/lib/libtest.tex
@@ -281,6 +281,7 @@ Execute the \class{unittest.TestSuite} instance \var{suite}.
The optional argument \var{testclass} accepts one of the test classes in the
suite so as to print out more detailed information on where the testing suite
originated from.
+\end{funcdesc}
The \module{test.test_support} module defines the following classes:
@@ -299,4 +300,3 @@ Temporarily set the environment variable \code{envvar} to the value of
Temporarily unset the environment variable \code{envvar}.
\end{methoddesc}
-\end{funcdesc}
diff --git a/Lib/BaseHTTPServer.py b/Lib/BaseHTTPServer.py
index 396e4d6..e4e1a14 100644
--- a/Lib/BaseHTTPServer.py
+++ b/Lib/BaseHTTPServer.py
@@ -396,7 +396,7 @@ class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
self.log_message('"%s" %s %s',
self.requestline, str(code), str(size))
- def log_error(self, *args):
+ def log_error(self, format, *args):
"""Log an error.
This is called when a request cannot be fulfilled. By
@@ -408,7 +408,7 @@ class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
"""
- self.log_message(*args)
+ self.log_message(format, *args)
def log_message(self, format, *args):
"""Log an arbitrary message.
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index eb5d97e..efb0c5c 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -5,7 +5,7 @@
import os as _os, sys as _sys
-__version__ = "1.0.1"
+__version__ = "1.1.0"
from _ctypes import Union, Structure, Array
from _ctypes import _Pointer
@@ -133,6 +133,18 @@ elif _os.name == "posix":
from _ctypes import sizeof, byref, addressof, alignment, resize
from _ctypes import _SimpleCData
+def _check_size(typ, typecode=None):
+    # Check sizeof(ctypes_type) against struct.calcsize. This
+ # should protect somewhat against a misconfigured libffi.
+ from struct import calcsize
+ if typecode is None:
+ # Most _type_ codes are the same as used in struct
+ typecode = typ._type_
+ actual, required = sizeof(typ), calcsize(typecode)
+ if actual != required:
+ raise SystemError("sizeof(%s) wrong: %d instead of %d" % \
+ (typ, actual, required))
+
class py_object(_SimpleCData):
_type_ = "O"
def __repr__(self):
@@ -140,18 +152,23 @@ class py_object(_SimpleCData):
return super(py_object, self).__repr__()
except ValueError:
return "%s(<NULL>)" % type(self).__name__
+_check_size(py_object, "P")
class c_short(_SimpleCData):
_type_ = "h"
+_check_size(c_short)
class c_ushort(_SimpleCData):
_type_ = "H"
+_check_size(c_ushort)
class c_long(_SimpleCData):
_type_ = "l"
+_check_size(c_long)
class c_ulong(_SimpleCData):
_type_ = "L"
+_check_size(c_ulong)
if _calcsize("i") == _calcsize("l"):
# if int and long have the same size, make c_int an alias for c_long
@@ -160,15 +177,19 @@ if _calcsize("i") == _calcsize("l"):
else:
class c_int(_SimpleCData):
_type_ = "i"
+ _check_size(c_int)
class c_uint(_SimpleCData):
_type_ = "I"
+ _check_size(c_uint)
class c_float(_SimpleCData):
_type_ = "f"
+_check_size(c_float)
class c_double(_SimpleCData):
_type_ = "d"
+_check_size(c_double)
if _calcsize("l") == _calcsize("q"):
# if long and long long have the same size, make c_longlong an alias for c_long
@@ -177,33 +198,40 @@ if _calcsize("l") == _calcsize("q"):
else:
class c_longlong(_SimpleCData):
_type_ = "q"
+ _check_size(c_longlong)
class c_ulonglong(_SimpleCData):
_type_ = "Q"
## def from_param(cls, val):
## return ('d', float(val), val)
## from_param = classmethod(from_param)
+ _check_size(c_ulonglong)
class c_ubyte(_SimpleCData):
_type_ = "B"
c_ubyte.__ctype_le__ = c_ubyte.__ctype_be__ = c_ubyte
# backward compatibility:
##c_uchar = c_ubyte
+_check_size(c_ubyte)
class c_byte(_SimpleCData):
_type_ = "b"
c_byte.__ctype_le__ = c_byte.__ctype_be__ = c_byte
+_check_size(c_byte)
class c_char(_SimpleCData):
_type_ = "c"
c_char.__ctype_le__ = c_char.__ctype_be__ = c_char
+_check_size(c_char)
class c_char_p(_SimpleCData):
_type_ = "z"
+_check_size(c_char_p, "P")
class c_void_p(_SimpleCData):
_type_ = "P"
c_voidp = c_void_p # backwards compatibility (to a bug)
+_check_size(c_void_p)
# This cache maps types to pointers to them.
_pointer_type_cache = {}
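
Example (not part of the patch): the _check_size() helper added above compares sizeof() of a ctypes type with struct.calcsize() for the matching format code. The same consistency check can be written with public APIs only; a minimal sketch.

    import struct
    from ctypes import sizeof, c_short, c_long, c_double, c_void_p

    # struct format codes matching the ctypes _type_ codes used above;
    # pointer-like types ("z", "P", "O") all map to calcsize("P").
    pairs = [(c_short, "h"), (c_long, "l"), (c_double, "d"), (c_void_p, "P")]

    for ctype, fmt in pairs:
        assert sizeof(ctype) == struct.calcsize(fmt), (ctype, fmt)
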
diff --git a/Lib/ctypes/test/test_win32.py b/Lib/ctypes/test/test_win32.py
index 10deaca..057873c 100644
--- a/Lib/ctypes/test/test_win32.py
+++ b/Lib/ctypes/test/test_win32.py
@@ -32,12 +32,32 @@ if sys.platform == "win32" and sizeof(c_void_p) == sizeof(c_int):
# or wrong calling convention
self.assertRaises(ValueError, IsWindow, None)
+if sys.platform == "win32":
+ class FunctionCallTestCase(unittest.TestCase):
+
if is_resource_enabled("SEH"):
def test_SEH(self):
- # Call functions with invalid arguments, and make sure that access violations
- # are trapped and raise an exception.
+ # Call functions with invalid arguments, and make sure
+ # that access violations are trapped and raise an
+ # exception.
self.assertRaises(WindowsError, windll.kernel32.GetModuleHandleA, 32)
+ def test_noargs(self):
+ # This is a special case on win32 x64
+ windll.user32.GetDesktopWindow()
+
+ class TestWintypes(unittest.TestCase):
+ def test_HWND(self):
+ from ctypes import wintypes
+ self.failUnlessEqual(sizeof(wintypes.HWND), sizeof(c_void_p))
+
+ def test_PARAM(self):
+ from ctypes import wintypes
+ self.failUnlessEqual(sizeof(wintypes.WPARAM),
+ sizeof(c_void_p))
+ self.failUnlessEqual(sizeof(wintypes.LPARAM),
+ sizeof(c_void_p))
+
class Structures(unittest.TestCase):
def test_struct_by_value(self):
diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py
index e65646a..46f4c55 100644
--- a/Lib/ctypes/util.py
+++ b/Lib/ctypes/util.py
@@ -46,24 +46,17 @@ elif os.name == "posix":
import re, tempfile, errno
def _findLib_gcc(name):
- expr = '[^\(\)\s]*lib%s\.[^\(\)\s]*' % name
+ expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
fdout, ccout = tempfile.mkstemp()
os.close(fdout)
- cmd = 'if type gcc &>/dev/null; then CC=gcc; else CC=cc; fi;' \
+ cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; else CC=cc; fi;' \
'$CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
try:
- fdout, outfile = tempfile.mkstemp()
- os.close(fdout)
- fd = os.popen(cmd)
- trace = fd.read()
- err = fd.close()
+ f = os.popen(cmd)
+ trace = f.read()
+ f.close()
finally:
try:
- os.unlink(outfile)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- try:
os.unlink(ccout)
except OSError as e:
if e.errno != errno.ENOENT:
@@ -73,29 +66,58 @@ elif os.name == "posix":
return None
return res.group(0)
- def _findLib_ld(name):
- expr = '/[^\(\)\s]*lib%s\.[^\(\)\s]*' % name
- res = re.search(expr, os.popen('/sbin/ldconfig -p 2>/dev/null').read())
- if not res:
- # Hm, this works only for libs needed by the python executable.
- cmd = 'ldd %s 2>/dev/null' % sys.executable
- res = re.search(expr, os.popen(cmd).read())
- if not res:
- return None
- return res.group(0)
-
def _get_soname(f):
+ # assuming GNU binutils / ELF
+ if not f:
+ return None
cmd = "objdump -p -j .dynamic 2>/dev/null " + f
res = re.search(r'\sSONAME\s+([^\s]+)', os.popen(cmd).read())
if not res:
return None
return res.group(1)
- def find_library(name):
- lib = _findLib_ld(name) or _findLib_gcc(name)
- if not lib:
- return None
- return _get_soname(lib)
+ if (sys.platform.startswith("freebsd")
+ or sys.platform.startswith("openbsd")
+ or sys.platform.startswith("dragonfly")):
+
+ def _num_version(libname):
+ # "libxyz.so.MAJOR.MINOR" => [ MAJOR, MINOR ]
+ parts = libname.split(".")
+ nums = []
+ try:
+ while parts:
+ nums.insert(0, int(parts.pop()))
+ except ValueError:
+ pass
+ return nums or [ sys.maxint ]
+
+ def find_library(name):
+ ename = re.escape(name)
+ expr = r':-l%s\.\S+ => \S*/(lib%s\.\S+)' % (ename, ename)
+ res = re.findall(expr,
+ os.popen('/sbin/ldconfig -r 2>/dev/null').read())
+ if not res:
+ return _get_soname(_findLib_gcc(name))
+ res.sort(cmp= lambda x,y: cmp(_num_version(x), _num_version(y)))
+ return res[-1]
+
+ else:
+
+ def _findLib_ldconfig(name):
+ # XXX assuming GLIBC's ldconfig (with option -p)
+ expr = r'/[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
+ res = re.search(expr,
+ os.popen('/sbin/ldconfig -p 2>/dev/null').read())
+ if not res:
+ # Hm, this works only for libs needed by the python executable.
+ cmd = 'ldd %s 2>/dev/null' % sys.executable
+ res = re.search(expr, os.popen(cmd).read())
+ if not res:
+ return None
+ return res.group(0)
+
+ def find_library(name):
+ return _get_soname(_findLib_ldconfig(name) or _findLib_gcc(name))
################################################################
# test code
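
Example (not part of the patch): find_library() as reworked above resolves a bare name to an SONAME via ldconfig, the gcc -Wl,-t trick, and objdump. Typical use from application code; a sketch assuming an ELF system with libm installed, so the exact SONAME ('libm.so.6' on glibc) will vary.

    import ctypes
    from ctypes.util import find_library

    name = find_library("m")          # e.g. 'libm.so.6' on glibc systems
    if name is not None:
        libm = ctypes.CDLL(name)
        libm.sqrt.restype = ctypes.c_double
        libm.sqrt.argtypes = [ctypes.c_double]
        assert libm.sqrt(9.0) == 3.0
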
diff --git a/Lib/ctypes/wintypes.py b/Lib/ctypes/wintypes.py
index 9768233..a0fc0bb 100644
--- a/Lib/ctypes/wintypes.py
+++ b/Lib/ctypes/wintypes.py
@@ -34,8 +34,14 @@ LPCOLESTR = LPOLESTR = OLESTR = c_wchar_p
LPCWSTR = LPWSTR = c_wchar_p
LPCSTR = LPSTR = c_char_p
-WPARAM = c_uint
-LPARAM = c_long
+# WPARAM is defined as UINT_PTR (unsigned type)
+# LPARAM is defined as LONG_PTR (signed type)
+if sizeof(c_long) == sizeof(c_void_p):
+ WPARAM = c_ulong
+ LPARAM = c_long
+elif sizeof(c_longlong) == sizeof(c_void_p):
+ WPARAM = c_ulonglong
+ LPARAM = c_longlong
ATOM = WORD
LANGID = WORD
@@ -48,7 +54,7 @@ LCID = DWORD
################################################################
# HANDLE types
-HANDLE = c_ulong # in the header files: void *
+HANDLE = c_void_p # in the header files: void *
HACCEL = HANDLE
HBITMAP = HANDLE
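
Example (not part of the patch): the wintypes change above makes WPARAM, LPARAM and HANDLE pointer-sized, which is what the win64 port needs. The invariant can be checked like this; Windows only, since ctypes.wintypes is not importable elsewhere.

    import sys
    from ctypes import sizeof, c_void_p

    if sys.platform == "win32":
        from ctypes import wintypes
        # 4 bytes on 32-bit Windows, 8 bytes on win64.
        assert sizeof(wintypes.WPARAM) == sizeof(c_void_p)
        assert sizeof(wintypes.LPARAM) == sizeof(c_void_p)
        assert sizeof(wintypes.HANDLE) == sizeof(c_void_p)
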
diff --git a/Lib/idlelib/EditorWindow.py b/Lib/idlelib/EditorWindow.py
index 400c31c..396e2bd 100644
--- a/Lib/idlelib/EditorWindow.py
+++ b/Lib/idlelib/EditorWindow.py
@@ -819,7 +819,7 @@ class EditorWindow(object):
def close(self):
reply = self.maybesave()
- if reply != "cancel":
+ if str(reply) != "cancel":
self._close()
return reply
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index 615dcc7..1415226 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -41,8 +41,8 @@ except ImportError:
__author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__ = "production"
-__version__ = "0.5.0.0"
-__date__ = "08 January 2007"
+__version__ = "0.5.0.1"
+__date__ = "09 January 2007"
#---------------------------------------------------------------------------
# Miscellaneous module data
@@ -764,17 +764,15 @@ class FileHandler(StreamHandler):
"""
Open the specified file and use it as the stream for logging.
"""
- if codecs is None:
- encoding = None
- if encoding is None:
- stream = open(filename, mode)
- else:
- stream = codecs.open(filename, mode, encoding)
- StreamHandler.__init__(self, stream)
#keep the absolute path, otherwise derived classes which use this
#may come a cropper when the current directory changes
+ if codecs is None:
+ encoding = None
self.baseFilename = os.path.abspath(filename)
self.mode = mode
+ self.encoding = encoding
+ stream = self._open()
+ StreamHandler.__init__(self, stream)
def close(self):
"""
@@ -784,6 +782,17 @@ class FileHandler(StreamHandler):
self.stream.close()
StreamHandler.close(self)
+ def _open(self):
+ """
+ Open the current base file with the (original) mode and encoding.
+ Return the resulting stream.
+ """
+ if self.encoding is None:
+ stream = open(self.baseFilename, self.mode)
+ else:
+ stream = codecs.open(self.baseFilename, self.mode, self.encoding)
+ return stream
+
#---------------------------------------------------------------------------
# Manager classes and functions
#---------------------------------------------------------------------------
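
Example (not part of the patch): factoring the open logic into FileHandler._open() lets subclasses reopen the stream with the original filename, mode and encoding. A sketch of a hypothetical subclass; the class and its reopen() method are illustrative only.

    import logging

    class ReopenableFileHandler(logging.FileHandler):
        """Illustrative subclass that can reopen its file on demand."""
        def reopen(self):
            self.stream.close()
            # _open() reuses self.baseFilename, self.mode and self.encoding.
            self.stream = self._open()

    handler = ReopenableFileHandler('/tmp/example.log', 'a', 'utf-8')
    handler.reopen()
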
diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
index 82896ad..41cbca1 100644
--- a/Lib/logging/handlers.py
+++ b/Lib/logging/handlers.py
@@ -32,6 +32,7 @@ try:
import cPickle as pickle
except ImportError:
import pickle
+from stat import ST_DEV, ST_INO
try:
import codecs
@@ -286,6 +287,54 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
self.stream = open(self.baseFilename, 'w')
self.rolloverAt = self.rolloverAt + self.interval
+class WatchedFileHandler(logging.FileHandler):
+ """
+ A handler for logging to a file, which watches the file
+ to see if it has changed while in use. This can happen because of
+ usage of programs such as newsyslog and logrotate which perform
+ log file rotation. This handler, intended for use under Unix,
+ watches the file to see if it has changed since the last emit.
+ (A file has changed if its device or inode have changed.)
+ If it has changed, the old file stream is closed, and the file
+ opened to get a new stream.
+
+ This handler is not appropriate for use under Windows, because
+ under Windows open files cannot be moved or renamed - logging
+ opens the files with exclusive locks - and so there is no need
+ for such a handler. Furthermore, ST_INO is not supported under
+ Windows; stat always returns zero for this value.
+
+ This handler is based on a suggestion and patch by Chad J.
+ Schroeder.
+ """
+ def __init__(self, filename, mode='a', encoding=None):
+ logging.FileHandler.__init__(self, filename, mode, encoding)
+ stat = os.stat(self.baseFilename)
+ self.dev, self.ino = stat[ST_DEV], stat[ST_INO]
+
+ def emit(self, record):
+ """
+ Emit a record.
+
+ First check if the underlying file has changed, and if it
+ has, close the old stream and reopen the file to get the
+ current stream.
+ """
+ if not os.path.exists(self.baseFilename):
+ stat = None
+ changed = 1
+ else:
+ stat = os.stat(self.baseFilename)
+ changed = (stat[ST_DEV] != self.dev) or (stat[ST_INO] != self.ino)
+ if changed:
+ self.stream.flush()
+ self.stream.close()
+ self.stream = self._open()
+ if stat is None:
+ stat = os.stat(self.baseFilename)
+ self.dev, self.ino = stat[ST_DEV], stat[ST_INO]
+ logging.FileHandler.emit(self, record)
+
class SocketHandler(logging.Handler):
"""
A handler class which writes logging records, in pickle format, to
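
Example (not part of the patch): the device/inode comparison used by WatchedFileHandler.emit() above can be seen in isolation with os.stat(). A minimal sketch, Unix only, using a throwaway path under /tmp.

    import os
    from stat import ST_DEV, ST_INO

    path = '/tmp/watched.log'
    open(path, 'a').close()
    before = os.stat(path)

    os.rename(path, path + '.1')   # what logrotate/newsyslog effectively do
    open(path, 'a').close()        # a fresh file appears under the old name
    after = os.stat(path)

    changed = (before[ST_DEV] != after[ST_DEV]) or (before[ST_INO] != after[ST_INO])
    assert changed                 # the handler would now reopen its stream
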
diff --git a/Lib/platform.py b/Lib/platform.py
index cf58819..ba24b28 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -28,12 +28,15 @@
# Betancourt, Randall Hopper, Karl Putland, John Farrell, Greg
# Andruk, Just van Rossum, Thomas Heller, Mark R. Levinson, Mark
# Hammond, Bill Tutt, Hans Nowak, Uwe Zessin (OpenVMS support),
-# Colin Kong, Trent Mick, Guido van Rossum
+# Colin Kong, Trent Mick, Guido van Rossum, Anthony Baxter
#
# History:
#
# <see CVS and SVN checkin messages for history>
#
+# 1.0.6 - added linux_distribution()
+# 1.0.5 - fixed Java support to allow running the module on Jython
+# 1.0.4 - added IronPython support
# 1.0.3 - added normalization of Windows system name
# 1.0.2 - added more Windows support
# 1.0.1 - reformatted to make doc.py happy
@@ -88,7 +91,7 @@
__copyright__ = """
Copyright (c) 1999-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
- Copyright (c) 2000-2003, eGenix.com Software GmbH; mailto:info@egenix.com
+ Copyright (c) 2000-2007, eGenix.com Software GmbH; mailto:info@egenix.com
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee or royalty is hereby granted,
@@ -107,7 +110,7 @@ __copyright__ = """
"""
-__version__ = '1.0.4'
+__version__ = '1.0.6'
import sys,string,os,re
@@ -136,6 +139,11 @@ def libc_ver(executable=sys.executable,lib='',version='',
The file is read and scanned in chunks of chunksize bytes.
"""
+ if hasattr(os.path, 'realpath'):
+ # Python 2.2 introduced os.path.realpath(); it is used
+ # here to work around problems with Cygwin not being
+ # able to open symlinks for reading
+ executable = os.path.realpath(executable)
f = open(executable,'rb')
binary = f.read(chunksize)
pos = 0
@@ -218,14 +226,69 @@ def _dist_try_harder(distname,version,id):
return distname,version,id
_release_filename = re.compile(r'(\w+)[-_](release|version)')
-_release_version = re.compile(r'([\d.]+)[^(]*(?:\((.+)\))?')
-
-# Note:In supported_dists below we need 'fedora' before 'redhat' as in
-# Fedora redhat-release is a link to fedora-release.
-
-def dist(distname='',version='',id='',
-
- supported_dists=('SuSE', 'debian', 'fedora', 'redhat', 'mandrake')):
+_lsb_release_version = re.compile(r'(.+)'
+ ' release '
+ '([\d.]+)'
+ '[^(]*(?:\((.+)\))?')
+_release_version = re.compile(r'([^0-9]+)'
+ '(?: release )?'
+ '([\d.]+)'
+ '[^(]*(?:\((.+)\))?')
+
+# See also http://www.novell.com/coolsolutions/feature/11251.html
+# and http://linuxmafia.com/faq/Admin/release-files.html
+# and http://data.linux-ntfs.org/rpm/whichrpm
+# and http://www.die.net/doc/linux/man/man1/lsb_release.1.html
+
+_supported_dists = ('SuSE', 'debian', 'fedora', 'redhat', 'centos',
+ 'mandrake', 'rocks', 'slackware', 'yellowdog',
+ 'gentoo', 'UnitedLinux')
+
+def _parse_release_file(firstline):
+
+ # Parse the first line
+ m = _lsb_release_version.match(firstline)
+ if m is not None:
+ # LSB format: "distro release x.x (codename)"
+ return tuple(m.groups())
+
+ # Pre-LSB format: "distro x.x (codename)"
+ m = _release_version.match(firstline)
+ if m is not None:
+ return tuple(m.groups())
+
+    # Unknown format... take the first two words
+ l = string.split(string.strip(firstline))
+ if l:
+ version = l[0]
+ if len(l) > 1:
+ id = l[1]
+ else:
+ id = ''
+ return '', version, id
+
+def _test_parse_release_file():
+
+ for input, output in (
+ # Examples of release file contents:
+        ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64')),
+        ('SUSE LINUX 10.1 (X86-64)', ('SUSE LINUX ', '10.1', 'X86-64')),
+        ('SUSE LINUX 10.1 (i586)', ('SUSE LINUX ', '10.1', 'i586')),
+        ('Fedora Core release 5 (Bordeaux)', ('Fedora Core', '5', 'Bordeaux')),
+        ('Red Hat Linux release 8.0 (Psyche)', ('Red Hat Linux', '8.0', 'Psyche')),
+        ('Red Hat Linux release 9 (Shrike)', ('Red Hat Linux', '9', 'Shrike')),
+        ('Red Hat Enterprise Linux release 4 (Nahant)', ('Red Hat Enterprise Linux', '4', 'Nahant')),
+        ('CentOS release 4', ('CentOS', '4', None)),
+        ('Rocks release 4.2.1 (Cydonia)', ('Rocks', '4.2.1', 'Cydonia')),
+ ):
+ parsed = _parse_release_file(input)
+ if parsed != output:
+ print (input, parsed)
+
+def linux_distribution(distname='', version='', id='',
+
+ supported_dists=_supported_dists,
+ full_distribution_name=1):
""" Tries to determine the name of the Linux OS distribution name.
@@ -233,6 +296,15 @@ def dist(distname='',version='',id='',
/etc and then reverts to _dist_try_harder() in case no
suitable files are found.
+ supported_dists may be given to define the set of Linux
+ distributions to look for. It defaults to a list of currently
+ supported Linux distributions identified by their release file
+ name.
+
+ If full_distribution_name is true (default), the full
+ distribution read from the OS is returned. Otherwise the short
+ name taken from supported_dists is used.
+
Returns a tuple (distname,version,id) which default to the
args given as parameters.
@@ -242,33 +314,50 @@ def dist(distname='',version='',id='',
except os.error:
# Probably not a Unix system
return distname,version,id
+ etc.sort()
for file in etc:
m = _release_filename.match(file)
- if m:
+ if m is not None:
_distname,dummy = m.groups()
if _distname in supported_dists:
distname = _distname
break
else:
return _dist_try_harder(distname,version,id)
- f = open('/etc/'+file,'r')
+
+ # Read the first line
+ f = open('/etc/'+file, 'r')
firstline = f.readline()
f.close()
- m = _release_version.search(firstline)
- if m:
- _version,_id = m.groups()
- if _version:
- version = _version
- if _id:
- id = _id
- else:
- # Unkown format... take the first two words
- l = string.split(string.strip(firstline))
- if l:
- version = l[0]
- if len(l) > 1:
- id = l[1]
- return distname,version,id
+ _distname, _version, _id = _parse_release_file(firstline)
+
+ if _distname and full_distribution_name:
+ distname = _distname
+ if _version:
+ version = _version
+ if _id:
+ id = _id
+ return distname, version, id
+
+# To maintain backwards compatibility:
+
+def dist(distname='',version='',id='',
+
+ supported_dists=_supported_dists):
+
+ """ Tries to determine the name of the Linux OS distribution name.
+
+ The function first looks for a distribution release file in
+ /etc and then reverts to _dist_try_harder() in case no
+ suitable files are found.
+
+ Returns a tuple (distname,version,id) which default to the
+ args given as parameters.
+
+ """
+ return linux_distribution(distname, version, id,
+ supported_dists=supported_dists,
+ full_distribution_name=0)
class _popen:
@@ -357,7 +446,7 @@ def popen(cmd, mode='r', bufsize=None):
else:
return popen(cmd,mode,bufsize)
-def _norm_version(version,build=''):
+def _norm_version(version, build=''):
""" Normalize the version and build strings and return a single
version string using the format major.minor.build (or patchlevel).
@@ -378,7 +467,7 @@ _ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) '
'.*'
'Version ([\d.]+))')
-def _syscmd_ver(system='',release='',version='',
+def _syscmd_ver(system='', release='', version='',
supported_platforms=('win32','win16','dos','os2')):
@@ -418,7 +507,7 @@ def _syscmd_ver(system='',release='',version='',
# Parse the output
info = string.strip(info)
m = _ver_output.match(info)
- if m:
+ if m is not None:
system,release,version = m.groups()
# Strip trailing dots from version and release
if release[-1] == '.':
@@ -615,8 +704,11 @@ def _java_getprop(name,default):
from java.lang import System
try:
- return System.getProperty(name)
- except:
+ value = System.getProperty(name)
+ if value is None:
+ return default
+ return value
+ except AttributeError:
return default
def java_ver(release='',vendor='',vminfo=('','',''),osinfo=('','','')):
@@ -637,20 +729,20 @@ def java_ver(release='',vendor='',vminfo=('','',''),osinfo=('','','')):
except ImportError:
return release,vendor,vminfo,osinfo
- vendor = _java_getprop('java.vendor',vendor)
- release = _java_getprop('java.version',release)
- vm_name,vm_release,vm_vendor = vminfo
- vm_name = _java_getprop('java.vm.name',vm_name)
- vm_vendor = _java_getprop('java.vm.vendor',vm_vendor)
- vm_release = _java_getprop('java.vm.version',vm_release)
- vminfo = vm_name,vm_release,vm_vendor
- os_name,os_version,os_arch = osinfo
- os_arch = _java_getprop('java.os.arch',os_arch)
- os_name = _java_getprop('java.os.name',os_name)
- os_version = _java_getprop('java.os.version',os_version)
- osinfo = os_name,os_version,os_arch
-
- return release,vendor,vminfo,osinfo
+ vendor = _java_getprop('java.vendor', vendor)
+ release = _java_getprop('java.version', release)
+ vm_name, vm_release, vm_vendor = vminfo
+ vm_name = _java_getprop('java.vm.name', vm_name)
+ vm_vendor = _java_getprop('java.vm.vendor', vm_vendor)
+ vm_release = _java_getprop('java.vm.version', vm_release)
+ vminfo = vm_name, vm_release, vm_vendor
+ os_name, os_version, os_arch = osinfo
+ os_arch = _java_getprop('java.os.arch', os_arch)
+ os_name = _java_getprop('java.os.name', os_name)
+ os_version = _java_getprop('java.os.version', os_version)
+ osinfo = os_name, os_version, os_arch
+
+ return release, vendor, vminfo, osinfo
### System name aliasing
@@ -716,7 +808,7 @@ def _platform(*args):
# Format the platform string
platform = string.join(
map(string.strip,
- filter(len,args)),
+ filter(len, args)),
'-')
# Cleanup some possible filename obstacles...
@@ -871,7 +963,10 @@ def architecture(executable=sys.executable,bits='',linkage=''):
bits = str(size*8) + 'bit'
# Get data from the 'file' system command
- output = _syscmd_file(executable,'')
+ if executable:
+ output = _syscmd_file(executable, '')
+ else:
+ output = ''
if not output and \
executable == sys.executable:
@@ -960,6 +1055,10 @@ def uname():
release,version,csd,ptype = win32_ver()
if release and version:
use_syscmd_ver = 0
+ # XXX Should try to parse the PROCESSOR_* environment variables
+ # available on Win XP and later; see
+ # http://support.microsoft.com/kb/888731 and
+ # http://www.geocities.com/rick_lively/MANUALS/ENV/MSWIN/PROCESSI.HTM
# Try the 'ver' system command available on some
# platforms
@@ -1092,36 +1191,136 @@ def processor():
### Various APIs for extracting information from sys.version
-_sys_version_parser = re.compile(r'([\w.+]+)\s*'
- '\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*'
- '\[([^\]]+)\]?')
-_sys_version_cache = None
+_sys_version_parser = re.compile(
+ r'([\w.+]+)\s*'
+ '\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*'
+ '\[([^\]]+)\]?')
+
+_jython_sys_version_parser = re.compile(
+ r'([\d\.]+)')
+
+_ironpython_sys_version_parser = re.compile(
+ r'IronPython\s*'
+ '([\d\.]+)'
+ '(?: \(([\d\.]+)\))?'
+ ' on (.NET [\d\.]+)')
-def _sys_version():
+_sys_version_cache = {}
+
+def _sys_version(sys_version=None):
""" Returns a parsed version of Python's sys.version as tuple
- (version, buildno, builddate, compiler) referring to the Python
- version, build number, build date/time as string and the compiler
- identification string.
+ (name, version, branch, revision, buildno, builddate, compiler)
+ referring to the Python implementation name, version, branch,
+ revision, build number, build date/time as string and the compiler
+ identification string.
Note that unlike the Python sys.version, the returned value
for the Python version will always include the patchlevel (it
defaults to '.0').
+ The function returns empty strings for tuple entries that
+ cannot be determined.
+
+ sys_version may be given to parse an alternative version
+ string, e.g. if the version was read from a different Python
+ interpreter.
+
"""
- global _sys_version_cache
+ # Get the Python version
+ if sys_version is None:
+ sys_version = sys.version
- if _sys_version_cache is not None:
- return _sys_version_cache
- version, buildno, builddate, buildtime, compiler = \
- _sys_version_parser.match(sys.version).groups()
- builddate = builddate + ' ' + buildtime
+ # Try the cache first
+ result = _sys_version_cache.get(sys_version, None)
+ if result is not None:
+ return result
+
+ # Parse it
+ if sys_version[:10] == 'IronPython':
+ # IronPython
+ name = 'IronPython'
+ match = _ironpython_sys_version_parser.match(sys_version)
+ if match is None:
+ raise ValueError(
+ 'failed to parse IronPython sys.version: %s' %
+ repr(sys_version))
+ version, alt_version, compiler = match.groups()
+ branch = ''
+ revision = ''
+ buildno = ''
+ builddate = ''
+
+ elif sys.platform[:4] == 'java':
+ # Jython
+ name = 'Jython'
+ match = _jython_sys_version_parser.match(sys_version)
+ if match is None:
+ raise ValueError(
+ 'failed to parse Jython sys.version: %s' %
+ repr(sys_version))
+ version, = match.groups()
+ branch = ''
+ revision = ''
+ compiler = sys.platform
+ buildno = ''
+ builddate = ''
+
+ else:
+ # CPython
+ match = _sys_version_parser.match(sys_version)
+ if match is None:
+ raise ValueError(
+ 'failed to parse CPython sys.version: %s' %
+ repr(sys_version))
+ version, buildno, builddate, buildtime, compiler = \
+ match.groups()
+ if hasattr(sys, 'subversion'):
+ # sys.subversion was added in Python 2.5
+ name, branch, revision = sys.subversion
+ else:
+ name = 'CPython'
+ branch = ''
+ revision = ''
+ builddate = builddate + ' ' + buildtime
+
+ # Add the patchlevel version if missing
l = string.split(version, '.')
if len(l) == 2:
l.append('0')
version = string.join(l, '.')
- _sys_version_cache = (version, buildno, builddate, compiler)
- return _sys_version_cache
+
+ # Build and cache the result
+ result = (name, version, branch, revision, buildno, builddate, compiler)
+ _sys_version_cache[sys_version] = result
+ return result
+
+def _test_sys_version():
+
+ _sys_version_cache.clear()
+ for input, output in (
+ ('2.4.3 (#1, Jun 21 2006, 13:54:21) \n[GCC 3.3.4 (pre 3.3.5 20040809)]',
+ ('CPython', '2.4.3', '', '', '1', 'Jun 21 2006 13:54:21', 'GCC 3.3.4 (pre 3.3.5 20040809)')),
+ ('IronPython 1.0.60816 on .NET 2.0.50727.42',
+ ('IronPython', '1.0.60816', '', '', '', '', '.NET 2.0.50727.42')),
+ ('IronPython 1.0 (1.0.61005.1977) on .NET 2.0.50727.42',
+ ('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')),
+ ):
+ parsed = _sys_version(input)
+ if parsed != output:
+ print (input, parsed)
+
+def python_implementation():
+
+ """ Returns a string identifying the Python implementation.
+
+ Currently, the following implementations are identified:
+ 'CPython' (C implementation of Python),
+ 'IronPython' (.NET implementation of Python),
+ 'Jython' (Java implementation of Python).
+
+ """
+ return _sys_version()[0]
def python_version():
@@ -1131,7 +1330,9 @@ def python_version():
will always include the patchlevel (it defaults to 0).
"""
- return _sys_version()[0]
+ if hasattr(sys, 'version_info'):
+ return '%i.%i.%i' % sys.version_info[:3]
+ return _sys_version()[1]
def python_version_tuple():
@@ -1142,7 +1343,36 @@ def python_version_tuple():
will always include the patchlevel (it defaults to 0).
"""
- return string.split(_sys_version()[0], '.')
+ if hasattr(sys, 'version_info'):
+ return sys.version_info[:3]
+ return tuple(string.split(_sys_version()[1], '.'))
+
+def python_branch():
+
+ """ Returns a string identifying the Python implementation
+ branch.
+
+ For CPython this is the Subversion branch from which the
+ Python binary was built.
+
+ If not available, an empty string is returned.
+
+ """
+
+ return _sys_version()[2]
+
+def python_revision():
+
+ """ Returns a string identifying the Python implementation
+ revision.
+
+ For CPython this is the Subversion revision from which the
+ Python binary was built.
+
+ If not available, an empty string is returned.
+
+ """
+ return _sys_version()[3]
def python_build():
@@ -1150,7 +1380,7 @@ def python_build():
build number and date as strings.
"""
- return _sys_version()[1:3]
+ return _sys_version()[4:6]
def python_compiler():
@@ -1158,7 +1388,7 @@ def python_compiler():
Python.
"""
- return _sys_version()[3]
+ return _sys_version()[6]
### The Opus Magnum of platform strings :-)
@@ -1219,7 +1449,7 @@ def platform(aliased=0, terse=0):
elif system == 'Java':
# Java platforms
r,v,vminfo,(os_name,os_version,os_arch) = java_ver()
- if terse:
+ if terse or not os_name:
platform = _platform(system,release,version)
else:
platform = _platform(system,release,version,
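
Example (not part of the patch): the new platform.py entry points added above are thin wrappers around the parsers shown in the diff. Typical calls; a sketch assuming a 1.0.6-era platform.py, with the commented values purely illustrative.

    import platform

    print(platform.python_implementation())   # e.g. 'CPython'
    print(platform.python_version())           # e.g. '2.6.0'
    print(platform.python_branch())            # Subversion branch, may be ''
    print(platform.python_revision())          # Subversion revision, may be ''

    # Full distribution name by default; pass full_distribution_name=0
    # (as the backwards-compatible dist() does) for the short name.
    print(platform.linux_distribution())
    print(platform.dist())
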
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index 892e278..dd10334 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1448,6 +1448,9 @@ def locate(path, forceload=0):
text = TextDoc()
html = HTMLDoc()
+class _OldStyleClass: pass
+_OLD_INSTANCE_TYPE = type(_OldStyleClass())
+
def resolve(thing, forceload=0):
"""Given an object or a path to an object, get the object and its name."""
if isinstance(thing, str):
@@ -1468,12 +1471,16 @@ def doc(thing, title='Python Library Documentation: %s', forceload=0):
desc += ' in ' + name[:name.rfind('.')]
elif module and module is not object:
desc += ' in module ' + module.__name__
- if not (inspect.ismodule(object) or
- inspect.isclass(object) or
- inspect.isroutine(object) or
- inspect.isgetsetdescriptor(object) or
- inspect.ismemberdescriptor(object) or
- isinstance(object, property)):
+ if type(object) is _OLD_INSTANCE_TYPE:
+ # If the passed object is an instance of an old-style class,
+ # document its available methods instead of its value.
+ object = object.__class__
+ elif not (inspect.ismodule(object) or
+ inspect.isclass(object) or
+ inspect.isroutine(object) or
+ inspect.isgetsetdescriptor(object) or
+ inspect.ismemberdescriptor(object) or
+ isinstance(object, property)):
# If the passed object is a piece of data or an instance,
# document its available methods instead of its value.
object = type(object)
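
Example (not part of the patch): the pydoc change above special-cases instances of old-style classes, whose type() is the shared instance type rather than the class itself. A minimal sketch of that distinction; Python 2 only, since old-style classes are gone in Python 3.

    import types

    class Old:                      # old-style class: no 'object' base
        def method(self): pass

    class New(object):              # new-style class
        def method(self): pass

    # type() of an old-style instance is the generic InstanceType, so pydoc
    # has to look at __class__ to find the methods worth documenting.
    assert type(Old()) is types.InstanceType
    assert Old().__class__ is Old
    assert type(New()) is New
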
diff --git a/Lib/sqlite3/dbapi2.py b/Lib/sqlite3/dbapi2.py
index 9b7d56d..7eb28e8 100644
--- a/Lib/sqlite3/dbapi2.py
+++ b/Lib/sqlite3/dbapi2.py
@@ -68,7 +68,7 @@ def register_adapters_and_converters():
timepart_full = timepart.split(".")
hours, minutes, seconds = map(int, timepart_full[0].split(":"))
if len(timepart_full) == 2:
- microseconds = int(float("0." + timepart_full[1]) * 1000000)
+ microseconds = int(timepart_full[1])
else:
microseconds = 0
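
Example (not part of the patch): the converter change above reads the fractional part of a timestamp as a literal microsecond count instead of multiplying a float, so sub-second values round-trip exactly (see the CheckDateTimeSubSecondsFloatingPoint test added further down). A sketch using the default timestamp converter with PARSE_DECLTYPES.

    import sqlite3
    import datetime

    con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
    cur = con.cursor()
    cur.execute("create table t (ts timestamp)")

    ts = datetime.datetime(2004, 2, 14, 7, 15, 0, 510241)
    cur.execute("insert into t (ts) values (?)", (ts,))
    cur.execute("select ts from t")
    assert cur.fetchone()[0] == ts    # microseconds survive the round trip
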
diff --git a/Lib/sqlite3/test/factory.py b/Lib/sqlite3/test/factory.py
index 8778056..8a77d5d 100644
--- a/Lib/sqlite3/test/factory.py
+++ b/Lib/sqlite3/test/factory.py
@@ -91,7 +91,7 @@ class RowFactoryTests(unittest.TestCase):
list),
"row is not instance of list")
- def CheckSqliteRow(self):
+ def CheckSqliteRowIndex(self):
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
self.failUnless(isinstance(row,
@@ -110,6 +110,27 @@ class RowFactoryTests(unittest.TestCase):
self.failUnless(col1 == 1, "by index: wrong result for column 0")
self.failUnless(col2 == 2, "by index: wrong result for column 1")
+ def CheckSqliteRowIter(self):
+ """Checks if the row object is iterable"""
+ self.con.row_factory = sqlite.Row
+ row = self.con.execute("select 1 as a, 2 as b").fetchone()
+ for col in row:
+ pass
+
+ def CheckSqliteRowAsTuple(self):
+ """Checks if the row object can be converted to a tuple"""
+ self.con.row_factory = sqlite.Row
+ row = self.con.execute("select 1 as a, 2 as b").fetchone()
+ t = tuple(row)
+
+ def CheckSqliteRowAsDict(self):
+ """Checks if the row object can be correctly converted to a dictionary"""
+ self.con.row_factory = sqlite.Row
+ row = self.con.execute("select 1 as a, 2 as b").fetchone()
+ d = dict(row)
+ self.failUnlessEqual(d["a"], row["a"])
+ self.failUnlessEqual(d["b"], row["b"])
+
def tearDown(self):
self.con.close()
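
Example (not part of the patch): the new factory tests above exercise the richer sqlite3.Row behaviour (access by index and by name, iteration, tuple() and dict() conversion). From application code that looks like this; a minimal sketch.

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.row_factory = sqlite3.Row

    row = con.execute("select 1 as a, 2 as b").fetchone()

    assert row["a"] == 1 and row[1] == 2    # access by name or by index
    assert tuple(row) == (1, 2)             # Row is iterable
    assert dict(row) == {"a": 1, "b": 2}    # and convertible to a dict
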
diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py
index c8733b9..addedb1 100644
--- a/Lib/sqlite3/test/regression.py
+++ b/Lib/sqlite3/test/regression.py
@@ -69,6 +69,16 @@ class RegressionTests(unittest.TestCase):
cur.execute('select 1 as "foo baz"')
self.failUnlessEqual(cur.description[0][0], "foo baz")
+ def CheckStatementAvailable(self):
+ # pysqlite up to 2.3.2 crashed on this, because the active statement handle was not checked
+ # before trying to fetch data from it. close() destroys the active statement ...
+ con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
+ cur = con.cursor()
+ cur.execute("select 4 union select 5")
+ cur.close()
+ cur.fetchone()
+ cur.fetchone()
+
def suite():
regression_suite = unittest.makeSuite(RegressionTests, "Check")
return unittest.TestSuite((regression_suite,))
diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py
index 9a11f5c..a357b2e 100644
--- a/Lib/sqlite3/test/types.py
+++ b/Lib/sqlite3/test/types.py
@@ -112,6 +112,7 @@ class DeclTypesTests(unittest.TestCase):
# and implement two custom ones
sqlite.converters["BOOL"] = lambda x: bool(int(x))
sqlite.converters["FOO"] = DeclTypesTests.Foo
+ sqlite.converters["WRONG"] = lambda x: "WRONG"
def tearDown(self):
del sqlite.converters["FLOAT"]
@@ -123,7 +124,7 @@ class DeclTypesTests(unittest.TestCase):
def CheckString(self):
# default
self.cur.execute("insert into test(s) values (?)", ("foo",))
- self.cur.execute("select s from test")
+ self.cur.execute('select s as "s [WRONG]" from test')
row = self.cur.fetchone()
self.failUnlessEqual(row[0], "foo")
@@ -210,26 +211,32 @@ class DeclTypesTests(unittest.TestCase):
class ColNamesTests(unittest.TestCase):
def setUp(self):
- self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES|sqlite.PARSE_DECLTYPES)
+ self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES)
self.cur = self.con.cursor()
self.cur.execute("create table test(x foo)")
sqlite.converters["FOO"] = lambda x: "[%s]" % x
sqlite.converters["BAR"] = lambda x: "<%s>" % x
sqlite.converters["EXC"] = lambda x: 5/0
+ sqlite.converters["B1B1"] = lambda x: "MARKER"
def tearDown(self):
del sqlite.converters["FOO"]
del sqlite.converters["BAR"]
del sqlite.converters["EXC"]
+ del sqlite.converters["B1B1"]
self.cur.close()
self.con.close()
- def CheckDeclType(self):
+ def CheckDeclTypeNotUsed(self):
+ """
+ Assures that the declared type is not used when PARSE_DECLTYPES
+ is not set.
+ """
self.cur.execute("insert into test(x) values (?)", ("xxx",))
self.cur.execute("select x from test")
val = self.cur.fetchone()[0]
- self.failUnlessEqual(val, "[xxx]")
+ self.failUnlessEqual(val, "xxx")
def CheckNone(self):
self.cur.execute("insert into test(x) values (?)", (None,))
@@ -247,6 +254,11 @@ class ColNamesTests(unittest.TestCase):
# whitespace should be stripped.
self.failUnlessEqual(self.cur.description[0][0], "x")
+ def CheckCaseInConverterName(self):
+ self.cur.execute("""select 'other' as "x [b1b1]\"""")
+ val = self.cur.fetchone()[0]
+ self.failUnlessEqual(val, "MARKER")
+
def CheckCursorDescriptionNoRow(self):
"""
cursor.description should at least provide the column name(s), even if
@@ -340,6 +352,13 @@ class DateTimeTests(unittest.TestCase):
ts2 = self.cur.fetchone()[0]
self.failUnlessEqual(ts, ts2)
+ def CheckDateTimeSubSecondsFloatingPoint(self):
+ ts = sqlite.Timestamp(2004, 2, 14, 7, 15, 0, 510241)
+ self.cur.execute("insert into test(ts) values (?)", (ts,))
+ self.cur.execute("select ts from test")
+ ts2 = self.cur.fetchone()[0]
+ self.failUnlessEqual(ts, ts2)
+
def suite():
sqlite_type_suite = unittest.makeSuite(SqliteTypeTests, "Check")
decltypes_type_suite = unittest.makeSuite(DeclTypesTests, "Check")
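
Example (not part of the patch): the ColNamesTests changes above rely on the 'colname [converter]' syntax: with PARSE_COLNAMES the converter named in square brackets is applied, the lookup is case-insensitive, and without PARSE_DECLTYPES the declared column type is ignored. A sketch with a hypothetical converter called 'upper' (converters receive the raw value, bytes on Python 3).

    import sqlite3

    # Converter names registered here are matched case-insensitively against
    # the bracketed part of the column name when PARSE_COLNAMES is set.
    sqlite3.register_converter("upper", lambda value: value.upper())

    con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_COLNAMES)
    cur = con.cursor()
    cur.execute('select \'hello\' as "greeting [UPPER]"')
    print(cur.fetchone()[0])    # 'HELLO' (as bytes on Python 3)
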
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index f5800fb..6656890 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -500,7 +500,7 @@ def list2cmdline(seq):
if result:
result.append(' ')
- needquote = (" " in arg) or ("\t" in arg)
+ needquote = (" " in arg) or ("\t" in arg) or arg == ""
if needquote:
result.append('"')
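
Example (not part of the patch): list2cmdline() now quotes an empty argument instead of dropping it, as the new test further down checks. A quick illustration; list2cmdline() is the helper subprocess uses to build a Windows command line.

    import subprocess

    # An empty argument must become "" so the child process still sees it.
    assert subprocess.list2cmdline(['ab', '']) == 'ab ""'
    assert subprocess.list2cmdline(['a b', 'c']) == '"a b" c'
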
diff --git a/Lib/test/test___all__.py b/Lib/test/test___all__.py
index e4f0f44..d8e850a 100644
--- a/Lib/test/test___all__.py
+++ b/Lib/test/test___all__.py
@@ -132,7 +132,6 @@ class AllTest(unittest.TestCase):
self.check_all("rlcompleter")
self.check_all("robotparser")
self.check_all("sched")
- self.check_all("sets")
self.check_all("sgmllib")
self.check_all("shelve")
self.check_all("shlex")
diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py
index aa6525e..e05fbc6 100644
--- a/Lib/test/test_optparse.py
+++ b/Lib/test/test_optparse.py
@@ -1500,8 +1500,16 @@ class TestHelp(BaseTest):
self.assertHelpEquals(_expected_help_long_opts_first)
def test_help_title_formatter(self):
- self.parser.formatter = TitledHelpFormatter()
- self.assertHelpEquals(_expected_help_title_formatter)
+ save = os.environ.get("COLUMNS")
+ try:
+ os.environ["COLUMNS"] = "80"
+ self.parser.formatter = TitledHelpFormatter()
+ self.assertHelpEquals(_expected_help_title_formatter)
+ finally:
+ if save is not None:
+ os.environ["COLUMNS"] = save
+ else:
+ del os.environ["COLUMNS"]
def test_wrap_columns(self):
# Ensure that wrapping respects $COLUMNS environment variable.
diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py
index 0d08b79..a1c797c 100644
--- a/Lib/test/test_set.py
+++ b/Lib/test/test_set.py
@@ -476,6 +476,16 @@ class SetSubclass(set):
class TestSetSubclass(TestSet):
thetype = SetSubclass
+class SetSubclassWithKeywordArgs(set):
+ def __init__(self, iterable=[], newarg=None):
+ set.__init__(self, iterable)
+
+class TestSetSubclassWithKeywordArgs(TestSet):
+
+ def test_keywords_in_subclass(self):
+ 'SF bug #1486663 -- this used to erroneously raise a TypeError'
+ SetSubclassWithKeywordArgs(newarg=1)
+
class TestFrozenSet(TestJointOps):
thetype = frozenset
@@ -1454,6 +1464,7 @@ def test_main(verbose=None):
test_classes = (
TestSet,
TestSetSubclass,
+ TestSetSubclassWithKeywordArgs,
TestFrozenSet,
TestFrozenSetSubclass,
TestSetOfSets,
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index fcc0f45..1b34d61 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -430,6 +430,8 @@ class ProcessTestCase(unittest.TestCase):
'"a\\\\b c" d e')
self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']),
'"a\\\\b\\ c" d e')
+ self.assertEqual(subprocess.list2cmdline(['ab', '']),
+ 'ab ""')
def test_poll(self):
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 19ce69f..e4ee3db 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -484,6 +484,8 @@ Parser/metagrammar.o: $(srcdir)/Parser/metagrammar.c
Parser/tokenizer_pgen.o: $(srcdir)/Parser/tokenizer.c
+Parser/pgenmain.o: $(srcdir)/Include/parsetok.h
+
$(AST_H): $(AST_ASDL) $(ASDLGEN_FILES)
$(ASDLGEN) -h $(AST_H_DIR) $(AST_ASDL)
@@ -537,6 +539,7 @@ PYTHON_HEADERS= \
Include/moduleobject.h \
Include/object.h \
Include/objimpl.h \
+ Include/parsetok.h \
Include/patchlevel.h \
Include/pyarena.h \
Include/pydebug.h \
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index 319d15e..b0768d6 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -4749,7 +4749,7 @@ init_ctypes(void)
#endif
PyModule_AddObject(m, "FUNCFLAG_CDECL", PyInt_FromLong(FUNCFLAG_CDECL));
PyModule_AddObject(m, "FUNCFLAG_PYTHONAPI", PyInt_FromLong(FUNCFLAG_PYTHONAPI));
- PyModule_AddStringConstant(m, "__version__", "1.0.1");
+ PyModule_AddStringConstant(m, "__version__", "1.1.0");
PyModule_AddObject(m, "_memmove_addr", PyLong_FromVoidPtr(memmove));
PyModule_AddObject(m, "_memset_addr", PyLong_FromVoidPtr(memset));
diff --git a/Modules/_ctypes/libffi_msvc/ffi.c b/Modules/_ctypes/libffi_msvc/ffi.c
index 3f23a05..e3e2344 100644
--- a/Modules/_ctypes/libffi_msvc/ffi.c
+++ b/Modules/_ctypes/libffi_msvc/ffi.c
@@ -224,7 +224,8 @@ ffi_call(/*@dependent@*/ ffi_cif *cif,
#else
case FFI_SYSV:
/*@-usedef@*/
- return ffi_call_AMD64(ffi_prep_args, &ecif, cif->bytes,
+ /* Function call needs at least 40 bytes stack size, on win64 AMD64 */
+ return ffi_call_AMD64(ffi_prep_args, &ecif, cif->bytes ? cif->bytes : 40,
cif->flags, ecif.rvalue, fn);
/*@=usedef@*/
break;
diff --git a/Modules/_sqlite/cache.c b/Modules/_sqlite/cache.c
index 6962695..18a4066 100644
--- a/Modules/_sqlite/cache.c
+++ b/Modules/_sqlite/cache.c
@@ -25,11 +25,11 @@
#include <limits.h>
/* only used internally */
-Node* new_node(PyObject* key, PyObject* data)
+pysqlite_Node* pysqlite_new_node(PyObject* key, PyObject* data)
{
- Node* node;
+ pysqlite_Node* node;
- node = (Node*) (NodeType.tp_alloc(&NodeType, 0));
+ node = (pysqlite_Node*) (pysqlite_NodeType.tp_alloc(&pysqlite_NodeType, 0));
if (!node) {
return NULL;
}
@@ -46,7 +46,7 @@ Node* new_node(PyObject* key, PyObject* data)
return node;
}
-void node_dealloc(Node* self)
+void pysqlite_node_dealloc(pysqlite_Node* self)
{
Py_DECREF(self->key);
Py_DECREF(self->data);
@@ -54,7 +54,7 @@ void node_dealloc(Node* self)
self->ob_type->tp_free((PyObject*)self);
}
-int cache_init(Cache* self, PyObject* args, PyObject* kwargs)
+int pysqlite_cache_init(pysqlite_Cache* self, PyObject* args, PyObject* kwargs)
{
PyObject* factory;
int size = 10;
@@ -86,10 +86,10 @@ int cache_init(Cache* self, PyObject* args, PyObject* kwargs)
return 0;
}
-void cache_dealloc(Cache* self)
+void pysqlite_cache_dealloc(pysqlite_Cache* self)
{
- Node* node;
- Node* delete_node;
+ pysqlite_Node* node;
+ pysqlite_Node* delete_node;
if (!self->factory) {
/* constructor failed, just get out of here */
@@ -112,14 +112,14 @@ void cache_dealloc(Cache* self)
self->ob_type->tp_free((PyObject*)self);
}
-PyObject* cache_get(Cache* self, PyObject* args)
+PyObject* pysqlite_cache_get(pysqlite_Cache* self, PyObject* args)
{
PyObject* key = args;
- Node* node;
- Node* ptr;
+ pysqlite_Node* node;
+ pysqlite_Node* ptr;
PyObject* data;
- node = (Node*)PyDict_GetItem(self->mapping, key);
+ node = (pysqlite_Node*)PyDict_GetItem(self->mapping, key);
if (node) {
/* an entry for this key already exists in the cache */
@@ -186,7 +186,7 @@ PyObject* cache_get(Cache* self, PyObject* args)
return NULL;
}
- node = new_node(key, data);
+ node = pysqlite_new_node(key, data);
if (!node) {
return NULL;
}
@@ -211,9 +211,9 @@ PyObject* cache_get(Cache* self, PyObject* args)
return node->data;
}
-PyObject* cache_display(Cache* self, PyObject* args)
+PyObject* pysqlite_cache_display(pysqlite_Cache* self, PyObject* args)
{
- Node* ptr;
+ pysqlite_Node* ptr;
PyObject* prevkey;
PyObject* nextkey;
PyObject* fmt_args;
@@ -265,20 +265,20 @@ PyObject* cache_display(Cache* self, PyObject* args)
}
static PyMethodDef cache_methods[] = {
- {"get", (PyCFunction)cache_get, METH_O,
+ {"get", (PyCFunction)pysqlite_cache_get, METH_O,
PyDoc_STR("Gets an entry from the cache or calls the factory function to produce one.")},
- {"display", (PyCFunction)cache_display, METH_NOARGS,
+ {"display", (PyCFunction)pysqlite_cache_display, METH_NOARGS,
PyDoc_STR("For debugging only.")},
{NULL, NULL}
};
-PyTypeObject NodeType = {
+PyTypeObject pysqlite_NodeType = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MODULE_NAME "Node", /* tp_name */
- sizeof(Node), /* tp_basicsize */
+ sizeof(pysqlite_Node), /* tp_basicsize */
0, /* tp_itemsize */
- (destructor)node_dealloc, /* tp_dealloc */
+ (destructor)pysqlite_node_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -315,13 +315,13 @@ PyTypeObject NodeType = {
0 /* tp_free */
};
-PyTypeObject CacheType = {
+PyTypeObject pysqlite_CacheType = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MODULE_NAME ".Cache", /* tp_name */
- sizeof(Cache), /* tp_basicsize */
+ sizeof(pysqlite_Cache), /* tp_basicsize */
0, /* tp_itemsize */
- (destructor)cache_dealloc, /* tp_dealloc */
+ (destructor)pysqlite_cache_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -352,24 +352,24 @@ PyTypeObject CacheType = {
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
- (initproc)cache_init, /* tp_init */
+ (initproc)pysqlite_cache_init, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0 /* tp_free */
};
-extern int cache_setup_types(void)
+extern int pysqlite_cache_setup_types(void)
{
int rc;
- NodeType.tp_new = PyType_GenericNew;
- CacheType.tp_new = PyType_GenericNew;
+ pysqlite_NodeType.tp_new = PyType_GenericNew;
+ pysqlite_CacheType.tp_new = PyType_GenericNew;
- rc = PyType_Ready(&NodeType);
+ rc = PyType_Ready(&pysqlite_NodeType);
if (rc < 0) {
return rc;
}
- rc = PyType_Ready(&CacheType);
+ rc = PyType_Ready(&pysqlite_CacheType);
return rc;
}
diff --git a/Modules/_sqlite/cache.h b/Modules/_sqlite/cache.h
index 1f13907..158bf5a 100644
--- a/Modules/_sqlite/cache.h
+++ b/Modules/_sqlite/cache.h
@@ -29,15 +29,15 @@
* dictionary. The list items are of type 'Node' and the dictionary has the
* nodes as values. */
-typedef struct _Node
+typedef struct _pysqlite_Node
{
PyObject_HEAD
PyObject* key;
PyObject* data;
long count;
- struct _Node* prev;
- struct _Node* next;
-} Node;
+ struct _pysqlite_Node* prev;
+ struct _pysqlite_Node* next;
+} pysqlite_Node;
typedef struct
{
@@ -50,24 +50,24 @@ typedef struct
/* the factory callable */
PyObject* factory;
- Node* first;
- Node* last;
+ pysqlite_Node* first;
+ pysqlite_Node* last;
/* if set, decrement the factory function when the Cache is deallocated.
* this is almost always desirable, but not in the pysqlite context */
int decref_factory;
-} Cache;
+} pysqlite_Cache;
-extern PyTypeObject NodeType;
-extern PyTypeObject CacheType;
+extern PyTypeObject pysqlite_NodeType;
+extern PyTypeObject pysqlite_CacheType;
-int node_init(Node* self, PyObject* args, PyObject* kwargs);
-void node_dealloc(Node* self);
+int pysqlite_node_init(pysqlite_Node* self, PyObject* args, PyObject* kwargs);
+void pysqlite_node_dealloc(pysqlite_Node* self);
-int cache_init(Cache* self, PyObject* args, PyObject* kwargs);
-void cache_dealloc(Cache* self);
-PyObject* cache_get(Cache* self, PyObject* args);
+int pysqlite_cache_init(pysqlite_Cache* self, PyObject* args, PyObject* kwargs);
+void pysqlite_cache_dealloc(pysqlite_Cache* self);
+PyObject* pysqlite_cache_get(pysqlite_Cache* self, PyObject* args);
-int cache_setup_types(void);
+int pysqlite_cache_setup_types(void);
#endif
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
index 703af15..924d582 100644
--- a/Modules/_sqlite/connection.c
+++ b/Modules/_sqlite/connection.c
@@ -32,7 +32,7 @@
#include "pythread.h"
-static int connection_set_isolation_level(Connection* self, PyObject* isolation_level);
+static int pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* isolation_level);
void _sqlite3_result_error(sqlite3_context* ctx, const char* errmsg, int len)
@@ -43,11 +43,11 @@ void _sqlite3_result_error(sqlite3_context* ctx, const char* errmsg, int len)
#if SQLITE_VERSION_NUMBER >= 3003003
sqlite3_result_error(ctx, errmsg, len);
#else
- PyErr_SetString(OperationalError, errmsg);
+ PyErr_SetString(pysqlite_OperationalError, errmsg);
#endif
}
-int connection_init(Connection* self, PyObject* args, PyObject* kwargs)
+int pysqlite_connection_init(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
static char *kwlist[] = {"database", "timeout", "detect_types", "isolation_level", "check_same_thread", "factory", "cached_statements", NULL, NULL};
@@ -82,7 +82,7 @@ int connection_init(Connection* self, PyObject* args, PyObject* kwargs)
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
return -1;
}
@@ -95,10 +95,10 @@ int connection_init(Connection* self, PyObject* args, PyObject* kwargs)
Py_INCREF(isolation_level);
}
self->isolation_level = NULL;
- connection_set_isolation_level(self, isolation_level);
+ pysqlite_connection_set_isolation_level(self, isolation_level);
Py_DECREF(isolation_level);
- self->statement_cache = (Cache*)PyObject_CallFunction((PyObject*)&CacheType, "Oi", self, cached_statements);
+ self->statement_cache = (pysqlite_Cache*)PyObject_CallFunction((PyObject*)&pysqlite_CacheType, "Oi", self, cached_statements);
if (PyErr_Occurred()) {
return -1;
}
@@ -135,41 +135,41 @@ int connection_init(Connection* self, PyObject* args, PyObject* kwargs)
return -1;
}
- self->Warning = Warning;
- self->Error = Error;
- self->InterfaceError = InterfaceError;
- self->DatabaseError = DatabaseError;
- self->DataError = DataError;
- self->OperationalError = OperationalError;
- self->IntegrityError = IntegrityError;
- self->InternalError = InternalError;
- self->ProgrammingError = ProgrammingError;
- self->NotSupportedError = NotSupportedError;
+ self->Warning = pysqlite_Warning;
+ self->Error = pysqlite_Error;
+ self->InterfaceError = pysqlite_InterfaceError;
+ self->DatabaseError = pysqlite_DatabaseError;
+ self->DataError = pysqlite_DataError;
+ self->OperationalError = pysqlite_OperationalError;
+ self->IntegrityError = pysqlite_IntegrityError;
+ self->InternalError = pysqlite_InternalError;
+ self->ProgrammingError = pysqlite_ProgrammingError;
+ self->NotSupportedError = pysqlite_NotSupportedError;
return 0;
}
/* Empty the entire statement cache of this connection */
-void flush_statement_cache(Connection* self)
+void pysqlite_flush_statement_cache(pysqlite_Connection* self)
{
- Node* node;
- Statement* statement;
+ pysqlite_Node* node;
+ pysqlite_Statement* statement;
node = self->statement_cache->first;
while (node) {
- statement = (Statement*)(node->data);
- (void)statement_finalize(statement);
+ statement = (pysqlite_Statement*)(node->data);
+ (void)pysqlite_statement_finalize(statement);
node = node->next;
}
Py_DECREF(self->statement_cache);
- self->statement_cache = (Cache*)PyObject_CallFunction((PyObject*)&CacheType, "O", self);
+ self->statement_cache = (pysqlite_Cache*)PyObject_CallFunction((PyObject*)&pysqlite_CacheType, "O", self);
Py_DECREF(self);
self->statement_cache->decref_factory = 0;
}
-void reset_all_statements(Connection* self)
+void pysqlite_reset_all_statements(pysqlite_Connection* self)
{
int i;
PyObject* weakref;
@@ -179,12 +179,12 @@ void reset_all_statements(Connection* self)
weakref = PyList_GetItem(self->statements, i);
statement = PyWeakref_GetObject(weakref);
if (statement != Py_None) {
- (void)statement_reset((Statement*)statement);
+ (void)pysqlite_statement_reset((pysqlite_Statement*)statement);
}
}
}
-void connection_dealloc(Connection* self)
+void pysqlite_connection_dealloc(pysqlite_Connection* self)
{
Py_XDECREF(self->statement_cache);
@@ -208,7 +208,7 @@ void connection_dealloc(Connection* self)
self->ob_type->tp_free((PyObject*)self);
}
-PyObject* connection_cursor(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_cursor(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
static char *kwlist[] = {"factory", NULL, NULL};
PyObject* factory = NULL;
@@ -220,34 +220,34 @@ PyObject* connection_cursor(Connection* self, PyObject* args, PyObject* kwargs)
return NULL;
}
- if (!check_thread(self) || !check_connection(self)) {
+ if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
return NULL;
}
if (factory == NULL) {
- factory = (PyObject*)&CursorType;
+ factory = (PyObject*)&pysqlite_CursorType;
}
cursor = PyObject_CallFunction(factory, "O", self);
if (cursor && self->row_factory != Py_None) {
- Py_XDECREF(((Cursor*)cursor)->row_factory);
+ Py_XDECREF(((pysqlite_Cursor*)cursor)->row_factory);
Py_INCREF(self->row_factory);
- ((Cursor*)cursor)->row_factory = self->row_factory;
+ ((pysqlite_Cursor*)cursor)->row_factory = self->row_factory;
}
return cursor;
}
-PyObject* connection_close(Connection* self, PyObject* args)
+PyObject* pysqlite_connection_close(pysqlite_Connection* self, PyObject* args)
{
int rc;
- if (!check_thread(self)) {
+ if (!pysqlite_check_thread(self)) {
return NULL;
}
- flush_statement_cache(self);
+ pysqlite_flush_statement_cache(self);
if (self->db) {
Py_BEGIN_ALLOW_THREADS
@@ -255,7 +255,7 @@ PyObject* connection_close(Connection* self, PyObject* args)
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
return NULL;
} else {
self->db = NULL;
@@ -271,17 +271,17 @@ PyObject* connection_close(Connection* self, PyObject* args)
*
* 0 => error; 1 => ok
*/
-int check_connection(Connection* con)
+int pysqlite_check_connection(pysqlite_Connection* con)
{
if (!con->db) {
- PyErr_SetString(ProgrammingError, "Cannot operate on a closed database.");
+ PyErr_SetString(pysqlite_ProgrammingError, "Cannot operate on a closed database.");
return 0;
} else {
return 1;
}
}
-PyObject* _connection_begin(Connection* self)
+PyObject* _pysqlite_connection_begin(pysqlite_Connection* self)
{
int rc;
const char* tail;
@@ -292,7 +292,7 @@ PyObject* _connection_begin(Connection* self)
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
goto error;
}
@@ -300,7 +300,7 @@ PyObject* _connection_begin(Connection* self)
if (rc == SQLITE_DONE) {
self->inTransaction = 1;
} else {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
}
Py_BEGIN_ALLOW_THREADS
@@ -308,7 +308,7 @@ PyObject* _connection_begin(Connection* self)
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK && !PyErr_Occurred()) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
}
error:
@@ -320,13 +320,13 @@ error:
}
}
-PyObject* connection_commit(Connection* self, PyObject* args)
+PyObject* pysqlite_connection_commit(pysqlite_Connection* self, PyObject* args)
{
int rc;
const char* tail;
sqlite3_stmt* statement;
- if (!check_thread(self) || !check_connection(self)) {
+ if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
return NULL;
}
@@ -335,7 +335,7 @@ PyObject* connection_commit(Connection* self, PyObject* args)
rc = sqlite3_prepare(self->db, "COMMIT", -1, &statement, &tail);
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
goto error;
}
@@ -343,14 +343,14 @@ PyObject* connection_commit(Connection* self, PyObject* args)
if (rc == SQLITE_DONE) {
self->inTransaction = 0;
} else {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
}
Py_BEGIN_ALLOW_THREADS
rc = sqlite3_finalize(statement);
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK && !PyErr_Occurred()) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
}
}
@@ -364,24 +364,24 @@ error:
}
}
-PyObject* connection_rollback(Connection* self, PyObject* args)
+PyObject* pysqlite_connection_rollback(pysqlite_Connection* self, PyObject* args)
{
int rc;
const char* tail;
sqlite3_stmt* statement;
- if (!check_thread(self) || !check_connection(self)) {
+ if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
return NULL;
}
if (self->inTransaction) {
- reset_all_statements(self);
+ pysqlite_reset_all_statements(self);
Py_BEGIN_ALLOW_THREADS
rc = sqlite3_prepare(self->db, "ROLLBACK", -1, &statement, &tail);
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
goto error;
}
@@ -389,14 +389,14 @@ PyObject* connection_rollback(Connection* self, PyObject* args)
if (rc == SQLITE_DONE) {
self->inTransaction = 0;
} else {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
}
Py_BEGIN_ALLOW_THREADS
rc = sqlite3_finalize(statement);
Py_END_ALLOW_THREADS
if (rc != SQLITE_OK && !PyErr_Occurred()) {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
}
}
@@ -410,7 +410,7 @@ error:
}
}
-void _set_result(sqlite3_context* context, PyObject* py_val)
+void _pysqlite_set_result(sqlite3_context* context, PyObject* py_val)
{
long longval;
const char* buffer;
@@ -445,7 +445,7 @@ void _set_result(sqlite3_context* context, PyObject* py_val)
}
}
-PyObject* _build_py_params(sqlite3_context *context, int argc, sqlite3_value** argv)
+PyObject* _pysqlite_build_py_params(sqlite3_context *context, int argc, sqlite3_value** argv)
{
PyObject* args;
int i;
@@ -512,7 +512,7 @@ PyObject* _build_py_params(sqlite3_context *context, int argc, sqlite3_value** a
return args;
}
-void _func_callback(sqlite3_context* context, int argc, sqlite3_value** argv)
+void _pysqlite_func_callback(sqlite3_context* context, int argc, sqlite3_value** argv)
{
PyObject* args;
PyObject* py_func;
@@ -524,14 +524,14 @@ void _func_callback(sqlite3_context* context, int argc, sqlite3_value** argv)
py_func = (PyObject*)sqlite3_user_data(context);
- args = _build_py_params(context, argc, argv);
+ args = _pysqlite_build_py_params(context, argc, argv);
if (args) {
py_retval = PyObject_CallObject(py_func, args);
Py_DECREF(args);
}
if (py_retval) {
- _set_result(context, py_retval);
+ _pysqlite_set_result(context, py_retval);
Py_DECREF(py_retval);
} else {
if (_enable_callback_tracebacks) {
@@ -545,7 +545,7 @@ void _func_callback(sqlite3_context* context, int argc, sqlite3_value** argv)
PyGILState_Release(threadstate);
}
-static void _step_callback(sqlite3_context *context, int argc, sqlite3_value** params)
+static void _pysqlite_step_callback(sqlite3_context *context, int argc, sqlite3_value** params)
{
PyObject* args;
PyObject* function_result = NULL;
@@ -581,7 +581,7 @@ static void _step_callback(sqlite3_context *context, int argc, sqlite3_value** p
goto error;
}
- args = _build_py_params(context, argc, params);
+ args = _pysqlite_build_py_params(context, argc, params);
if (!args) {
goto error;
}
@@ -605,7 +605,7 @@ error:
PyGILState_Release(threadstate);
}
-void _final_callback(sqlite3_context* context)
+void _pysqlite_final_callback(sqlite3_context* context)
{
PyObject* function_result = NULL;
PyObject** aggregate_instance;
@@ -634,7 +634,7 @@ void _final_callback(sqlite3_context* context)
}
_sqlite3_result_error(context, "user-defined aggregate's 'finalize' method raised error", -1);
} else {
- _set_result(context, function_result);
+ _pysqlite_set_result(context, function_result);
}
error:
@@ -644,7 +644,7 @@ error:
PyGILState_Release(threadstate);
}
-void _drop_unused_statement_references(Connection* self)
+void _pysqlite_drop_unused_statement_references(pysqlite_Connection* self)
{
PyObject* new_list;
PyObject* weakref;
@@ -676,7 +676,7 @@ void _drop_unused_statement_references(Connection* self)
self->statements = new_list;
}
-PyObject* connection_create_function(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_create_function(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
static char *kwlist[] = {"name", "narg", "func", NULL, NULL};
@@ -691,11 +691,11 @@ PyObject* connection_create_function(Connection* self, PyObject* args, PyObject*
return NULL;
}
- rc = sqlite3_create_function(self->db, name, narg, SQLITE_UTF8, (void*)func, _func_callback, NULL, NULL);
+ rc = sqlite3_create_function(self->db, name, narg, SQLITE_UTF8, (void*)func, _pysqlite_func_callback, NULL, NULL);
if (rc != SQLITE_OK) {
/* Workaround for SQLite bug: no error code or string is available here */
- PyErr_SetString(OperationalError, "Error creating function");
+ PyErr_SetString(pysqlite_OperationalError, "Error creating function");
return NULL;
} else {
PyDict_SetItem(self->function_pinboard, func, Py_None);
@@ -705,7 +705,7 @@ PyObject* connection_create_function(Connection* self, PyObject* args, PyObject*
}
}
-PyObject* connection_create_aggregate(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_create_aggregate(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
PyObject* aggregate_class;
@@ -719,10 +719,10 @@ PyObject* connection_create_aggregate(Connection* self, PyObject* args, PyObject
return NULL;
}
- rc = sqlite3_create_function(self->db, name, n_arg, SQLITE_UTF8, (void*)aggregate_class, 0, &_step_callback, &_final_callback);
+ rc = sqlite3_create_function(self->db, name, n_arg, SQLITE_UTF8, (void*)aggregate_class, 0, &_pysqlite_step_callback, &_pysqlite_final_callback);
if (rc != SQLITE_OK) {
/* Workaround for SQLite bug: no error code or string is available here */
- PyErr_SetString(OperationalError, "Error creating aggregate");
+ PyErr_SetString(pysqlite_OperationalError, "Error creating aggregate");
return NULL;
} else {
PyDict_SetItem(self->function_pinboard, aggregate_class, Py_None);
@@ -732,7 +732,7 @@ PyObject* connection_create_aggregate(Connection* self, PyObject* args, PyObject
}
}
-int _authorizer_callback(void* user_arg, int action, const char* arg1, const char* arg2 , const char* dbname, const char* access_attempt_source)
+static int _authorizer_callback(void* user_arg, int action, const char* arg1, const char* arg2 , const char* dbname, const char* access_attempt_source)
{
PyObject *ret;
int rc;
@@ -762,7 +762,7 @@ int _authorizer_callback(void* user_arg, int action, const char* arg1, const cha
return rc;
}
-PyObject* connection_set_authorizer(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_set_authorizer(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
PyObject* authorizer_cb;
@@ -777,7 +777,7 @@ PyObject* connection_set_authorizer(Connection* self, PyObject* args, PyObject*
rc = sqlite3_set_authorizer(self->db, _authorizer_callback, (void*)authorizer_cb);
if (rc != SQLITE_OK) {
- PyErr_SetString(OperationalError, "Error setting authorizer callback");
+ PyErr_SetString(pysqlite_OperationalError, "Error setting authorizer callback");
return NULL;
} else {
PyDict_SetItem(self->function_pinboard, authorizer_cb, Py_None);
@@ -787,11 +787,11 @@ PyObject* connection_set_authorizer(Connection* self, PyObject* args, PyObject*
}
}
-int check_thread(Connection* self)
+int pysqlite_check_thread(pysqlite_Connection* self)
{
if (self->check_same_thread) {
if (PyThread_get_thread_ident() != self->thread_ident) {
- PyErr_Format(ProgrammingError,
+ PyErr_Format(pysqlite_ProgrammingError,
"SQLite objects created in a thread can only be used in that same thread."
"The object was created in thread id %ld and this is thread id %ld",
self->thread_ident, PyThread_get_thread_ident());
@@ -803,22 +803,22 @@ int check_thread(Connection* self)
return 1;
}
-static PyObject* connection_get_isolation_level(Connection* self, void* unused)
+static PyObject* pysqlite_connection_get_isolation_level(pysqlite_Connection* self, void* unused)
{
Py_INCREF(self->isolation_level);
return self->isolation_level;
}
-static PyObject* connection_get_total_changes(Connection* self, void* unused)
+static PyObject* pysqlite_connection_get_total_changes(pysqlite_Connection* self, void* unused)
{
- if (!check_connection(self)) {
+ if (!pysqlite_check_connection(self)) {
return NULL;
} else {
return Py_BuildValue("i", sqlite3_total_changes(self->db));
}
}
-static int connection_set_isolation_level(Connection* self, PyObject* isolation_level)
+static int pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* isolation_level)
{
PyObject* res;
PyObject* begin_statement;
@@ -834,7 +834,7 @@ static int connection_set_isolation_level(Connection* self, PyObject* isolation_
Py_INCREF(Py_None);
self->isolation_level = Py_None;
- res = connection_commit(self, NULL);
+ res = pysqlite_connection_commit(self, NULL);
if (!res) {
return -1;
}
@@ -866,10 +866,10 @@ static int connection_set_isolation_level(Connection* self, PyObject* isolation_
return 0;
}
-PyObject* connection_call(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_call(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
PyObject* sql;
- Statement* statement;
+ pysqlite_Statement* statement;
PyObject* weakref;
int rc;
@@ -877,22 +877,22 @@ PyObject* connection_call(Connection* self, PyObject* args, PyObject* kwargs)
return NULL;
}
- _drop_unused_statement_references(self);
+ _pysqlite_drop_unused_statement_references(self);
- statement = PyObject_New(Statement, &StatementType);
+ statement = PyObject_New(pysqlite_Statement, &pysqlite_StatementType);
if (!statement) {
return NULL;
}
- rc = statement_create(statement, self, sql);
+ rc = pysqlite_statement_create(statement, self, sql);
if (rc != SQLITE_OK) {
if (rc == PYSQLITE_TOO_MUCH_SQL) {
- PyErr_SetString(Warning, "You can only execute one statement at a time.");
+ PyErr_SetString(pysqlite_Warning, "You can only execute one statement at a time.");
} else if (rc == PYSQLITE_SQL_WRONG_TYPE) {
- PyErr_SetString(Warning, "SQL is of wrong type. Must be string or unicode.");
+ PyErr_SetString(pysqlite_Warning, "SQL is of wrong type. Must be string or unicode.");
} else {
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
}
Py_DECREF(statement);
@@ -918,7 +918,7 @@ error:
return (PyObject*)statement;
}
-PyObject* connection_execute(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_execute(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
PyObject* cursor = 0;
PyObject* result = 0;
@@ -949,7 +949,7 @@ error:
return cursor;
}
-PyObject* connection_executemany(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_executemany(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
PyObject* cursor = 0;
PyObject* result = 0;
@@ -980,7 +980,7 @@ error:
return cursor;
}
-PyObject* connection_executescript(Connection* self, PyObject* args, PyObject* kwargs)
+PyObject* pysqlite_connection_executescript(pysqlite_Connection* self, PyObject* args, PyObject* kwargs)
{
PyObject* cursor = 0;
PyObject* result = 0;
@@ -1014,7 +1014,7 @@ error:
/* ------------------------- COLLATION CODE ------------------------ */
static int
-collation_callback(
+pysqlite_collation_callback(
void* context,
int text1_length, const void* text1_data,
int text2_length, const void* text2_data)
@@ -1063,11 +1063,11 @@ finally:
}
static PyObject *
-connection_interrupt(Connection* self, PyObject* args)
+pysqlite_connection_interrupt(pysqlite_Connection* self, PyObject* args)
{
PyObject* retval = NULL;
- if (!check_connection(self)) {
+ if (!pysqlite_check_connection(self)) {
goto finally;
}
@@ -1081,7 +1081,7 @@ finally:
}
static PyObject *
-connection_create_collation(Connection* self, PyObject* args)
+pysqlite_connection_create_collation(pysqlite_Connection* self, PyObject* args)
{
PyObject* callable;
PyObject* uppercase_name = 0;
@@ -1090,7 +1090,7 @@ connection_create_collation(Connection* self, PyObject* args)
char* chk;
int rc;
- if (!check_thread(self) || !check_connection(self)) {
+ if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
goto finally;
}
@@ -1111,7 +1111,7 @@ connection_create_collation(Connection* self, PyObject* args)
{
chk++;
} else {
- PyErr_SetString(ProgrammingError, "invalid character in collation name");
+ PyErr_SetString(pysqlite_ProgrammingError, "invalid character in collation name");
goto finally;
}
}
@@ -1131,10 +1131,10 @@ connection_create_collation(Connection* self, PyObject* args)
PyString_AsString(uppercase_name),
SQLITE_UTF8,
(callable != Py_None) ? callable : NULL,
- (callable != Py_None) ? collation_callback : NULL);
+ (callable != Py_None) ? pysqlite_collation_callback : NULL);
if (rc != SQLITE_OK) {
PyDict_DelItem(self->collations, uppercase_name);
- _seterror(self->db);
+ _pysqlite_seterror(self->db);
goto finally;
}
@@ -1155,63 +1155,63 @@ static char connection_doc[] =
PyDoc_STR("SQLite database connection object.");
static PyGetSetDef connection_getset[] = {
- {"isolation_level", (getter)connection_get_isolation_level, (setter)connection_set_isolation_level},
- {"total_changes", (getter)connection_get_total_changes, (setter)0},
+ {"isolation_level", (getter)pysqlite_connection_get_isolation_level, (setter)pysqlite_connection_set_isolation_level},
+ {"total_changes", (getter)pysqlite_connection_get_total_changes, (setter)0},
{NULL}
};
static PyMethodDef connection_methods[] = {
- {"cursor", (PyCFunction)connection_cursor, METH_VARARGS|METH_KEYWORDS,
+ {"cursor", (PyCFunction)pysqlite_connection_cursor, METH_VARARGS|METH_KEYWORDS,
PyDoc_STR("Return a cursor for the connection.")},
- {"close", (PyCFunction)connection_close, METH_NOARGS,
+ {"close", (PyCFunction)pysqlite_connection_close, METH_NOARGS,
PyDoc_STR("Closes the connection.")},
- {"commit", (PyCFunction)connection_commit, METH_NOARGS,
+ {"commit", (PyCFunction)pysqlite_connection_commit, METH_NOARGS,
PyDoc_STR("Commit the current transaction.")},
- {"rollback", (PyCFunction)connection_rollback, METH_NOARGS,
+ {"rollback", (PyCFunction)pysqlite_connection_rollback, METH_NOARGS,
PyDoc_STR("Roll back the current transaction.")},
- {"create_function", (PyCFunction)connection_create_function, METH_VARARGS|METH_KEYWORDS,
+ {"create_function", (PyCFunction)pysqlite_connection_create_function, METH_VARARGS|METH_KEYWORDS,
PyDoc_STR("Creates a new function. Non-standard.")},
- {"create_aggregate", (PyCFunction)connection_create_aggregate, METH_VARARGS|METH_KEYWORDS,
+ {"create_aggregate", (PyCFunction)pysqlite_connection_create_aggregate, METH_VARARGS|METH_KEYWORDS,
PyDoc_STR("Creates a new aggregate. Non-standard.")},
- {"set_authorizer", (PyCFunction)connection_set_authorizer, METH_VARARGS|METH_KEYWORDS,
+ {"set_authorizer", (PyCFunction)pysqlite_connection_set_authorizer, METH_VARARGS|METH_KEYWORDS,
PyDoc_STR("Sets authorizer callback. Non-standard.")},
- {"execute", (PyCFunction)connection_execute, METH_VARARGS,
+ {"execute", (PyCFunction)pysqlite_connection_execute, METH_VARARGS,
PyDoc_STR("Executes a SQL statement. Non-standard.")},
- {"executemany", (PyCFunction)connection_executemany, METH_VARARGS,
+ {"executemany", (PyCFunction)pysqlite_connection_executemany, METH_VARARGS,
PyDoc_STR("Repeatedly executes a SQL statement. Non-standard.")},
- {"executescript", (PyCFunction)connection_executescript, METH_VARARGS,
+ {"executescript", (PyCFunction)pysqlite_connection_executescript, METH_VARARGS,
PyDoc_STR("Executes a multiple SQL statements at once. Non-standard.")},
- {"create_collation", (PyCFunction)connection_create_collation, METH_VARARGS,
+ {"create_collation", (PyCFunction)pysqlite_connection_create_collation, METH_VARARGS,
PyDoc_STR("Creates a collation function. Non-standard.")},
- {"interrupt", (PyCFunction)connection_interrupt, METH_NOARGS,
+ {"interrupt", (PyCFunction)pysqlite_connection_interrupt, METH_NOARGS,
PyDoc_STR("Abort any pending database operation. Non-standard.")},
{NULL, NULL}
};
static struct PyMemberDef connection_members[] =
{
- {"Warning", T_OBJECT, offsetof(Connection, Warning), RO},
- {"Error", T_OBJECT, offsetof(Connection, Error), RO},
- {"InterfaceError", T_OBJECT, offsetof(Connection, InterfaceError), RO},
- {"DatabaseError", T_OBJECT, offsetof(Connection, DatabaseError), RO},
- {"DataError", T_OBJECT, offsetof(Connection, DataError), RO},
- {"OperationalError", T_OBJECT, offsetof(Connection, OperationalError), RO},
- {"IntegrityError", T_OBJECT, offsetof(Connection, IntegrityError), RO},
- {"InternalError", T_OBJECT, offsetof(Connection, InternalError), RO},
- {"ProgrammingError", T_OBJECT, offsetof(Connection, ProgrammingError), RO},
- {"NotSupportedError", T_OBJECT, offsetof(Connection, NotSupportedError), RO},
- {"row_factory", T_OBJECT, offsetof(Connection, row_factory)},
- {"text_factory", T_OBJECT, offsetof(Connection, text_factory)},
+ {"Warning", T_OBJECT, offsetof(pysqlite_Connection, Warning), RO},
+ {"Error", T_OBJECT, offsetof(pysqlite_Connection, Error), RO},
+ {"InterfaceError", T_OBJECT, offsetof(pysqlite_Connection, InterfaceError), RO},
+ {"DatabaseError", T_OBJECT, offsetof(pysqlite_Connection, DatabaseError), RO},
+ {"DataError", T_OBJECT, offsetof(pysqlite_Connection, DataError), RO},
+ {"OperationalError", T_OBJECT, offsetof(pysqlite_Connection, OperationalError), RO},
+ {"IntegrityError", T_OBJECT, offsetof(pysqlite_Connection, IntegrityError), RO},
+ {"InternalError", T_OBJECT, offsetof(pysqlite_Connection, InternalError), RO},
+ {"ProgrammingError", T_OBJECT, offsetof(pysqlite_Connection, ProgrammingError), RO},
+ {"NotSupportedError", T_OBJECT, offsetof(pysqlite_Connection, NotSupportedError), RO},
+ {"row_factory", T_OBJECT, offsetof(pysqlite_Connection, row_factory)},
+ {"text_factory", T_OBJECT, offsetof(pysqlite_Connection, text_factory)},
{NULL}
};
-PyTypeObject ConnectionType = {
+PyTypeObject pysqlite_ConnectionType = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MODULE_NAME ".Connection", /* tp_name */
- sizeof(Connection), /* tp_basicsize */
+ sizeof(pysqlite_Connection), /* tp_basicsize */
0, /* tp_itemsize */
- (destructor)connection_dealloc, /* tp_dealloc */
+ (destructor)pysqlite_connection_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -1221,7 +1221,7 @@ PyTypeObject ConnectionType = {
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
- (ternaryfunc)connection_call, /* tp_call */
+ (ternaryfunc)pysqlite_connection_call, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
@@ -1242,14 +1242,14 @@ PyTypeObject ConnectionType = {
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
- (initproc)connection_init, /* tp_init */
+ (initproc)pysqlite_connection_init, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0 /* tp_free */
};
-extern int connection_setup_types(void)
+extern int pysqlite_connection_setup_types(void)
{
- ConnectionType.tp_new = PyType_GenericNew;
- return PyType_Ready(&ConnectionType);
+ pysqlite_ConnectionType.tp_new = PyType_GenericNew;
+ return PyType_Ready(&pysqlite_ConnectionType);
}
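
The connection.c hunks above are almost entirely mechanical renames to the pysqlite_ prefix; the user-defined function and aggregate callbacks (_pysqlite_func_callback, _pysqlite_step_callback, _pysqlite_final_callback) keep their behavior. A minimal Python-level sketch of the API those callbacks serve, assuming an in-memory database (the Summer class is purely illustrative):

    import sqlite3

    class Summer:
        # step() is driven by _pysqlite_step_callback,
        # finalize() by _pysqlite_final_callback.
        def __init__(self):
            self.total = 0
        def step(self, value):
            self.total += value
        def finalize(self):
            return self.total

    con = sqlite3.connect(":memory:")
    con.create_function("sign", 1, lambda x: (x > 0) - (x < 0))
    con.create_aggregate("mysum", 1, Summer)
    con.execute("CREATE TABLE t (x INTEGER)")
    con.executemany("INSERT INTO t VALUES (?)", [(-3,), (5,), (7,)])
    assert con.execute("SELECT sign(-3)").fetchone()[0] == -1
    assert con.execute("SELECT mysum(x) FROM t").fetchone()[0] == 9
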
diff --git a/Modules/_sqlite/connection.h b/Modules/_sqlite/connection.h
index 8f4d36e..21fcd2a 100644
--- a/Modules/_sqlite/connection.h
+++ b/Modules/_sqlite/connection.h
@@ -66,7 +66,7 @@ typedef struct
/* thread identification of the thread the connection was created in */
long thread_ident;
- Cache* statement_cache;
+ pysqlite_Cache* statement_cache;
/* A list of weak references to statements used within this connection */
PyObject* statements;
@@ -106,24 +106,23 @@ typedef struct
PyObject* InternalError;
PyObject* ProgrammingError;
PyObject* NotSupportedError;
-} Connection;
+} pysqlite_Connection;
-extern PyTypeObject ConnectionType;
+extern PyTypeObject pysqlite_ConnectionType;
-PyObject* connection_alloc(PyTypeObject* type, int aware);
-void connection_dealloc(Connection* self);
-PyObject* connection_cursor(Connection* self, PyObject* args, PyObject* kwargs);
-PyObject* connection_close(Connection* self, PyObject* args);
-PyObject* _connection_begin(Connection* self);
-PyObject* connection_begin(Connection* self, PyObject* args);
-PyObject* connection_commit(Connection* self, PyObject* args);
-PyObject* connection_rollback(Connection* self, PyObject* args);
-PyObject* connection_new(PyTypeObject* type, PyObject* args, PyObject* kw);
-int connection_init(Connection* self, PyObject* args, PyObject* kwargs);
+PyObject* pysqlite_connection_alloc(PyTypeObject* type, int aware);
+void pysqlite_connection_dealloc(pysqlite_Connection* self);
+PyObject* pysqlite_connection_cursor(pysqlite_Connection* self, PyObject* args, PyObject* kwargs);
+PyObject* pysqlite_connection_close(pysqlite_Connection* self, PyObject* args);
+PyObject* _pysqlite_connection_begin(pysqlite_Connection* self);
+PyObject* pysqlite_connection_commit(pysqlite_Connection* self, PyObject* args);
+PyObject* pysqlite_connection_rollback(pysqlite_Connection* self, PyObject* args);
+PyObject* pysqlite_connection_new(PyTypeObject* type, PyObject* args, PyObject* kw);
+int pysqlite_connection_init(pysqlite_Connection* self, PyObject* args, PyObject* kwargs);
-int check_thread(Connection* self);
-int check_connection(Connection* con);
+int pysqlite_check_thread(pysqlite_Connection* self);
+int pysqlite_check_connection(pysqlite_Connection* con);
-int connection_setup_types(void);
+int pysqlite_connection_setup_types(void);
#endif
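
pysqlite_check_thread() and pysqlite_check_connection() declared here guard every entry point that touches the sqlite3 handle, and both report failures as ProgrammingError (a cross-thread call fails the same way when check_same_thread is left enabled). A short sketch of the visible behavior once a connection has been closed, using an in-memory database for illustration:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.close()                      # the C layer drops self->db
    try:
        con.execute("SELECT 1")      # pysqlite_check_connection() fails here
    except sqlite3.ProgrammingError:
        pass                         # "Cannot operate on a closed database."
    else:
        raise AssertionError("expected ProgrammingError")
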
diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c
index a2d3cf2..ca15eed 100644
--- a/Modules/_sqlite/cursor.c
+++ b/Modules/_sqlite/cursor.c
@@ -34,9 +34,9 @@
#define INT32_MAX 2147483647
#endif
-PyObject* cursor_iternext(Cursor *self);
+PyObject* pysqlite_cursor_iternext(pysqlite_Cursor* self);
-static StatementKind detect_statement_type(char* statement)
+static pysqlite_StatementKind detect_statement_type(char* statement)
{
char buf[20];
char* src;
@@ -74,11 +74,11 @@ static StatementKind detect_statement_type(char* statement)
}
}
-int cursor_init(Cursor* self, PyObject* args, PyObject* kwargs)
+int pysqlite_cursor_init(pysqlite_Cursor* self, PyObject* args, PyObject* kwargs)
{
- Connection* connection;
+ pysqlite_Connection* connection;
- if (!PyArg_ParseTuple(args, "O!", &ConnectionType, &connection))
+ if (!PyArg_ParseTuple(args, "O!", &pysqlite_ConnectionType, &connection))
{
return -1;
}
@@ -109,20 +109,20 @@ int cursor_init(Cursor* self, PyObject* args, PyObject* kwargs)
Py_INCREF(Py_None);
self->row_factory = Py_None;
- if (!check_thread(self->connection)) {
+ if (!pysqlite_check_thread(self->connection)) {
return -1;
}
return 0;
}
-void cursor_dealloc(Cursor* self)
+void pysqlite_cursor_dealloc(pysqlite_Cursor* self)
{
int rc;
/* Reset the statement if the user has not closed the cursor */
if (self->statement) {
- rc = statement_reset(self->statement);
+ rc = pysqlite_statement_reset(self->statement);
Py_DECREF(self->statement);
}
@@ -137,7 +137,7 @@ void cursor_dealloc(Cursor* self)
self->ob_type->tp_free((PyObject*)self);
}
-PyObject* _get_converter(PyObject* key)
+PyObject* _pysqlite_get_converter(PyObject* key)
{
PyObject* upcase_key;
PyObject* retval;
@@ -153,7 +153,7 @@ PyObject* _get_converter(PyObject* key)
return retval;
}
-int build_row_cast_map(Cursor* self)
+int pysqlite_build_row_cast_map(pysqlite_Cursor* self)
{
int i;
const char* type_start = (const char*)-1;
@@ -175,7 +175,7 @@ int build_row_cast_map(Cursor* self)
for (i = 0; i < sqlite3_column_count(self->statement->st); i++) {
converter = NULL;
- if (self->connection->detect_types | PARSE_COLNAMES) {
+ if (self->connection->detect_types & PARSE_COLNAMES) {
colname = sqlite3_column_name(self->statement->st, i);
if (colname) {
for (pos = colname; *pos != 0; pos++) {
@@ -190,7 +190,7 @@ int build_row_cast_map(Cursor* self)
break;
}
- converter = _get_converter(key);
+ converter = _pysqlite_get_converter(key);
Py_DECREF(key);
break;
}
@@ -198,7 +198,7 @@ int build_row_cast_map(Cursor* self)
}
}
- if (!converter && self->connection->detect_types | PARSE_DECLTYPES) {
+ if (!converter && self->connection->detect_types & PARSE_DECLTYPES) {
decltype = sqlite3_column_decltype(self->statement->st, i);
if (decltype) {
for (pos = decltype;;pos++) {
@@ -211,7 +211,7 @@ int build_row_cast_map(Cursor* self)
}
}
- converter = _get_converter(py_decltype);
+ converter = _pysqlite_get_converter(py_decltype);
Py_DECREF(py_decltype);
}
}
@@ -234,7 +234,7 @@ int build_row_cast_map(Cursor* self)
return 0;
}
-PyObject* _build_column_name(const char* colname)
+PyObject* _pysqlite_build_column_name(const char* colname)
{
const char* pos;
@@ -253,7 +253,7 @@ PyObject* _build_column_name(const char* colname)
}
}
-PyObject* unicode_from_string(const char* val_str, int optimize)
+PyObject* pysqlite_unicode_from_string(const char* val_str, int optimize)
{
const char* check;
int is_ascii = 0;
@@ -285,7 +285,7 @@ PyObject* unicode_from_string(const char* val_str, int optimize)
* Precondition:
* - sqlite3_step() has been called before and it returned SQLITE_ROW.
*/
-PyObject* _fetch_one_row(Cursor* self)
+PyObject* _pysqlite_fetch_one_row(pysqlite_Cursor* self)
{
int i, numcols;
PyObject* row;
@@ -356,10 +356,10 @@ PyObject* _fetch_one_row(Cursor* self)
} else if (coltype == SQLITE_TEXT) {
val_str = (const char*)sqlite3_column_text(self->statement->st, i);
if ((self->connection->text_factory == (PyObject*)&PyUnicode_Type)
- || (self->connection->text_factory == OptimizedUnicode)) {
+ || (self->connection->text_factory == pysqlite_OptimizedUnicode)) {
- converted = unicode_from_string(val_str,
- self->connection->text_factory == OptimizedUnicode ? 1 : 0);
+ converted = pysqlite_unicode_from_string(val_str,
+ self->connection->text_factory == pysqlite_OptimizedUnicode ? 1 : 0);
if (!converted) {
colname = sqlite3_column_name(self->statement->st, i);
@@ -368,7 +368,7 @@ PyObject* _fetch_one_row(Cursor* self)
}
PyOS_snprintf(buf, sizeof(buf) - 1, "Could not decode to UTF-8 column '%s' with text '%s'",
colname , val_str);
- PyErr_SetString(OperationalError, buf);
+ PyErr_SetString(pysqlite_OperationalError, buf);
}
} else if (self->connection->text_factory == (PyObject*)&PyString_Type) {
converted = PyString_FromString(val_str);
@@ -406,7 +406,7 @@ PyObject* _fetch_one_row(Cursor* self)
return row;
}
-PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
+PyObject* _pysqlite_query_execute(pysqlite_Cursor* self, int multiple, PyObject* args)
{
PyObject* operation;
PyObject* operation_bytestr = NULL;
@@ -425,7 +425,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
PyObject* second_argument = NULL;
long rowcount = 0;
- if (!check_thread(self->connection) || !check_connection(self->connection)) {
+ if (!pysqlite_check_thread(self->connection) || !pysqlite_check_connection(self->connection)) {
return NULL;
}
@@ -492,7 +492,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
if (self->statement != NULL) {
/* There is an active statement */
- rc = statement_reset(self->statement);
+ rc = pysqlite_statement_reset(self->statement);
}
if (PyString_Check(operation)) {
@@ -525,7 +525,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
case STATEMENT_INSERT:
case STATEMENT_REPLACE:
if (!self->connection->inTransaction) {
- result = _connection_begin(self->connection);
+ result = _pysqlite_connection_begin(self->connection);
if (!result) {
goto error;
}
@@ -536,7 +536,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
/* it's a DDL statement or something similar
- we better COMMIT first so it works for all cases */
if (self->connection->inTransaction) {
- result = connection_commit(self->connection, NULL);
+ result = pysqlite_connection_commit(self->connection, NULL);
if (!result) {
goto error;
}
@@ -545,7 +545,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
break;
case STATEMENT_SELECT:
if (multiple) {
- PyErr_SetString(ProgrammingError,
+ PyErr_SetString(pysqlite_ProgrammingError,
"You cannot execute SELECT statements in executemany().");
goto error;
}
@@ -563,11 +563,11 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
}
if (self->statement) {
- (void)statement_reset(self->statement);
+ (void)pysqlite_statement_reset(self->statement);
Py_DECREF(self->statement);
}
- self->statement = (Statement*)cache_get(self->connection->statement_cache, func_args);
+ self->statement = (pysqlite_Statement*)pysqlite_cache_get(self->connection->statement_cache, func_args);
Py_DECREF(func_args);
if (!self->statement) {
@@ -576,19 +576,19 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
if (self->statement->in_use) {
Py_DECREF(self->statement);
- self->statement = PyObject_New(Statement, &StatementType);
+ self->statement = PyObject_New(pysqlite_Statement, &pysqlite_StatementType);
if (!self->statement) {
goto error;
}
- rc = statement_create(self->statement, self->connection, operation);
+ rc = pysqlite_statement_create(self->statement, self->connection, operation);
if (rc != SQLITE_OK) {
self->statement = 0;
goto error;
}
}
- statement_reset(self->statement);
- statement_mark_dirty(self->statement);
+ pysqlite_statement_reset(self->statement);
+ pysqlite_statement_mark_dirty(self->statement);
while (1) {
parameters = PyIter_Next(parameters_iter);
@@ -596,27 +596,37 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
break;
}
- statement_mark_dirty(self->statement);
+ pysqlite_statement_mark_dirty(self->statement);
- statement_bind_parameters(self->statement, parameters);
+ pysqlite_statement_bind_parameters(self->statement, parameters);
if (PyErr_Occurred()) {
goto error;
}
- if (build_row_cast_map(self) != 0) {
- PyErr_SetString(OperationalError, "Error while building row_cast_map");
+ if (pysqlite_build_row_cast_map(self) != 0) {
+ PyErr_SetString(pysqlite_OperationalError, "Error while building row_cast_map");
goto error;
}
- rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
- if (rc != SQLITE_DONE && rc != SQLITE_ROW) {
- rc = statement_reset(self->statement);
+ /* Keep trying the SQL statement until the schema stops changing. */
+ while (1) {
+ /* Actually execute the SQL statement. */
+ rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+ if (rc == SQLITE_DONE || rc == SQLITE_ROW) {
+ /* If it worked, let's get out of the loop */
+ break;
+ }
+ /* Something went wrong. Re-set the statement and try again. */
+ rc = pysqlite_statement_reset(self->statement);
if (rc == SQLITE_SCHEMA) {
- rc = statement_recompile(self->statement, parameters);
+ /* If this was a result of the schema changing, let's try
+ again. */
+ rc = pysqlite_statement_recompile(self->statement, parameters);
if (rc == SQLITE_OK) {
- rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+ continue;
} else {
- _seterror(self->connection->db);
+ /* If the database gave us an error, promote it to Python. */
+ _pysqlite_seterror(self->connection->db);
goto error;
}
} else {
@@ -628,7 +638,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
PyErr_Clear();
}
}
- _seterror(self->connection->db);
+ _pysqlite_seterror(self->connection->db);
goto error;
}
}
@@ -649,7 +659,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
if (!descriptor) {
goto error;
}
- PyTuple_SetItem(descriptor, 0, _build_column_name(sqlite3_column_name(self->statement->st, i)));
+ PyTuple_SetItem(descriptor, 0, _pysqlite_build_column_name(sqlite3_column_name(self->statement->st, i)));
Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 1, Py_None);
Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 2, Py_None);
Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 3, Py_None);
@@ -663,13 +673,13 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
if (rc == SQLITE_ROW) {
if (multiple) {
- PyErr_SetString(ProgrammingError, "executemany() can only execute DML statements.");
+ PyErr_SetString(pysqlite_ProgrammingError, "executemany() can only execute DML statements.");
goto error;
}
- self->next_row = _fetch_one_row(self);
+ self->next_row = _pysqlite_fetch_one_row(self);
} else if (rc == SQLITE_DONE && !multiple) {
- statement_reset(self->statement);
+ pysqlite_statement_reset(self->statement);
Py_DECREF(self->statement);
self->statement = 0;
}
@@ -698,7 +708,7 @@ PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
}
if (multiple) {
- rc = statement_reset(self->statement);
+ rc = pysqlite_statement_reset(self->statement);
}
Py_XDECREF(parameters);
}
@@ -717,17 +727,17 @@ error:
}
}
-PyObject* cursor_execute(Cursor* self, PyObject* args)
+PyObject* pysqlite_cursor_execute(pysqlite_Cursor* self, PyObject* args)
{
- return _query_execute(self, 0, args);
+ return _pysqlite_query_execute(self, 0, args);
}
-PyObject* cursor_executemany(Cursor* self, PyObject* args)
+PyObject* pysqlite_cursor_executemany(pysqlite_Cursor* self, PyObject* args)
{
- return _query_execute(self, 1, args);
+ return _pysqlite_query_execute(self, 1, args);
}
-PyObject* cursor_executescript(Cursor* self, PyObject* args)
+PyObject* pysqlite_cursor_executescript(pysqlite_Cursor* self, PyObject* args)
{
PyObject* script_obj;
PyObject* script_str = NULL;
@@ -741,7 +751,7 @@ PyObject* cursor_executescript(Cursor* self, PyObject* args)
return NULL;
}
- if (!check_thread(self->connection) || !check_connection(self->connection)) {
+ if (!pysqlite_check_thread(self->connection) || !pysqlite_check_connection(self->connection)) {
return NULL;
}
@@ -760,7 +770,7 @@ PyObject* cursor_executescript(Cursor* self, PyObject* args)
}
/* commit first */
- result = connection_commit(self->connection, NULL);
+ result = pysqlite_connection_commit(self->connection, NULL);
if (!result) {
goto error;
}
@@ -778,7 +788,7 @@ PyObject* cursor_executescript(Cursor* self, PyObject* args)
&statement,
&script_cstr);
if (rc != SQLITE_OK) {
- _seterror(self->connection->db);
+ _pysqlite_seterror(self->connection->db);
goto error;
}
@@ -790,13 +800,13 @@ PyObject* cursor_executescript(Cursor* self, PyObject* args)
if (rc != SQLITE_DONE) {
(void)sqlite3_finalize(statement);
- _seterror(self->connection->db);
+ _pysqlite_seterror(self->connection->db);
goto error;
}
rc = sqlite3_finalize(statement);
if (rc != SQLITE_OK) {
- _seterror(self->connection->db);
+ _pysqlite_seterror(self->connection->db);
goto error;
}
}
@@ -805,7 +815,7 @@ error:
Py_XDECREF(script_str);
if (!statement_completed) {
- PyErr_SetString(ProgrammingError, "you did not provide a complete SQL statement");
+ PyErr_SetString(pysqlite_ProgrammingError, "you did not provide a complete SQL statement");
}
if (PyErr_Occurred()) {
@@ -816,25 +826,25 @@ error:
}
}
-PyObject* cursor_getiter(Cursor *self)
+PyObject* pysqlite_cursor_getiter(pysqlite_Cursor *self)
{
Py_INCREF(self);
return (PyObject*)self;
}
-PyObject* cursor_iternext(Cursor *self)
+PyObject* pysqlite_cursor_iternext(pysqlite_Cursor *self)
{
PyObject* next_row_tuple;
PyObject* next_row;
int rc;
- if (!check_thread(self->connection) || !check_connection(self->connection)) {
+ if (!pysqlite_check_thread(self->connection) || !pysqlite_check_connection(self->connection)) {
return NULL;
}
if (!self->next_row) {
if (self->statement) {
- (void)statement_reset(self->statement);
+ (void)pysqlite_statement_reset(self->statement);
Py_DECREF(self->statement);
self->statement = NULL;
}
@@ -851,25 +861,27 @@ PyObject* cursor_iternext(Cursor *self)
next_row = next_row_tuple;
}
- rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
- if (rc != SQLITE_DONE && rc != SQLITE_ROW) {
- Py_DECREF(next_row);
- _seterror(self->connection->db);
- return NULL;
- }
+ if (self->statement) {
+ rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+ if (rc != SQLITE_DONE && rc != SQLITE_ROW) {
+ Py_DECREF(next_row);
+ _pysqlite_seterror(self->connection->db);
+ return NULL;
+ }
- if (rc == SQLITE_ROW) {
- self->next_row = _fetch_one_row(self);
+ if (rc == SQLITE_ROW) {
+ self->next_row = _pysqlite_fetch_one_row(self);
+ }
}
return next_row;
}
-PyObject* cursor_fetchone(Cursor* self, PyObject* args)
+PyObject* pysqlite_cursor_fetchone(pysqlite_Cursor* self, PyObject* args)
{
PyObject* row;
- row = cursor_iternext(self);
+ row = pysqlite_cursor_iternext(self);
if (!row && !PyErr_Occurred()) {
Py_INCREF(Py_None);
return Py_None;
@@ -878,7 +890,7 @@ PyObject* cursor_fetchone(Cursor* self, PyObject* args)
return row;
}
-PyObject* cursor_fetchmany(Cursor* self, PyObject* args)
+PyObject* pysqlite_cursor_fetchmany(pysqlite_Cursor* self, PyObject* args)
{
PyObject* row;
PyObject* list;
@@ -898,7 +910,7 @@ PyObject* cursor_fetchmany(Cursor* self, PyObject* args)
row = Py_None;
while (row) {
- row = cursor_iternext(self);
+ row = pysqlite_cursor_iternext(self);
if (row) {
PyList_Append(list, row);
Py_DECREF(row);
@@ -919,7 +931,7 @@ PyObject* cursor_fetchmany(Cursor* self, PyObject* args)
}
}
-PyObject* cursor_fetchall(Cursor* self, PyObject* args)
+PyObject* pysqlite_cursor_fetchall(pysqlite_Cursor* self, PyObject* args)
{
PyObject* row;
PyObject* list;
@@ -933,7 +945,7 @@ PyObject* cursor_fetchall(Cursor* self, PyObject* args)
row = (PyObject*)Py_None;
while (row) {
- row = cursor_iternext(self);
+ row = pysqlite_cursor_iternext(self);
if (row) {
PyList_Append(list, row);
Py_DECREF(row);
@@ -948,21 +960,21 @@ PyObject* cursor_fetchall(Cursor* self, PyObject* args)
}
}
-PyObject* pysqlite_noop(Connection* self, PyObject* args)
+PyObject* pysqlite_noop(pysqlite_Connection* self, PyObject* args)
{
/* don't care, return None */
Py_INCREF(Py_None);
return Py_None;
}
-PyObject* cursor_close(Cursor* self, PyObject* args)
+PyObject* pysqlite_cursor_close(pysqlite_Cursor* self, PyObject* args)
{
- if (!check_thread(self->connection) || !check_connection(self->connection)) {
+ if (!pysqlite_check_thread(self->connection) || !pysqlite_check_connection(self->connection)) {
return NULL;
}
if (self->statement) {
- (void)statement_reset(self->statement);
+ (void)pysqlite_statement_reset(self->statement);
Py_DECREF(self->statement);
self->statement = 0;
}
@@ -972,19 +984,19 @@ PyObject* cursor_close(Cursor* self, PyObject* args)
}
static PyMethodDef cursor_methods[] = {
- {"execute", (PyCFunction)cursor_execute, METH_VARARGS,
+ {"execute", (PyCFunction)pysqlite_cursor_execute, METH_VARARGS,
PyDoc_STR("Executes a SQL statement.")},
- {"executemany", (PyCFunction)cursor_executemany, METH_VARARGS,
+ {"executemany", (PyCFunction)pysqlite_cursor_executemany, METH_VARARGS,
PyDoc_STR("Repeatedly executes a SQL statement.")},
- {"executescript", (PyCFunction)cursor_executescript, METH_VARARGS,
+ {"executescript", (PyCFunction)pysqlite_cursor_executescript, METH_VARARGS,
PyDoc_STR("Executes a multiple SQL statements at once. Non-standard.")},
- {"fetchone", (PyCFunction)cursor_fetchone, METH_NOARGS,
+ {"fetchone", (PyCFunction)pysqlite_cursor_fetchone, METH_NOARGS,
PyDoc_STR("Fetches several rows from the resultset.")},
- {"fetchmany", (PyCFunction)cursor_fetchmany, METH_VARARGS,
+ {"fetchmany", (PyCFunction)pysqlite_cursor_fetchmany, METH_VARARGS,
PyDoc_STR("Fetches all rows from the resultset.")},
- {"fetchall", (PyCFunction)cursor_fetchall, METH_NOARGS,
+ {"fetchall", (PyCFunction)pysqlite_cursor_fetchall, METH_NOARGS,
PyDoc_STR("Fetches one row from the resultset.")},
- {"close", (PyCFunction)cursor_close, METH_NOARGS,
+ {"close", (PyCFunction)pysqlite_cursor_close, METH_NOARGS,
PyDoc_STR("Closes the cursor.")},
{"setinputsizes", (PyCFunction)pysqlite_noop, METH_VARARGS,
PyDoc_STR("Required by DB-API. Does nothing in pysqlite.")},
@@ -995,25 +1007,25 @@ static PyMethodDef cursor_methods[] = {
static struct PyMemberDef cursor_members[] =
{
- {"connection", T_OBJECT, offsetof(Cursor, connection), RO},
- {"description", T_OBJECT, offsetof(Cursor, description), RO},
- {"arraysize", T_INT, offsetof(Cursor, arraysize), 0},
- {"lastrowid", T_OBJECT, offsetof(Cursor, lastrowid), RO},
- {"rowcount", T_OBJECT, offsetof(Cursor, rowcount), RO},
- {"row_factory", T_OBJECT, offsetof(Cursor, row_factory), 0},
+ {"connection", T_OBJECT, offsetof(pysqlite_Cursor, connection), RO},
+ {"description", T_OBJECT, offsetof(pysqlite_Cursor, description), RO},
+ {"arraysize", T_INT, offsetof(pysqlite_Cursor, arraysize), 0},
+ {"lastrowid", T_OBJECT, offsetof(pysqlite_Cursor, lastrowid), RO},
+ {"rowcount", T_OBJECT, offsetof(pysqlite_Cursor, rowcount), RO},
+ {"row_factory", T_OBJECT, offsetof(pysqlite_Cursor, row_factory), 0},
{NULL}
};
static char cursor_doc[] =
PyDoc_STR("SQLite database cursor class.");
-PyTypeObject CursorType = {
+PyTypeObject pysqlite_CursorType = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MODULE_NAME ".Cursor", /* tp_name */
- sizeof(Cursor), /* tp_basicsize */
+ sizeof(pysqlite_Cursor), /* tp_basicsize */
0, /* tp_itemsize */
- (destructor)cursor_dealloc, /* tp_dealloc */
+ (destructor)pysqlite_cursor_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -1034,8 +1046,8 @@ PyTypeObject CursorType = {
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
- (getiterfunc)cursor_getiter, /* tp_iter */
- (iternextfunc)cursor_iternext, /* tp_iternext */
+ (getiterfunc)pysqlite_cursor_getiter, /* tp_iter */
+ (iternextfunc)pysqlite_cursor_iternext, /* tp_iternext */
cursor_methods, /* tp_methods */
cursor_members, /* tp_members */
0, /* tp_getset */
@@ -1044,14 +1056,14 @@ PyTypeObject CursorType = {
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
- (initproc)cursor_init, /* tp_init */
+ (initproc)pysqlite_cursor_init, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0 /* tp_free */
};
-extern int cursor_setup_types(void)
+extern int pysqlite_cursor_setup_types(void)
{
- CursorType.tp_new = PyType_GenericNew;
- return PyType_Ready(&CursorType);
+ pysqlite_CursorType.tp_new = PyType_GenericNew;
+ return PyType_Ready(&pysqlite_CursorType);
}
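
Beyond the renaming, cursor.c picks up two behavioral fixes: the detect_types flags are now tested with a bitwise AND instead of OR, so PARSE_COLNAMES and PARSE_DECLTYPES are only honored when actually requested, and statement execution now retries in a loop after SQLITE_SCHEMA, recompiling until the schema stops changing. A minimal sketch of the declared-type path that pysqlite_build_row_cast_map() and _pysqlite_get_converter() implement; the BOOL converter is illustrative, and converter names match case-insensitively because they are stored upper-cased:

    import sqlite3

    # The converter receives the raw column text; map "1"/"0" to a bool.
    sqlite3.register_converter("bool", lambda v: bool(int(v)))

    con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
    con.execute("CREATE TABLE t (flag BOOL)")
    con.execute("INSERT INTO t VALUES (1)")
    value = con.execute("SELECT flag FROM t").fetchone()[0]
    assert value is True             # converted by the registered callable
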
diff --git a/Modules/_sqlite/cursor.h b/Modules/_sqlite/cursor.h
index 831ff81..5fce64a 100644
--- a/Modules/_sqlite/cursor.h
+++ b/Modules/_sqlite/cursor.h
@@ -32,40 +32,40 @@
typedef struct
{
PyObject_HEAD
- Connection* connection;
+ pysqlite_Connection* connection;
PyObject* description;
PyObject* row_cast_map;
int arraysize;
PyObject* lastrowid;
PyObject* rowcount;
PyObject* row_factory;
- Statement* statement;
+ pysqlite_Statement* statement;
/* the next row to be returned, NULL if no next row available */
PyObject* next_row;
-} Cursor;
+} pysqlite_Cursor;
typedef enum {
STATEMENT_INVALID, STATEMENT_INSERT, STATEMENT_DELETE,
STATEMENT_UPDATE, STATEMENT_REPLACE, STATEMENT_SELECT,
STATEMENT_OTHER
-} StatementKind;
+} pysqlite_StatementKind;
-extern PyTypeObject CursorType;
+extern PyTypeObject pysqlite_CursorType;
-int cursor_init(Cursor* self, PyObject* args, PyObject* kwargs);
-void cursor_dealloc(Cursor* self);
-PyObject* cursor_execute(Cursor* self, PyObject* args);
-PyObject* cursor_executemany(Cursor* self, PyObject* args);
-PyObject* cursor_getiter(Cursor *self);
-PyObject* cursor_iternext(Cursor *self);
-PyObject* cursor_fetchone(Cursor* self, PyObject* args);
-PyObject* cursor_fetchmany(Cursor* self, PyObject* args);
-PyObject* cursor_fetchall(Cursor* self, PyObject* args);
-PyObject* pysqlite_noop(Connection* self, PyObject* args);
-PyObject* cursor_close(Cursor* self, PyObject* args);
+int pysqlite_cursor_init(pysqlite_Cursor* self, PyObject* args, PyObject* kwargs);
+void pysqlite_cursor_dealloc(pysqlite_Cursor* self);
+PyObject* pysqlite_cursor_execute(pysqlite_Cursor* self, PyObject* args);
+PyObject* pysqlite_cursor_executemany(pysqlite_Cursor* self, PyObject* args);
+PyObject* pysqlite_cursor_getiter(pysqlite_Cursor *self);
+PyObject* pysqlite_cursor_iternext(pysqlite_Cursor *self);
+PyObject* pysqlite_cursor_fetchone(pysqlite_Cursor* self, PyObject* args);
+PyObject* pysqlite_cursor_fetchmany(pysqlite_Cursor* self, PyObject* args);
+PyObject* pysqlite_cursor_fetchall(pysqlite_Cursor* self, PyObject* args);
+PyObject* pysqlite_noop(pysqlite_Connection* self, PyObject* args);
+PyObject* pysqlite_cursor_close(pysqlite_Cursor* self, PyObject* args);
-int cursor_setup_types(void);
+int pysqlite_cursor_setup_types(void);
#define UNKNOWN (-1)
#endif
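
The pysqlite_StatementKind enum declared above drives the implicit transaction handling in _pysqlite_query_execute(): INSERT, UPDATE, DELETE and REPLACE open a transaction through _pysqlite_connection_begin() when none is active, while other statement kinds (DDL among them) first commit any pending transaction and then run outside of one. A rough sketch of the user-visible effect, assuming an in-memory database and the default isolation level:

    import sqlite3

    con = sqlite3.connect(":memory:")            # default: deferred implicit BEGIN
    con.execute("CREATE TABLE t (x INTEGER)")    # DDL: runs outside a transaction
    con.execute("INSERT INTO t VALUES (1)")      # DML: implicit BEGIN, not committed yet
    con.rollback()                               # discards the INSERT, keeps the table
    assert con.execute("SELECT count(*) FROM t").fetchone()[0] == 0

    con.isolation_level = None                   # autocommit: no implicit BEGIN
    con.execute("INSERT INTO t VALUES (1)")
    con.rollback()                               # no open transaction, nothing to undo
    assert con.execute("SELECT count(*) FROM t").fetchone()[0] == 1
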
diff --git a/Modules/_sqlite/microprotocols.c b/Modules/_sqlite/microprotocols.c
index 4956ac0..5a78917 100644
--- a/Modules/_sqlite/microprotocols.c
+++ b/Modules/_sqlite/microprotocols.c
@@ -57,7 +57,7 @@ microprotocols_add(PyTypeObject *type, PyObject *proto, PyObject *cast)
PyObject* key;
int rc;
- if (proto == NULL) proto = (PyObject*)&SQLitePrepareProtocolType;
+ if (proto == NULL) proto = (PyObject*)&pysqlite_PrepareProtocolType;
key = Py_BuildValue("(OO)", (PyObject*)type, proto);
if (!key) {
@@ -78,7 +78,7 @@ microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
PyObject *adapter, *key;
/* we don't check for exact type conformance as specified in PEP 246
- because the SQLitePrepareProtocolType type is abstract and there is no
+ because the pysqlite_PrepareProtocolType type is abstract and there is no
way to get a quotable object to be its instance */
/* look for an adapter in the registry */
@@ -125,17 +125,17 @@ microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
}
/* else set the right exception and return NULL */
- PyErr_SetString(ProgrammingError, "can't adapt");
+ PyErr_SetString(pysqlite_ProgrammingError, "can't adapt");
return NULL;
}
/** module-level functions **/
PyObject *
-psyco_microprotocols_adapt(Cursor *self, PyObject *args)
+psyco_microprotocols_adapt(pysqlite_Cursor *self, PyObject *args)
{
PyObject *obj, *alt = NULL;
- PyObject *proto = (PyObject*)&SQLitePrepareProtocolType;
+ PyObject *proto = (PyObject*)&pysqlite_PrepareProtocolType;
if (!PyArg_ParseTuple(args, "O|OO", &obj, &proto, &alt)) return NULL;
return microprotocols_adapt(obj, proto, alt);
diff --git a/Modules/_sqlite/microprotocols.h b/Modules/_sqlite/microprotocols.h
index f601bb3..d84ec93 100644
--- a/Modules/_sqlite/microprotocols.h
+++ b/Modules/_sqlite/microprotocols.h
@@ -52,7 +52,7 @@ extern PyObject *microprotocols_adapt(
PyObject *obj, PyObject *proto, PyObject *alt);
extern PyObject *
- psyco_microprotocols_adapt(Cursor* self, PyObject *args);
+ psyco_microprotocols_adapt(pysqlite_Cursor* self, PyObject *args);
#define psyco_microprotocols_adapt_doc \
"adapt(obj, protocol, alternate) -> adapt obj to given protocol. Non-standard."
diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c
index 606454c..8844d81 100644
--- a/Modules/_sqlite/module.c
+++ b/Modules/_sqlite/module.c
@@ -35,9 +35,9 @@
/* static objects at module-level */
-PyObject* Error, *Warning, *InterfaceError, *DatabaseError, *InternalError,
- *OperationalError, *ProgrammingError, *IntegrityError, *DataError,
- *NotSupportedError, *OptimizedUnicode;
+PyObject* pysqlite_Error, *pysqlite_Warning, *pysqlite_InterfaceError, *pysqlite_DatabaseError,
+ *pysqlite_InternalError, *pysqlite_OperationalError, *pysqlite_ProgrammingError,
+ *pysqlite_IntegrityError, *pysqlite_DataError, *pysqlite_NotSupportedError, *pysqlite_OptimizedUnicode;
PyObject* converters;
int _enable_callback_tracebacks;
@@ -67,7 +67,7 @@ static PyObject* module_connect(PyObject* self, PyObject* args, PyObject*
}
if (factory == NULL) {
- factory = (PyObject*)&ConnectionType;
+ factory = (PyObject*)&pysqlite_ConnectionType;
}
result = PyObject_Call(factory, args, kwargs);
@@ -115,7 +115,7 @@ static PyObject* module_enable_shared_cache(PyObject* self, PyObject* args, PyOb
rc = sqlite3_enable_shared_cache(do_enable);
if (rc != SQLITE_OK) {
- PyErr_SetString(OperationalError, "Changing the shared_cache flag failed");
+ PyErr_SetString(pysqlite_OperationalError, "Changing the shared_cache flag failed");
return NULL;
} else {
Py_INCREF(Py_None);
@@ -133,7 +133,7 @@ static PyObject* module_register_adapter(PyObject* self, PyObject* args, PyObjec
return NULL;
}
- microprotocols_add(type, (PyObject*)&SQLitePrepareProtocolType, caster);
+ microprotocols_add(type, (PyObject*)&pysqlite_PrepareProtocolType, caster);
Py_INCREF(Py_None);
return Py_None;
@@ -141,36 +141,29 @@ static PyObject* module_register_adapter(PyObject* self, PyObject* args, PyObjec
static PyObject* module_register_converter(PyObject* self, PyObject* args, PyObject* kwargs)
{
- char* orig_name;
- char* name = NULL;
- char* c;
+ PyObject* orig_name;
+ PyObject* name = NULL;
PyObject* callable;
PyObject* retval = NULL;
- if (!PyArg_ParseTuple(args, "sO", &orig_name, &callable)) {
+ if (!PyArg_ParseTuple(args, "SO", &orig_name, &callable)) {
return NULL;
}
- /* convert the name to lowercase */
- name = PyMem_Malloc(strlen(orig_name) + 2);
+ /* convert the name to upper case */
+ name = PyObject_CallMethod(orig_name, "upper", "");
if (!name) {
goto error;
}
- strcpy(name, orig_name);
- for (c = name; *c != (char)0; c++) {
- *c = (*c) & 0xDF;
- }
- if (PyDict_SetItemString(converters, name, callable) != 0) {
+ if (PyDict_SetItem(converters, name, callable) != 0) {
goto error;
}
Py_INCREF(Py_None);
retval = Py_None;
error:
- if (name) {
- PyMem_Free(name);
- }
+ Py_XDECREF(name);
return retval;
}
@@ -184,7 +177,7 @@ static PyObject* enable_callback_tracebacks(PyObject* self, PyObject* args, PyOb
return Py_None;
}
-void converters_init(PyObject* dict)
+static void converters_init(PyObject* dict)
{
converters = PyDict_New();
if (!converters) {
@@ -265,28 +258,28 @@ PyMODINIT_FUNC init_sqlite3(void)
module = Py_InitModule("_sqlite3", module_methods);
if (!module ||
- (row_setup_types() < 0) ||
- (cursor_setup_types() < 0) ||
- (connection_setup_types() < 0) ||
- (cache_setup_types() < 0) ||
- (statement_setup_types() < 0) ||
- (prepare_protocol_setup_types() < 0)
+ (pysqlite_row_setup_types() < 0) ||
+ (pysqlite_cursor_setup_types() < 0) ||
+ (pysqlite_connection_setup_types() < 0) ||
+ (pysqlite_cache_setup_types() < 0) ||
+ (pysqlite_statement_setup_types() < 0) ||
+ (pysqlite_prepare_protocol_setup_types() < 0)
) {
return;
}
- Py_INCREF(&ConnectionType);
- PyModule_AddObject(module, "Connection", (PyObject*) &ConnectionType);
- Py_INCREF(&CursorType);
- PyModule_AddObject(module, "Cursor", (PyObject*) &CursorType);
- Py_INCREF(&CacheType);
- PyModule_AddObject(module, "Statement", (PyObject*)&StatementType);
- Py_INCREF(&StatementType);
- PyModule_AddObject(module, "Cache", (PyObject*) &CacheType);
- Py_INCREF(&SQLitePrepareProtocolType);
- PyModule_AddObject(module, "PrepareProtocol", (PyObject*) &SQLitePrepareProtocolType);
- Py_INCREF(&RowType);
- PyModule_AddObject(module, "Row", (PyObject*) &RowType);
+ Py_INCREF(&pysqlite_ConnectionType);
+ PyModule_AddObject(module, "Connection", (PyObject*) &pysqlite_ConnectionType);
+ Py_INCREF(&pysqlite_CursorType);
+ PyModule_AddObject(module, "Cursor", (PyObject*) &pysqlite_CursorType);
+ Py_INCREF(&pysqlite_CacheType);
+ PyModule_AddObject(module, "Statement", (PyObject*)&pysqlite_StatementType);
+ Py_INCREF(&pysqlite_StatementType);
+ PyModule_AddObject(module, "Cache", (PyObject*) &pysqlite_CacheType);
+ Py_INCREF(&pysqlite_PrepareProtocolType);
+ PyModule_AddObject(module, "PrepareProtocol", (PyObject*) &pysqlite_PrepareProtocolType);
+ Py_INCREF(&pysqlite_RowType);
+ PyModule_AddObject(module, "Row", (PyObject*) &pysqlite_RowType);
if (!(dict = PyModule_GetDict(module))) {
goto error;
@@ -294,67 +287,67 @@ PyMODINIT_FUNC init_sqlite3(void)
/*** Create DB-API Exception hierarchy */
- if (!(Error = PyErr_NewException(MODULE_NAME ".Error", PyExc_StandardError, NULL))) {
+ if (!(pysqlite_Error = PyErr_NewException(MODULE_NAME ".Error", PyExc_StandardError, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "Error", Error);
+ PyDict_SetItemString(dict, "Error", pysqlite_Error);
- if (!(Warning = PyErr_NewException(MODULE_NAME ".Warning", PyExc_StandardError, NULL))) {
+ if (!(pysqlite_Warning = PyErr_NewException(MODULE_NAME ".Warning", PyExc_StandardError, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "Warning", Warning);
+ PyDict_SetItemString(dict, "Warning", pysqlite_Warning);
/* Error subclasses */
- if (!(InterfaceError = PyErr_NewException(MODULE_NAME ".InterfaceError", Error, NULL))) {
+ if (!(pysqlite_InterfaceError = PyErr_NewException(MODULE_NAME ".InterfaceError", pysqlite_Error, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "InterfaceError", InterfaceError);
+ PyDict_SetItemString(dict, "InterfaceError", pysqlite_InterfaceError);
- if (!(DatabaseError = PyErr_NewException(MODULE_NAME ".DatabaseError", Error, NULL))) {
+ if (!(pysqlite_DatabaseError = PyErr_NewException(MODULE_NAME ".DatabaseError", pysqlite_Error, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "DatabaseError", DatabaseError);
+ PyDict_SetItemString(dict, "DatabaseError", pysqlite_DatabaseError);
- /* DatabaseError subclasses */
+ /* pysqlite_DatabaseError subclasses */
- if (!(InternalError = PyErr_NewException(MODULE_NAME ".InternalError", DatabaseError, NULL))) {
+ if (!(pysqlite_InternalError = PyErr_NewException(MODULE_NAME ".InternalError", pysqlite_DatabaseError, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "InternalError", InternalError);
+ PyDict_SetItemString(dict, "InternalError", pysqlite_InternalError);
- if (!(OperationalError = PyErr_NewException(MODULE_NAME ".OperationalError", DatabaseError, NULL))) {
+ if (!(pysqlite_OperationalError = PyErr_NewException(MODULE_NAME ".OperationalError", pysqlite_DatabaseError, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "OperationalError", OperationalError);
+ PyDict_SetItemString(dict, "OperationalError", pysqlite_OperationalError);
- if (!(ProgrammingError = PyErr_NewException(MODULE_NAME ".ProgrammingError", DatabaseError, NULL))) {
+ if (!(pysqlite_ProgrammingError = PyErr_NewException(MODULE_NAME ".ProgrammingError", pysqlite_DatabaseError, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "ProgrammingError", ProgrammingError);
+ PyDict_SetItemString(dict, "ProgrammingError", pysqlite_ProgrammingError);
- if (!(IntegrityError = PyErr_NewException(MODULE_NAME ".IntegrityError", DatabaseError,NULL))) {
+ if (!(pysqlite_IntegrityError = PyErr_NewException(MODULE_NAME ".IntegrityError", pysqlite_DatabaseError,NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "IntegrityError", IntegrityError);
+ PyDict_SetItemString(dict, "IntegrityError", pysqlite_IntegrityError);
- if (!(DataError = PyErr_NewException(MODULE_NAME ".DataError", DatabaseError, NULL))) {
+ if (!(pysqlite_DataError = PyErr_NewException(MODULE_NAME ".DataError", pysqlite_DatabaseError, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "DataError", DataError);
+ PyDict_SetItemString(dict, "DataError", pysqlite_DataError);
- if (!(NotSupportedError = PyErr_NewException(MODULE_NAME ".NotSupportedError", DatabaseError, NULL))) {
+ if (!(pysqlite_NotSupportedError = PyErr_NewException(MODULE_NAME ".NotSupportedError", pysqlite_DatabaseError, NULL))) {
goto error;
}
- PyDict_SetItemString(dict, "NotSupportedError", NotSupportedError);
+ PyDict_SetItemString(dict, "NotSupportedError", pysqlite_NotSupportedError);
- /* We just need "something" unique for OptimizedUnicode. It does not really
+ /* We just need "something" unique for pysqlite_OptimizedUnicode. It does not really
* need to be a string subclass. Just anything that can act as a special
* marker for us. So I pulled PyCell_Type out of my magic hat.
*/
Py_INCREF((PyObject*)&PyCell_Type);
- OptimizedUnicode = (PyObject*)&PyCell_Type;
- PyDict_SetItemString(dict, "OptimizedUnicode", OptimizedUnicode);
+ pysqlite_OptimizedUnicode = (PyObject*)&PyCell_Type;
+ PyDict_SetItemString(dict, "OptimizedUnicode", pysqlite_OptimizedUnicode);
/* Set integer constants */
for (i = 0; _int_constants[i].constant_name != 0; i++) {
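
The renaming above only changes the C-level identifiers; the module-level DB-API exception hierarchy that init_sqlite3() installs is unchanged as seen from Python. A short sketch of how that hierarchy is typically used (the missing-table query is illustrative):

    import sqlite3

    # DB-API 2.0 hierarchy as installed by init_sqlite3()
    assert issubclass(sqlite3.Error, Exception)
    assert issubclass(sqlite3.DatabaseError, sqlite3.Error)
    assert issubclass(sqlite3.OperationalError, sqlite3.DatabaseError)

    con = sqlite3.connect(":memory:")
    try:
        con.execute("select * from no_such_table")
    except sqlite3.OperationalError as exc:
        print("caught: %s" % exc)   # no such table: no_such_table
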
diff --git a/Modules/_sqlite/module.h b/Modules/_sqlite/module.h
index e514bd1..ada6b4c 100644
--- a/Modules/_sqlite/module.h
+++ b/Modules/_sqlite/module.h
@@ -25,20 +25,20 @@
#define PYSQLITE_MODULE_H
#include "Python.h"
-#define PYSQLITE_VERSION "2.3.2"
-
-extern PyObject* Error;
-extern PyObject* Warning;
-extern PyObject* InterfaceError;
-extern PyObject* DatabaseError;
-extern PyObject* InternalError;
-extern PyObject* OperationalError;
-extern PyObject* ProgrammingError;
-extern PyObject* IntegrityError;
-extern PyObject* DataError;
-extern PyObject* NotSupportedError;
-
-extern PyObject* OptimizedUnicode;
+#define PYSQLITE_VERSION "2.3.3"
+
+extern PyObject* pysqlite_Error;
+extern PyObject* pysqlite_Warning;
+extern PyObject* pysqlite_InterfaceError;
+extern PyObject* pysqlite_DatabaseError;
+extern PyObject* pysqlite_InternalError;
+extern PyObject* pysqlite_OperationalError;
+extern PyObject* pysqlite_ProgrammingError;
+extern PyObject* pysqlite_IntegrityError;
+extern PyObject* pysqlite_DataError;
+extern PyObject* pysqlite_NotSupportedError;
+
+extern PyObject* pysqlite_OptimizedUnicode;
/* the functions time.time() and time.sleep() */
extern PyObject* time_time;
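
Besides the rename, the header bumps PYSQLITE_VERSION to 2.3.3. That constant is what the extension module publishes as its own version string, distinct from the version of the SQLite library it links against; a quick sketch, assuming the usual sqlite3 module attributes:

    import sqlite3

    print(sqlite3.version)          # version of the pysqlite bindings, e.g. '2.3.3'
    print(sqlite3.sqlite_version)   # version of the linked SQLite library
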
diff --git a/Modules/_sqlite/prepare_protocol.c b/Modules/_sqlite/prepare_protocol.c
index 26b663b..a8ca518 100644
--- a/Modules/_sqlite/prepare_protocol.c
+++ b/Modules/_sqlite/prepare_protocol.c
@@ -23,23 +23,23 @@
#include "prepare_protocol.h"
-int prepare_protocol_init(SQLitePrepareProtocol* self, PyObject* args, PyObject* kwargs)
+int pysqlite_prepare_protocol_init(pysqlite_PrepareProtocol* self, PyObject* args, PyObject* kwargs)
{
return 0;
}
-void prepare_protocol_dealloc(SQLitePrepareProtocol* self)
+void pysqlite_prepare_protocol_dealloc(pysqlite_PrepareProtocol* self)
{
self->ob_type->tp_free((PyObject*)self);
}
-PyTypeObject SQLitePrepareProtocolType= {
+PyTypeObject pysqlite_PrepareProtocolType= {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MODULE_NAME ".PrepareProtocol", /* tp_name */
- sizeof(SQLitePrepareProtocol), /* tp_basicsize */
+ sizeof(pysqlite_PrepareProtocol), /* tp_basicsize */
0, /* tp_itemsize */
- (destructor)prepare_protocol_dealloc, /* tp_dealloc */
+ (destructor)pysqlite_prepare_protocol_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -70,15 +70,15 @@ PyTypeObject SQLitePrepareProtocolType= {
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
- (initproc)prepare_protocol_init, /* tp_init */
+ (initproc)pysqlite_prepare_protocol_init, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0 /* tp_free */
};
-extern int prepare_protocol_setup_types(void)
+extern int pysqlite_prepare_protocol_setup_types(void)
{
- SQLitePrepareProtocolType.tp_new = PyType_GenericNew;
- SQLitePrepareProtocolType.ob_type= &PyType_Type;
- return PyType_Ready(&SQLitePrepareProtocolType);
+ pysqlite_PrepareProtocolType.tp_new = PyType_GenericNew;
+ pysqlite_PrepareProtocolType.ob_type= &PyType_Type;
+ return PyType_Ready(&pysqlite_PrepareProtocolType);
}
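
PrepareProtocol itself carries no state; it exists as the marker object handed to an object's __conform__() hook during parameter adaptation. A minimal sketch of that hook, following the documented adapter protocol (the Point class is illustrative):

    import sqlite3

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __conform__(self, protocol):
            # sqlite3 passes its PrepareProtocol marker when it needs an
            # SQL-storable representation of this object
            if protocol is sqlite3.PrepareProtocol:
                return "%f;%f" % (self.x, self.y)

    con = sqlite3.connect(":memory:")
    cur = con.execute("select ?", (Point(4.0, -3.2),))
    print(cur.fetchone()[0])   # 4.000000;-3.200000
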
diff --git a/Modules/_sqlite/prepare_protocol.h b/Modules/_sqlite/prepare_protocol.h
index 2fc4f61..4c1e4f3 100644
--- a/Modules/_sqlite/prepare_protocol.h
+++ b/Modules/_sqlite/prepare_protocol.h
@@ -28,14 +28,14 @@
typedef struct
{
PyObject_HEAD
-} SQLitePrepareProtocol;
+} pysqlite_PrepareProtocol;
-extern PyTypeObject SQLitePrepareProtocolType;
+extern PyTypeObject pysqlite_PrepareProtocolType;
-int prepare_protocol_init(SQLitePrepareProtocol* self, PyObject* args, PyObject* kwargs);
-void prepare_protocol_dealloc(SQLitePrepareProtocol* self);
+int pysqlite_prepare_protocol_init(pysqlite_PrepareProtocol* self, PyObject* args, PyObject* kwargs);
+void pysqlite_prepare_protocol_dealloc(pysqlite_PrepareProtocol* self);
-int prepare_protocol_setup_types(void);
+int pysqlite_prepare_protocol_setup_types(void);
#define UNKNOWN (-1)
#endif
diff --git a/Modules/_sqlite/row.c b/Modules/_sqlite/row.c
index 80b6135..7cfcfc3 100644
--- a/Modules/_sqlite/row.c
+++ b/Modules/_sqlite/row.c
@@ -25,7 +25,7 @@
#include "cursor.h"
#include "sqlitecompat.h"
-void row_dealloc(Row* self)
+void pysqlite_row_dealloc(pysqlite_Row* self)
{
Py_XDECREF(self->data);
Py_XDECREF(self->description);
@@ -33,10 +33,10 @@ void row_dealloc(Row* self)
self->ob_type->tp_free((PyObject*)self);
}
-int row_init(Row* self, PyObject* args, PyObject* kwargs)
+int pysqlite_row_init(pysqlite_Row* self, PyObject* args, PyObject* kwargs)
{
PyObject* data;
- Cursor* cursor;
+ pysqlite_Cursor* cursor;
self->data = 0;
self->description = 0;
@@ -45,7 +45,7 @@ int row_init(Row* self, PyObject* args, PyObject* kwargs)
return -1;
}
- if (!PyObject_IsInstance((PyObject*)cursor, (PyObject*)&CursorType)) {
+ if (!PyObject_IsInstance((PyObject*)cursor, (PyObject*)&pysqlite_CursorType)) {
PyErr_SetString(PyExc_TypeError, "instance of cursor required for first argument");
return -1;
}
@@ -64,7 +64,7 @@ int row_init(Row* self, PyObject* args, PyObject* kwargs)
return 0;
}
-PyObject* row_subscript(Row* self, PyObject* idx)
+PyObject* pysqlite_row_subscript(pysqlite_Row* self, PyObject* idx)
{
long _idx;
char* key;
@@ -133,32 +133,63 @@ PyObject* row_subscript(Row* self, PyObject* idx)
}
}
-Py_ssize_t row_length(Row* self, PyObject* args, PyObject* kwargs)
+Py_ssize_t pysqlite_row_length(pysqlite_Row* self, PyObject* args, PyObject* kwargs)
{
return PyTuple_GET_SIZE(self->data);
}
-static int row_print(Row* self, FILE *fp, int flags)
+PyObject* pysqlite_row_keys(pysqlite_Row* self, PyObject* args, PyObject* kwargs)
+{
+ PyObject* list;
+ int nitems, i;
+
+ list = PyList_New(0);
+ if (!list) {
+ return NULL;
+ }
+ nitems = PyTuple_Size(self->description);
+
+ for (i = 0; i < nitems; i++) {
+ if (PyList_Append(list, PyTuple_GET_ITEM(PyTuple_GET_ITEM(self->description, i), 0)) != 0) {
+ Py_DECREF(list);
+ return NULL;
+ }
+ }
+
+ return list;
+}
+
+static int pysqlite_row_print(pysqlite_Row* self, FILE *fp, int flags)
{
return (&PyTuple_Type)->tp_print(self->data, fp, flags);
}
+static PyObject* pysqlite_iter(pysqlite_Row* self)
+{
+ return PyObject_GetIter(self->data);
+}
-PyMappingMethods row_as_mapping = {
- /* mp_length */ (lenfunc)row_length,
- /* mp_subscript */ (binaryfunc)row_subscript,
+PyMappingMethods pysqlite_row_as_mapping = {
+ /* mp_length */ (lenfunc)pysqlite_row_length,
+ /* mp_subscript */ (binaryfunc)pysqlite_row_subscript,
/* mp_ass_subscript */ (objobjargproc)0,
};
+static PyMethodDef pysqlite_row_methods[] = {
+ {"keys", (PyCFunction)pysqlite_row_keys, METH_NOARGS,
+ PyDoc_STR("Returns the keys of the row.")},
+ {NULL, NULL}
+};
+
-PyTypeObject RowType = {
+PyTypeObject pysqlite_RowType = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MODULE_NAME ".Row", /* tp_name */
- sizeof(Row), /* tp_basicsize */
+ sizeof(pysqlite_Row), /* tp_basicsize */
0, /* tp_itemsize */
- (destructor)row_dealloc, /* tp_dealloc */
- (printfunc)row_print, /* tp_print */
+ (destructor)pysqlite_row_dealloc, /* tp_dealloc */
+ (printfunc)pysqlite_row_print, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
@@ -174,13 +205,13 @@ PyTypeObject RowType = {
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
- 0, /* tp_traverse */
+ (traverseproc)0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
- 0, /* tp_iter */
+ (getiterfunc)pysqlite_iter, /* tp_iter */
0, /* tp_iternext */
- 0, /* tp_methods */
+ pysqlite_row_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
@@ -188,15 +219,15 @@ PyTypeObject RowType = {
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
- (initproc)row_init, /* tp_init */
+ (initproc)pysqlite_row_init, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0 /* tp_free */
};
-extern int row_setup_types(void)
+extern int pysqlite_row_setup_types(void)
{
- RowType.tp_new = PyType_GenericNew;
- RowType.tp_as_mapping = &row_as_mapping;
- return PyType_Ready(&RowType);
+ pysqlite_RowType.tp_new = PyType_GenericNew;
+ pysqlite_RowType.tp_as_mapping = &pysqlite_row_as_mapping;
+ return PyType_Ready(&pysqlite_RowType);
}
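
The new keys() method and tp_iter slot are what make sqlite3.Row usable as a lightweight, read-only row object from Python: columns can be read by index or by name, listed, and iterated. A minimal sketch:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.row_factory = sqlite3.Row
    row = con.execute("select 1 as id, 'Ada' as name").fetchone()

    print(row.keys())     # ['id', 'name'], from the new keys() method
    print(row["name"])    # access by column name
    print(row[0])         # access by index
    for value in row:     # iteration, via the new tp_iter slot
        print(value)
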
diff --git a/Modules/_sqlite/row.h b/Modules/_sqlite/row.h
index c6e083c..b92225b 100644
--- a/Modules/_sqlite/row.h
+++ b/Modules/_sqlite/row.h
@@ -30,10 +30,10 @@ typedef struct _Row
PyObject_HEAD
PyObject* data;
PyObject* description;
-} Row;
+} pysqlite_Row;
-extern PyTypeObject RowType;
+extern PyTypeObject pysqlite_RowType;
-int row_setup_types(void);
+int pysqlite_row_setup_types(void);
#endif
diff --git a/Modules/_sqlite/statement.c b/Modules/_sqlite/statement.c
index 7763d10..870e776 100644
--- a/Modules/_sqlite/statement.c
+++ b/Modules/_sqlite/statement.c
@@ -29,7 +29,7 @@
#include "sqlitecompat.h"
/* prototypes */
-int check_remaining_sql(const char* tail);
+static int pysqlite_check_remaining_sql(const char* tail);
typedef enum {
LINECOMMENT_1,
@@ -40,7 +40,7 @@ typedef enum {
NORMAL
} parse_remaining_sql_state;
-int statement_create(Statement* self, Connection* connection, PyObject* sql)
+int pysqlite_statement_create(pysqlite_Statement* self, pysqlite_Connection* connection, PyObject* sql)
{
const char* tail;
int rc;
@@ -77,7 +77,7 @@ int statement_create(Statement* self, Connection* connection, PyObject* sql)
self->db = connection->db;
- if (rc == SQLITE_OK && check_remaining_sql(tail)) {
+ if (rc == SQLITE_OK && pysqlite_check_remaining_sql(tail)) {
(void)sqlite3_finalize(self->st);
self->st = NULL;
rc = PYSQLITE_TOO_MUCH_SQL;
@@ -86,7 +86,7 @@ int statement_create(Statement* self, Connection* connection, PyObject* sql)
return rc;
}
-int statement_bind_parameter(Statement* self, int pos, PyObject* parameter)
+int pysqlite_statement_bind_parameter(pysqlite_Statement* self, int pos, PyObject* parameter)
{
int rc = SQLITE_OK;
long longval;
@@ -133,7 +133,7 @@ int statement_bind_parameter(Statement* self, int pos, PyObject* parameter)
return rc;
}
-void statement_bind_parameters(Statement* self, PyObject* parameters)
+void pysqlite_statement_bind_parameters(pysqlite_Statement* self, PyObject* parameters)
{
PyObject* current_param;
PyObject* adapted;
@@ -154,19 +154,19 @@ void statement_bind_parameters(Statement* self, PyObject* parameters)
binding_name = sqlite3_bind_parameter_name(self->st, i);
Py_END_ALLOW_THREADS
if (!binding_name) {
- PyErr_Format(ProgrammingError, "Binding %d has no name, but you supplied a dictionary (which has only names).", i);
+ PyErr_Format(pysqlite_ProgrammingError, "Binding %d has no name, but you supplied a dictionary (which has only names).", i);
return;
}
binding_name++; /* skip first char (the colon) */
current_param = PyDict_GetItemString(parameters, binding_name);
if (!current_param) {
- PyErr_Format(ProgrammingError, "You did not supply a value for binding %d.", i);
+ PyErr_Format(pysqlite_ProgrammingError, "You did not supply a value for binding %d.", i);
return;
}
Py_INCREF(current_param);
- adapted = microprotocols_adapt(current_param, (PyObject*)&SQLitePrepareProtocolType, NULL);
+ adapted = microprotocols_adapt(current_param, (PyObject*)&pysqlite_PrepareProtocolType, NULL);
if (adapted) {
Py_DECREF(current_param);
} else {
@@ -174,11 +174,11 @@ void statement_bind_parameters(Statement* self, PyObject* parameters)
adapted = current_param;
}
- rc = statement_bind_parameter(self, i, adapted);
+ rc = pysqlite_statement_bind_parameter(self, i, adapted);
Py_DECREF(adapted);
if (rc != SQLITE_OK) {
- PyErr_Format(InterfaceError, "Error binding parameter :%s - probably unsupported type.", binding_name);
+ PyErr_Format(pysqlite_InterfaceError, "Error binding parameter :%s - probably unsupported type.", binding_name);
return;
}
}
@@ -186,7 +186,7 @@ void statement_bind_parameters(Statement* self, PyObject* parameters)
/* parameters passed as sequence */
num_params = PySequence_Length(parameters);
if (num_params != num_params_needed) {
- PyErr_Format(ProgrammingError, "Incorrect number of bindings supplied. The current statement uses %d, and there are %d supplied.",
+ PyErr_Format(pysqlite_ProgrammingError, "Incorrect number of bindings supplied. The current statement uses %d, and there are %d supplied.",
num_params_needed, num_params);
return;
}
@@ -195,7 +195,7 @@ void statement_bind_parameters(Statement* self, PyObject* parameters)
if (!current_param) {
return;
}
- adapted = microprotocols_adapt(current_param, (PyObject*)&SQLitePrepareProtocolType, NULL);
+ adapted = microprotocols_adapt(current_param, (PyObject*)&pysqlite_PrepareProtocolType, NULL);
if (adapted) {
Py_DECREF(current_param);
@@ -204,18 +204,18 @@ void statement_bind_parameters(Statement* self, PyObject* parameters)
adapted = current_param;
}
- rc = statement_bind_parameter(self, i + 1, adapted);
+ rc = pysqlite_statement_bind_parameter(self, i + 1, adapted);
Py_DECREF(adapted);
if (rc != SQLITE_OK) {
- PyErr_Format(InterfaceError, "Error binding parameter %d - probably unsupported type.", i);
+ PyErr_Format(pysqlite_InterfaceError, "Error binding parameter %d - probably unsupported type.", i);
return;
}
}
}
}
-int statement_recompile(Statement* self, PyObject* params)
+int pysqlite_statement_recompile(pysqlite_Statement* self, PyObject* params)
{
const char* tail;
int rc;
@@ -250,7 +250,7 @@ int statement_recompile(Statement* self, PyObject* params)
return rc;
}
-int statement_finalize(Statement* self)
+int pysqlite_statement_finalize(pysqlite_Statement* self)
{
int rc;
@@ -267,7 +267,7 @@ int statement_finalize(Statement* self)
return rc;
}
-int statement_reset(Statement* self)
+int pysqlite_statement_reset(pysqlite_Statement* self)
{
int rc;
@@ -286,12 +286,12 @@ int statement_reset(Statement* self)
return rc;
}
-void statement_mark_dirty(Statement* self)
+void pysqlite_statement_mark_dirty(pysqlite_Statement* self)
{
self->in_use = 1;
}
-void statement_dealloc(Statement* self)
+void pysqlite_statement_dealloc(pysqlite_Statement* self)
{
int rc;
@@ -320,7 +320,7 @@ void statement_dealloc(Statement* self)
*
* Returns 1 if there is more left than should be. 0 if ok.
*/
-int check_remaining_sql(const char* tail)
+static int pysqlite_check_remaining_sql(const char* tail)
{
const char* pos = tail;
@@ -382,13 +382,13 @@ int check_remaining_sql(const char* tail)
return 0;
}
-PyTypeObject StatementType = {
+PyTypeObject pysqlite_StatementType = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MODULE_NAME ".Statement", /* tp_name */
- sizeof(Statement), /* tp_basicsize */
+ sizeof(pysqlite_Statement), /* tp_basicsize */
0, /* tp_itemsize */
- (destructor)statement_dealloc, /* tp_dealloc */
+ (destructor)pysqlite_statement_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -408,7 +408,7 @@ PyTypeObject StatementType = {
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
- offsetof(Statement, in_weakreflist), /* tp_weaklistoffset */
+ offsetof(pysqlite_Statement, in_weakreflist), /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
0, /* tp_methods */
@@ -425,8 +425,8 @@ PyTypeObject StatementType = {
0 /* tp_free */
};
-extern int statement_setup_types(void)
+extern int pysqlite_statement_setup_types(void)
{
- StatementType.tp_new = PyType_GenericNew;
- return PyType_Ready(&StatementType);
+ pysqlite_StatementType.tp_new = PyType_GenericNew;
+ return PyType_Ready(&pysqlite_StatementType);
}
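
The binding helpers above handle both the positional (qmark) and the named parameter styles, and raise ProgrammingError when the supplied parameters do not match the statement's placeholders. A small Python-level sketch of both paths:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("create table person (name text, age integer)")

    # positional (qmark) style: the sequence length must match the placeholders
    con.execute("insert into person values (?, ?)", ("Grace", 85))

    # named style: the mapping must supply a value for every :name placeholder
    con.execute("insert into person values (:name, :age)",
                {"name": "Alan", "age": 41})

    try:
        con.execute("insert into person values (?, ?)", ("only one value",))
    except sqlite3.ProgrammingError as exc:
        print(exc)   # Incorrect number of bindings supplied. ...
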
diff --git a/Modules/_sqlite/statement.h b/Modules/_sqlite/statement.h
index 57ee36f..10b8823 100644
--- a/Modules/_sqlite/statement.h
+++ b/Modules/_sqlite/statement.h
@@ -39,21 +39,21 @@ typedef struct
PyObject* sql;
int in_use;
PyObject* in_weakreflist; /* List of weak references */
-} Statement;
+} pysqlite_Statement;
-extern PyTypeObject StatementType;
+extern PyTypeObject pysqlite_StatementType;
-int statement_create(Statement* self, Connection* connection, PyObject* sql);
-void statement_dealloc(Statement* self);
+int pysqlite_statement_create(pysqlite_Statement* self, pysqlite_Connection* connection, PyObject* sql);
+void pysqlite_statement_dealloc(pysqlite_Statement* self);
-int statement_bind_parameter(Statement* self, int pos, PyObject* parameter);
-void statement_bind_parameters(Statement* self, PyObject* parameters);
+int pysqlite_statement_bind_parameter(pysqlite_Statement* self, int pos, PyObject* parameter);
+void pysqlite_statement_bind_parameters(pysqlite_Statement* self, PyObject* parameters);
-int statement_recompile(Statement* self, PyObject* parameters);
-int statement_finalize(Statement* self);
-int statement_reset(Statement* self);
-void statement_mark_dirty(Statement* self);
+int pysqlite_statement_recompile(pysqlite_Statement* self, PyObject* parameters);
+int pysqlite_statement_finalize(pysqlite_Statement* self);
+int pysqlite_statement_reset(pysqlite_Statement* self);
+void pysqlite_statement_mark_dirty(pysqlite_Statement* self);
-int statement_setup_types(void);
+int pysqlite_statement_setup_types(void);
#endif
diff --git a/Modules/_sqlite/util.c b/Modules/_sqlite/util.c
index f5a7233..b70297b 100644
--- a/Modules/_sqlite/util.c
+++ b/Modules/_sqlite/util.c
@@ -24,8 +24,7 @@
#include "module.h"
#include "connection.h"
-int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, Connection* connection
-)
+int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, pysqlite_Connection* connection)
{
int rc;
@@ -40,7 +39,7 @@ int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, Connection* connectio
* Checks the SQLite error code and sets the appropriate DB-API exception.
* Returns the error code (0 means no error occurred).
*/
-int _seterror(sqlite3* db)
+int _pysqlite_seterror(sqlite3* db)
{
int errorcode;
@@ -53,7 +52,7 @@ int _seterror(sqlite3* db)
break;
case SQLITE_INTERNAL:
case SQLITE_NOTFOUND:
- PyErr_SetString(InternalError, sqlite3_errmsg(db));
+ PyErr_SetString(pysqlite_InternalError, sqlite3_errmsg(db));
break;
case SQLITE_NOMEM:
(void)PyErr_NoMemory();
@@ -71,23 +70,23 @@ int _seterror(sqlite3* db)
case SQLITE_PROTOCOL:
case SQLITE_EMPTY:
case SQLITE_SCHEMA:
- PyErr_SetString(OperationalError, sqlite3_errmsg(db));
+ PyErr_SetString(pysqlite_OperationalError, sqlite3_errmsg(db));
break;
case SQLITE_CORRUPT:
- PyErr_SetString(DatabaseError, sqlite3_errmsg(db));
+ PyErr_SetString(pysqlite_DatabaseError, sqlite3_errmsg(db));
break;
case SQLITE_TOOBIG:
- PyErr_SetString(DataError, sqlite3_errmsg(db));
+ PyErr_SetString(pysqlite_DataError, sqlite3_errmsg(db));
break;
case SQLITE_CONSTRAINT:
case SQLITE_MISMATCH:
- PyErr_SetString(IntegrityError, sqlite3_errmsg(db));
+ PyErr_SetString(pysqlite_IntegrityError, sqlite3_errmsg(db));
break;
case SQLITE_MISUSE:
- PyErr_SetString(ProgrammingError, sqlite3_errmsg(db));
+ PyErr_SetString(pysqlite_ProgrammingError, sqlite3_errmsg(db));
break;
default:
- PyErr_SetString(DatabaseError, sqlite3_errmsg(db));
+ PyErr_SetString(pysqlite_DatabaseError, sqlite3_errmsg(db));
break;
}
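
The switch above is the single place where SQLite result codes are translated into DB-API exceptions; for example SQLITE_CONSTRAINT surfaces in Python as sqlite3.IntegrityError. A sketch of that mapping in action (the table and unique constraint are illustrative):

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("create table users (email text unique)")
    con.execute("insert into users values ('a@example.org')")

    try:
        # the duplicate key makes SQLite return SQLITE_CONSTRAINT, which
        # _pysqlite_seterror() maps to IntegrityError
        con.execute("insert into users values ('a@example.org')")
    except sqlite3.IntegrityError as exc:
        print("constraint violated: %s" % exc)
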
diff --git a/Modules/_sqlite/util.h b/Modules/_sqlite/util.h
index 7ce3d40..969c5e5 100644
--- a/Modules/_sqlite/util.h
+++ b/Modules/_sqlite/util.h
@@ -28,11 +28,11 @@
#include "sqlite3.h"
#include "connection.h"
-int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, Connection* connection);
+int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, pysqlite_Connection* connection);
/**
* Checks the SQLite error code and sets the appropriate DB-API exception.
* Returns the error code (0 means no error occurred).
*/
-int _seterror(sqlite3* db);
+int _pysqlite_seterror(sqlite3* db);
#endif
diff --git a/Objects/intobject.c b/Objects/intobject.c
index 256824c..31d8130 100644
--- a/Objects/intobject.c
+++ b/Objects/intobject.c
@@ -1050,8 +1050,9 @@ Convert a string or number to an integer, if possible. A floating point\n\
argument will be truncated towards zero (this does not include a string\n\
representation of a floating point number!) When converting a string, use\n\
the optional base. It is an error to supply a base when converting a\n\
-non-string. If the argument is outside the integer range a long object\n\
-will be returned instead.");
+non-string. If base is zero, the proper base is guessed based on the\n\
+string content. If the argument is outside the integer range a\n\
+long object will be returned instead.");
static PyNumberMethods int_as_number = {
(binaryfunc)int_add, /*nb_add*/
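
The reworded docstring documents the base-0 behaviour: with base 0 the base is inferred from the literal's prefix, following Python 2's source-literal rules. An illustrative sketch (Python 2 semantics, matching this codebase):

    print(int("0x1f", 0))   # 31, hexadecimal prefix
    print(int("010", 0))    # 8, a leading zero means octal here
    print(int("42", 0))     # 42, no prefix: decimal
    print(int("42"))        # 42, default base is 10
    # values outside the platform int range come back as a long
    print(type(int("12345678901234567890")).__name__)   # 'long'
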
diff --git a/Objects/setobject.c b/Objects/setobject.c
index be829a8..a0cb7be 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -1024,7 +1024,7 @@ frozenset_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
PyObject *iterable = NULL, *result;
- if (!_PyArg_NoKeywords("frozenset()", kwds))
+ if (type == &PyFrozenSet_Type && !_PyArg_NoKeywords("frozenset()", kwds))
return NULL;
if (!PyArg_UnpackTuple(args, type->tp_name, 0, 1, &iterable))
@@ -1068,7 +1068,7 @@ PySet_Fini(void)
static PyObject *
set_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
- if (!_PyArg_NoKeywords("set()", kwds))
+ if (type == &PySet_Type && !_PyArg_NoKeywords("set()", kwds))
return NULL;
return make_new_set(type, NULL);
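
Restricting the _PyArg_NoKeywords check to the exact built-in types means set and frozenset subclasses may now define constructors that take keyword arguments; previously the base type's tp_new rejected the keywords before the subclass ever saw them. A minimal sketch (the KeyedSet class is illustrative):

    class KeyedSet(set):
        # the subclass handles the keyword itself; set.__new__ no longer
        # rejects keyword arguments on behalf of subclasses
        def __init__(self, iterable=(), key=None):
            set.__init__(self, iterable)
            self.key = key

    s = KeyedSet([3, 1, 2], key="demo")
    print(sorted(s))   # [1, 2, 3]
    print(s.key)       # demo
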
diff --git a/Python/Python-ast.c b/Python/Python-ast.c
index e1a5bce..9908326 100644
--- a/Python/Python-ast.c
+++ b/Python/Python-ast.c
@@ -3126,7 +3126,7 @@ init_ast(void)
if (PyDict_SetItemString(d, "AST", (PyObject*)AST_type) < 0) return;
if (PyModule_AddIntConstant(m, "PyCF_ONLY_AST", PyCF_ONLY_AST) < 0)
return;
- if (PyModule_AddStringConstant(m, "__version__", "53170") < 0)
+ if (PyModule_AddStringConstant(m, "__version__", "53349") < 0)
return;
if (PyDict_SetItemString(d, "mod", (PyObject*)mod_type) < 0) return;
if (PyDict_SetItemString(d, "Module", (PyObject*)Module_type) < 0)
diff --git a/Tools/pybench/pybench.py b/Tools/pybench/pybench.py
index e0ff732..89f6f9b 100755
--- a/Tools/pybench/pybench.py
+++ b/Tools/pybench/pybench.py
@@ -34,7 +34,7 @@ NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
"""
-import sys, time, operator, string
+import sys, time, operator, string, platform
from CommandLine import *
try:
@@ -102,27 +102,26 @@ def get_timer(timertype):
def get_machine_details():
- import platform
if _debug:
print 'Getting machine details...'
buildno, builddate = platform.python_build()
python = platform.python_version()
- if python > '2.0':
- try:
- unichr(100000)
- except ValueError:
- # UCS2 build (standard)
- unicode = 'UCS2'
- else:
- # UCS4 build (most recent Linux distros)
- unicode = 'UCS4'
- else:
+ try:
+ unichr(100000)
+ except ValueError:
+ # UCS2 build (standard)
+ unicode = 'UCS2'
+ except NameError:
unicode = None
+ else:
+ # UCS4 build (most recent Linux distros)
+ unicode = 'UCS4'
bits, linkage = platform.architecture()
return {
'platform': platform.platform(),
'processor': platform.processor(),
'executable': sys.executable,
+ 'implementation': platform.python_implementation(),
'python': platform.python_version(),
'compiler': platform.python_compiler(),
'buildno': buildno,
@@ -134,17 +133,18 @@ def get_machine_details():
def print_machine_details(d, indent=''):
l = ['Machine Details:',
- ' Platform ID: %s' % d.get('platform', 'n/a'),
- ' Processor: %s' % d.get('processor', 'n/a'),
+ ' Platform ID: %s' % d.get('platform', 'n/a'),
+ ' Processor: %s' % d.get('processor', 'n/a'),
'',
'Python:',
- ' Executable: %s' % d.get('executable', 'n/a'),
- ' Version: %s' % d.get('python', 'n/a'),
- ' Compiler: %s' % d.get('compiler', 'n/a'),
- ' Bits: %s' % d.get('bits', 'n/a'),
- ' Build: %s (#%s)' % (d.get('builddate', 'n/a'),
- d.get('buildno', 'n/a')),
- ' Unicode: %s' % d.get('unicode', 'n/a'),
+ ' Implementation: %s' % d.get('implementation', 'n/a'),
+ ' Executable: %s' % d.get('executable', 'n/a'),
+ ' Version: %s' % d.get('python', 'n/a'),
+ ' Compiler: %s' % d.get('compiler', 'n/a'),
+ ' Bits: %s' % d.get('bits', 'n/a'),
+ ' Build: %s (#%s)' % (d.get('builddate', 'n/a'),
+ d.get('buildno', 'n/a')),
+ ' Unicode: %s' % d.get('unicode', 'n/a'),
]
print indent + string.join(l, '\n' + indent) + '\n'
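
pybench now pulls the implementation name from platform.python_implementation(), presumably provided by the Lib/platform.py changes listed in this same commit; a quick sketch of the values it reports:

    import platform

    # implementation identifier, e.g. 'CPython', 'Jython' or 'IronPython'
    print(platform.python_implementation())
    print(platform.python_version())   # e.g. '2.6.0'
    print(platform.architecture())     # e.g. ('64bit', 'ELF')
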
@@ -499,9 +499,10 @@ class Benchmark:
def calibrate(self):
- print 'Calibrating tests. Please wait...'
+ print 'Calibrating tests. Please wait...',
if self.verbose:
print
+ print
print 'Test min max'
print '-' * LINE
tests = self.tests.items()
@@ -514,6 +515,11 @@ class Benchmark:
(name,
min(test.overhead_times) * MILLI_SECONDS,
max(test.overhead_times) * MILLI_SECONDS)
+ if self.verbose:
+ print
+ print 'Done with the calibration.'
+ else:
+ print 'done.'
print
def run(self):
@@ -830,7 +836,9 @@ python pybench.py -s p25.pybench -c p21.pybench
print '-' * LINE
print 'PYBENCH %s' % __version__
print '-' * LINE
- print '* using Python %s' % (string.split(sys.version)[0])
+ print '* using %s %s' % (
+ platform.python_implementation(),
+ string.join(string.split(sys.version), ' '))
# Switch off garbage collection
if not withgc:
@@ -839,15 +847,23 @@ python pybench.py -s p25.pybench -c p21.pybench
except ImportError:
print '* Python version doesn\'t support garbage collection'
else:
- gc.disable()
- print '* disabled garbage collection'
+ try:
+ gc.disable()
+ except NotImplementedError:
+ print '* Python version doesn\'t support gc.disable'
+ else:
+ print '* disabled garbage collection'
# "Disable" sys check interval
if not withsyscheck:
# Too bad the check interval uses an int instead of a long...
value = 2147483647
- sys.setcheckinterval(value)
- print '* system check interval set to maximum: %s' % value
+ try:
+ sys.setcheckinterval(value)
+ except (AttributeError, NotImplementedError):
+ print '* Python version doesn\'t support sys.setcheckinterval'
+ else:
+ print '* system check interval set to maximum: %s' % value
if timer == TIMER_SYSTIMES_PROCESSTIME:
import systimes