Diffstat (limited to 'Lib/test')
-rw-r--r--   Lib/test/libregrtest/save_env.py    13
-rw-r--r--   Lib/test/test_robotparser.py         3
-rw-r--r--   Lib/test/test_sax.py                 4
-rw-r--r--   Lib/test/test_urllib.py             10
-rw-r--r--   Lib/test/test_urllib2.py             7
-rw-r--r--   Lib/test/test_urllib2_localnet.py    3
-rw-r--r--   Lib/test/test_urllib2net.py          7
-rw-r--r--   Lib/test/test_urllibnet.py          11
8 files changed, 58 insertions, 0 deletions
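
Every hunk in this patch applies the same pattern: the affected test registers urllib.request.urlcleanup() with addCleanup(), so the global opener that urlopen()/install_opener() store in urllib.request._opener, and the temporary files that urlretrieve() records in _url_tempfiles, are removed as soon as the test finishes instead of leaking into later tests. A minimal, hypothetical sketch of that pattern (the class name and the data: URL are illustrative, not taken from the patched files):

    import unittest
    import urllib.request

    class UrlopenCleanupExample(unittest.TestCase):
        def setUp(self):
            # urlcleanup() resets urllib.request's module-global _opener and
            # deletes the temporary files listed in _url_tempfiles, so nothing
            # opened or downloaded in this test leaks into later tests.
            self.addCleanup(urllib.request.urlcleanup)

        def test_data_url(self):
            # urlopen() lazily installs a global OpenerDirector on first use;
            # the cleanup registered above removes it again.
            with urllib.request.urlopen('data:text/plain,hello') as f:
                self.assertEqual(f.read(), b'hello')

    if __name__ == '__main__':
        unittest.main()
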
diff --git a/Lib/test/libregrtest/save_env.py b/Lib/test/libregrtest/save_env.py
index 31931f2..e7c27a6 100644
--- a/Lib/test/libregrtest/save_env.py
+++ b/Lib/test/libregrtest/save_env.py
@@ -7,6 +7,7 @@ import shutil
 import sys
 import sysconfig
 import threading
+import urllib.request
 import warnings
 from test import support
 from test.libregrtest.utils import print_warning
@@ -68,8 +69,20 @@ class saved_test_environment:
                  'files', 'locale', 'warnings.showwarning',
                  'shutil_archive_formats', 'shutil_unpack_formats',
                  'asyncio.events._event_loop_policy',
+                 'urllib.requests._url_tempfiles', 'urllib.requests._opener',
                 )

+    def get_urllib_requests__url_tempfiles(self):
+        return list(urllib.request._url_tempfiles)
+    def restore_urllib_requests__url_tempfiles(self, tempfiles):
+        for filename in tempfiles:
+            support.unlink(filename)
+
+    def get_urllib_requests__opener(self):
+        return urllib.request._opener
+    def restore_urllib_requests__opener(self, opener):
+        urllib.request._opener = opener
+
     def get_asyncio_events__event_loop_policy(self):
         return support.maybe_get_event_loop_policy()
     def restore_asyncio_events__event_loop_policy(self, policy):
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 77cd7c4..f28d8be 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -309,6 +309,9 @@ class RobotHandler(BaseHTTPRequestHandler):
 class PasswordProtectedSiteTestCase(unittest.TestCase):

     def setUp(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         self.server = HTTPServer((support.HOST, 0), RobotHandler)

         self.t = threading.Thread(
diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py
index da4eb1d..ce3a422 100644
--- a/Lib/test/test_sax.py
+++ b/Lib/test/test_sax.py
@@ -20,6 +20,7 @@ import codecs
 import os.path
 import shutil
 from urllib.error import URLError
+import urllib.request

 from test import support
 from test.support import findfile, run_unittest, FakePath, TESTFN
@@ -979,6 +980,9 @@ class ExpatReaderTest(XmlTestBase):
         self.assertEqual(handler._entities, [("img", None, "expat.gif", "GIF")])

     def test_expat_external_dtd_enabled(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         parser = create_parser()
         parser.setFeature(feature_external_ges, True)
         resolver = self.TestEntityRecorder()
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index 801f0fd..8895421 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -545,6 +545,9 @@ class urlopen_DataTests(unittest.TestCase):
     """Test urlopen() opening a data URL."""

     def setUp(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         # text containing URL special- and unicode-characters
         self.text = "test data URLs :;,%=& \u00f6 \u00c4 "
         # 2x1 pixel RGB PNG image with one black and one white pixel
@@ -619,6 +622,9 @@ class urlretrieve_FileTests(unittest.TestCase):
     """Test urllib.urlretrieve() on local files"""

     def setUp(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         # Create a list of temporary files. Each item in the list is a file
         # name (absolute path or relative to the current working directory).
         # All files in this list will be deleted in the tearDown method.  Note,
@@ -759,6 +765,8 @@ class urlretrieve_HttpTests(unittest.TestCase, FakeHTTPMixin):
     """Test urllib.urlretrieve() using fake http connections"""

     def test_short_content_raises_ContentTooShortError(self):
+        self.addCleanup(urllib.request.urlcleanup)
+
         self.fakehttp(b'''HTTP/1.1 200 OK
 Date: Wed, 02 Jan 2008 03:03:54 GMT
 Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
@@ -780,6 +788,8 @@ FF
         self.unfakehttp()

     def test_short_content_raises_ContentTooShortError_without_reporthook(self):
+        self.addCleanup(urllib.request.urlcleanup)
+
         self.fakehttp(b'''HTTP/1.1 200 OK
 Date: Wed, 02 Jan 2008 03:03:54 GMT
 Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index c6d275e..debb3c2 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -47,6 +47,9 @@ class TrivialTests(unittest.TestCase):
     def test_trivial(self):
         # A couple trivial tests

+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         self.assertRaises(ValueError, urllib.request.urlopen, 'bogus url')

         # XXX Name hacking to get this to work on Windows.
@@ -1290,6 +1293,10 @@ class HandlerTests(unittest.TestCase):

     def test_redirect_no_path(self):
         # Issue 14132: Relative redirect strips original path
+
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         real_class = http.client.HTTPConnection
         response1 = b"HTTP/1.1 302 Found\r\nLocation: ?query\r\n\r\n"
         http.client.HTTPConnection = test_urllib.fakehttp(response1)
diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py
index 591b48d..6b9b130 100644
--- a/Lib/test/test_urllib2_localnet.py
+++ b/Lib/test/test_urllib2_localnet.py
@@ -447,6 +447,9 @@ class TestUrlopen(unittest.TestCase):

     def setUp(self):
         super(TestUrlopen, self).setUp()
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         # Ignore proxies for localhost tests.
         def restore_environ(old_environ):
             os.environ.clear()
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index 0f43d71..040a2ce 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -82,6 +82,9 @@ class AuthTests(unittest.TestCase):
 class CloseSocketTest(unittest.TestCase):

     def test_close(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         # calling .close() on urllib2's response objects should close the
         # underlying socket
         url = support.TEST_HTTP_URL
@@ -257,6 +260,10 @@ class OtherNetworkTests(unittest.TestCase):

 class TimeoutTest(unittest.TestCase):

+    def setUp(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
     def test_http_basic(self):
         self.assertIsNone(socket.getdefaulttimeout())
         url = support.TEST_HTTP_URL
diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py
index d394ced..848ab84 100644
--- a/Lib/test/test_urllibnet.py
+++ b/Lib/test/test_urllibnet.py
@@ -25,6 +25,9 @@ class URLTimeoutTest(unittest.TestCase):
         socket.setdefaulttimeout(None)

     def testURLread(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
         domain = urllib.parse.urlparse(support.TEST_HTTP_URL).netloc
         with support.transient_internet(domain):
             f = urllib.request.urlopen(support.TEST_HTTP_URL)
@@ -48,6 +51,10 @@ class urlopenNetworkTests(unittest.TestCase):

     url = 'http://www.pythontest.net/'

+    def setUp(self):
+        # clear _opener global variable
+        self.addCleanup(urllib.request.urlcleanup)
+
     @contextlib.contextmanager
     def urlopen(self, *args, **kwargs):
         resource = args[0]
@@ -144,6 +151,10 @@ class urlopenNetworkTests(unittest.TestCase):
 class urlretrieveNetworkTests(unittest.TestCase):
     """Tests urllib.request.urlretrieve using the network."""

+    def setUp(self):
+        # remove temporary files created by urlretrieve()
+        self.addCleanup(urllib.request.urlcleanup)
+
     @contextlib.contextmanager
     def urlretrieve(self, *args, **kwargs):
         resource = args[0]
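
The save_env.py change hooks the two new urllib resources into regrtest's saved_test_environment, which pairs every name in its resources tuple with a get_...() method that snapshots the value before the tests run and a restore_...() method that is called if the value differs afterwards (dots in the resource name become underscores in the method names, as the hunk above shows). Below is a standalone sketch of that convention, limited to the _opener resource; SavedUrllibOpener and its print() warning are made up for illustration and are not part of regrtest:

    import urllib.request

    class SavedUrllibOpener:
        """Snapshot urllib.request._opener and restore it if a test leaks one."""

        resource = 'urllib.requests._opener'

        def get_urllib_requests__opener(self):
            return urllib.request._opener

        def restore_urllib_requests__opener(self, opener):
            urllib.request._opener = opener

        def __enter__(self):
            # Record the opener in place before any test runs.
            self.original = self.get_urllib_requests__opener()
            return self

        def __exit__(self, *exc_info):
            current = self.get_urllib_requests__opener()
            if current is not self.original:
                # A test called install_opener() (or urlopen(), which installs
                # an opener lazily) and did not clean up after itself.
                print('Warning: %s was modified by the tests' % self.resource)
                self.restore_urllib_requests__opener(self.original)
            return False

Used as a context manager around a test run, for example with SavedUrllibOpener(): unittest.main(exit=False), it reports and undoes a leaked opener, mirroring what saved_test_environment does for each resource it tracks.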