Skip to content

bpo-37421: test_urllib calls urlcleanup() #14529

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Jul 2, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions Lib/test/libregrtest/save_env.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import sys
import sysconfig
import threading
import urllib.request
import warnings
from test import support
from test.libregrtest.utils import print_warning
Expand Down Expand Up @@ -68,8 +69,20 @@ def __init__(self, testname, verbose=0, quiet=False, *, pgo=False):
'files', 'locale', 'warnings.showwarning',
'shutil_archive_formats', 'shutil_unpack_formats',
'asyncio.events._event_loop_policy',
'urllib.requests._url_tempfiles', 'urllib.requests._opener',
)

def get_urllib_requests__url_tempfiles(self):
    # Snapshot urllib.request's module-global list of temporary files
    # created by urlretrieve().  A shallow copy is returned so later
    # appends to the global list do not alter the saved snapshot.
    return urllib.request._url_tempfiles[:]
def restore_urllib_requests__url_tempfiles(self, tempfiles):
    # Remove every temporary file recorded in the saved snapshot.
    # support.unlink is used (rather than os.unlink) because it
    # tolerates files that have already been deleted.
    for path in tempfiles:
        support.unlink(path)

def get_urllib_requests__opener(self):
    # Capture the module-global opener installed by urlopen() /
    # install_opener(), so it can be put back after the test runs.
    current_opener = urllib.request._opener
    return current_opener
def restore_urllib_requests__opener(self, opener):
    # Reinstall the previously-saved opener; the saved value may be
    # None, which means "no opener was installed".
    setattr(urllib.request, "_opener", opener)

def get_asyncio_events__event_loop_policy(self):
    # Snapshot the current asyncio event-loop policy through the test
    # helper — presumably it avoids creating a policy as a side effect
    # when none is set yet (TODO confirm against test.support).
    policy = support.maybe_get_event_loop_policy()
    return policy
def restore_asyncio_events__event_loop_policy(self, policy):
Expand Down
3 changes: 3 additions & 0 deletions Lib/test/test_robotparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,9 @@ def log_message(self, format, *args):
class PasswordProtectedSiteTestCase(unittest.TestCase):

def setUp(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

self.server = HTTPServer((support.HOST, 0), RobotHandler)

self.t = threading.Thread(
Expand Down
4 changes: 4 additions & 0 deletions Lib/test/test_sax.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import os.path
import shutil
from urllib.error import URLError
import urllib.request
from test import support
from test.support import findfile, run_unittest, FakePath, TESTFN

Expand Down Expand Up @@ -979,6 +980,9 @@ def test_expat_dtdhandler(self):
self.assertEqual(handler._entities, [("img", None, "expat.gif", "GIF")])

def test_expat_external_dtd_enabled(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

parser = create_parser()
parser.setFeature(feature_external_ges, True)
resolver = self.TestEntityRecorder()
Expand Down
10 changes: 10 additions & 0 deletions Lib/test/test_urllib.py
Original file line number Diff line number Diff line change
Expand Up @@ -545,6 +545,9 @@ class urlopen_DataTests(unittest.TestCase):
"""Test urlopen() opening a data URL."""

def setUp(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

# text containing URL special- and unicode-characters
self.text = "test data URLs :;,%=& \u00f6 \u00c4 "
# 2x1 pixel RGB PNG image with one black and one white pixel
Expand Down Expand Up @@ -619,6 +622,9 @@ class urlretrieve_FileTests(unittest.TestCase):
"""Test urllib.urlretrieve() on local files"""

def setUp(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

# Create a list of temporary files. Each item in the list is a file
# name (absolute path or relative to the current working directory).
# All files in this list will be deleted in the tearDown method. Note,
Expand Down Expand Up @@ -759,6 +765,8 @@ class urlretrieve_HttpTests(unittest.TestCase, FakeHTTPMixin):
"""Test urllib.urlretrieve() using fake http connections"""

def test_short_content_raises_ContentTooShortError(self):
self.addCleanup(urllib.request.urlcleanup)

self.fakehttp(b'''HTTP/1.1 200 OK
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Expand All @@ -780,6 +788,8 @@ def _reporthook(par1, par2, par3):
self.unfakehttp()

def test_short_content_raises_ContentTooShortError_without_reporthook(self):
self.addCleanup(urllib.request.urlcleanup)

self.fakehttp(b'''HTTP/1.1 200 OK
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Expand Down
7 changes: 7 additions & 0 deletions Lib/test/test_urllib2.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,9 @@ def test___all__(self):
def test_trivial(self):
# A couple trivial tests

# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

self.assertRaises(ValueError, urllib.request.urlopen, 'bogus url')

# XXX Name hacking to get this to work on Windows.
Expand Down Expand Up @@ -1290,6 +1293,10 @@ def test_redirect_fragment(self):

def test_redirect_no_path(self):
# Issue 14132: Relative redirect strips original path

# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

real_class = http.client.HTTPConnection
response1 = b"HTTP/1.1 302 Found\r\nLocation: ?query\r\n\r\n"
http.client.HTTPConnection = test_urllib.fakehttp(response1)
Expand Down
3 changes: 3 additions & 0 deletions Lib/test/test_urllib2_localnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -447,6 +447,9 @@ class TestUrlopen(unittest.TestCase):
def setUp(self):
super(TestUrlopen, self).setUp()

# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

# Ignore proxies for localhost tests.
def restore_environ(old_environ):
os.environ.clear()
Expand Down
7 changes: 7 additions & 0 deletions Lib/test/test_urllib2net.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,9 @@ class AuthTests(unittest.TestCase):
class CloseSocketTest(unittest.TestCase):

def test_close(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

# calling .close() on urllib2's response objects should close the
# underlying socket
url = support.TEST_HTTP_URL
Expand Down Expand Up @@ -257,6 +260,10 @@ def _extra_handlers(self):


class TimeoutTest(unittest.TestCase):
def setUp(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

def test_http_basic(self):
self.assertIsNone(socket.getdefaulttimeout())
url = support.TEST_HTTP_URL
Expand Down
11 changes: 11 additions & 0 deletions Lib/test/test_urllibnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ def tearDown(self):
socket.setdefaulttimeout(None)

def testURLread(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

domain = urllib.parse.urlparse(support.TEST_HTTP_URL).netloc
with support.transient_internet(domain):
f = urllib.request.urlopen(support.TEST_HTTP_URL)
Expand All @@ -48,6 +51,10 @@ class urlopenNetworkTests(unittest.TestCase):

url = 'http://www.pythontest.net/'

def setUp(self):
# clear _opener global variable
self.addCleanup(urllib.request.urlcleanup)

@contextlib.contextmanager
def urlopen(self, *args, **kwargs):
resource = args[0]
Expand Down Expand Up @@ -144,6 +151,10 @@ def test_bad_address(self):
class urlretrieveNetworkTests(unittest.TestCase):
"""Tests urllib.request.urlretrieve using the network."""

def setUp(self):
# remove temporary files created by urlretrieve()
self.addCleanup(urllib.request.urlcleanup)

@contextlib.contextmanager
def urlretrieve(self, *args, **kwargs):
resource = args[0]
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
urllib.request tests now call :func:`~urllib.request.urlcleanup` to remove
temporary files created by ``urlretrieve()`` tests and to clear the ``_opener``
global variable set by ``urlopen()`` and by functions that call ``urlopen()``
indirectly.