Refs #23919 -- Replaced io.open() with open().
io.open() is an alias for the builtin open() on Python 3, so the io module qualification is redundant.
This commit is contained in:
parent
eb422e476f
commit
a556396339
|
@@ -2,7 +2,6 @@
|
|||
import errno
|
||||
import glob
|
||||
import hashlib
|
||||
import io
|
||||
import os
|
||||
import pickle
|
||||
import random
|
||||
|
@@ -32,7 +31,7 @@ class FileBasedCache(BaseCache):
|
|||
def get(self, key, default=None, version=None):
|
||||
fname = self._key_to_file(key, version)
|
||||
try:
|
||||
with io.open(fname, 'rb') as f:
|
||||
with open(fname, 'rb') as f:
|
||||
if not self._is_expired(f):
|
||||
return pickle.loads(zlib.decompress(f.read()))
|
||||
except IOError as e:
|
||||
|
@@ -47,7 +46,7 @@ class FileBasedCache(BaseCache):
|
|||
fd, tmp_path = tempfile.mkstemp(dir=self._dir)
|
||||
renamed = False
|
||||
try:
|
||||
with io.open(fd, 'wb') as f:
|
||||
with open(fd, 'wb') as f:
|
||||
expiry = self.get_backend_timeout(timeout)
|
||||
f.write(pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL))
|
||||
f.write(zlib.compress(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)))
|
||||
|
@@ -74,7 +73,7 @@ class FileBasedCache(BaseCache):
|
|||
def has_key(self, key, version=None):
|
||||
fname = self._key_to_file(key, version)
|
||||
if os.path.exists(fname):
|
||||
with io.open(fname, 'rb') as f:
|
||||
with open(fname, 'rb') as f:
|
||||
return not self._is_expired(f)
|
||||
return False
|
||||
|
||||
|
|
|
@@ -1,6 +1,5 @@
|
|||
import fnmatch
|
||||
import glob
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
@@ -106,7 +105,7 @@ class BuildFile(object):
|
|||
return
|
||||
|
||||
encoding = settings.FILE_CHARSET if self.command.settings_available else 'utf-8'
|
||||
with io.open(self.path, 'r', encoding=encoding) as fp:
|
||||
with open(self.path, 'r', encoding=encoding) as fp:
|
||||
src_data = fp.read()
|
||||
|
||||
if self.domain == 'djangojs':
|
||||
|
@@ -114,7 +113,7 @@ class BuildFile(object):
|
|||
elif self.domain == 'django':
|
||||
content = templatize(src_data, origin=self.path[2:], charset=encoding)
|
||||
|
||||
with io.open(self.work_path, 'w', encoding='utf-8') as fp:
|
||||
with open(self.work_path, 'w', encoding='utf-8') as fp:
|
||||
fp.write(content)
|
||||
|
||||
def postprocess_messages(self, msgs):
|
||||
|
@@ -190,7 +189,7 @@ def write_pot_file(potfile, msgs):
|
|||
header_read = True
|
||||
lines.append(line)
|
||||
msgs = '\n'.join(lines)
|
||||
with io.open(potfile, 'a', encoding='utf-8') as fp:
|
||||
with open(potfile, 'a', encoding='utf-8') as fp:
|
||||
fp.write(msgs)
|
||||
|
||||
|
||||
|
@@ -412,7 +411,7 @@ class Command(BaseCommand):
|
|||
elif self.verbosity > 0:
|
||||
self.stdout.write(errors)
|
||||
msgs = normalize_eols(msgs)
|
||||
with io.open(potfile, 'w', encoding='utf-8') as fp:
|
||||
with open(potfile, 'w', encoding='utf-8') as fp:
|
||||
fp.write(msgs)
|
||||
potfiles.append(potfile)
|
||||
return potfiles
|
||||
|
@@ -613,14 +612,14 @@ class Command(BaseCommand):
|
|||
elif self.verbosity > 0:
|
||||
self.stdout.write(errors)
|
||||
else:
|
||||
with io.open(potfile, 'r', encoding='utf-8') as fp:
|
||||
with open(potfile, 'r', encoding='utf-8') as fp:
|
||||
msgs = fp.read()
|
||||
if not self.invoked_for_django:
|
||||
msgs = self.copy_plural_forms(msgs, locale)
|
||||
msgs = normalize_eols(msgs)
|
||||
msgs = msgs.replace(
|
||||
"#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
|
||||
with io.open(pofile, 'w', encoding='utf-8') as fp:
|
||||
with open(pofile, 'w', encoding='utf-8') as fp:
|
||||
fp.write(msgs)
|
||||
|
||||
if self.no_obsolete:
|
||||
|
@@ -647,7 +646,7 @@ class Command(BaseCommand):
|
|||
for domain in domains:
|
||||
django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
|
||||
if os.path.exists(django_po):
|
||||
with io.open(django_po, 'r', encoding='utf-8') as fp:
|
||||
with open(django_po, 'r', encoding='utf-8') as fp:
|
||||
m = plural_forms_re.search(fp.read())
|
||||
if m:
|
||||
plural_form_line = force_str(m.group('value'))
|
||||
|
|
|
@@ -1,4 +1,3 @@
|
|||
import io
|
||||
import os
|
||||
import sys
|
||||
from itertools import takewhile
|
||||
|
@@ -210,7 +209,7 @@ class Command(BaseCommand):
|
|||
# We just do this once per app
|
||||
directory_created[app_label] = True
|
||||
migration_string = writer.as_string()
|
||||
with io.open(writer.path, "w", encoding='utf-8') as fh:
|
||||
with open(writer.path, "w", encoding='utf-8') as fh:
|
||||
fh.write(migration_string)
|
||||
elif self.verbosity == 3:
|
||||
# Alternatively, makemigrations --dry-run --verbosity 3
|
||||
|
@@ -289,7 +288,7 @@ class Command(BaseCommand):
|
|||
|
||||
if not self.dry_run:
|
||||
# Write the merge migrations file to the disk
|
||||
with io.open(writer.path, "w", encoding='utf-8') as fh:
|
||||
with open(writer.path, "w", encoding='utf-8') as fh:
|
||||
fh.write(writer.as_string())
|
||||
if self.verbosity > 0:
|
||||
self.stdout.write("\nCreated new merge migration %s" % writer.path)
|
||||
|
|
|
@@ -1,5 +1,3 @@
|
|||
import io
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import DEFAULT_DB_ALIAS, connections, migrations
|
||||
|
@@ -163,7 +161,7 @@ class Command(BaseCommand):
|
|||
|
||||
# Write out the new migration file
|
||||
writer = MigrationWriter(new_migration)
|
||||
with io.open(writer.path, "w", encoding='utf-8') as fh:
|
||||
with open(writer.path, "w", encoding='utf-8') as fh:
|
||||
fh.write(writer.as_string())
|
||||
|
||||
if self.verbosity > 0:
|
||||
|
|
|
@@ -1,6 +1,5 @@
|
|||
import cgi
|
||||
import errno
|
||||
import io
|
||||
import mimetypes
|
||||
import os
|
||||
import posixpath
|
||||
|
@@ -158,11 +157,11 @@ class TemplateCommand(BaseCommand):
|
|||
# Only render the Python files, as we don't want to
|
||||
# accidentally render Django templates files
|
||||
if new_path.endswith(extensions) or filename in extra_files:
|
||||
with io.open(old_path, 'r', encoding='utf-8') as template_file:
|
||||
with open(old_path, 'r', encoding='utf-8') as template_file:
|
||||
content = template_file.read()
|
||||
template = Engine().from_string(content)
|
||||
content = template.render(context)
|
||||
with io.open(new_path, 'w', encoding='utf-8') as new_file:
|
||||
with open(new_path, 'w', encoding='utf-8') as new_file:
|
||||
new_file.write(content)
|
||||
else:
|
||||
shutil.copyfile(old_path, new_path)
|
||||
|
|
|
@@ -1,5 +1,4 @@
|
|||
import errno
|
||||
import io
|
||||
import string
|
||||
|
||||
from django.conf import settings
|
||||
|
@@ -30,7 +29,7 @@ class TemplateStrings(BaseEngine):
|
|||
tried = []
|
||||
for template_file in self.iter_template_filenames(template_name):
|
||||
try:
|
||||
with io.open(template_file, encoding=settings.FILE_CHARSET) as fp:
|
||||
with open(template_file, encoding=settings.FILE_CHARSET) as fp:
|
||||
template_code = fp.read()
|
||||
except IOError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
|
|
|
@@ -3,7 +3,6 @@ Wrapper for loading templates from the filesystem.
|
|||
"""
|
||||
|
||||
import errno
|
||||
import io
|
||||
|
||||
from django.core.exceptions import SuspiciousFileOperation
|
||||
from django.template import Origin, TemplateDoesNotExist
|
||||
|
@@ -23,7 +22,7 @@ class Loader(BaseLoader):
|
|||
|
||||
def get_contents(self, origin):
|
||||
try:
|
||||
with io.open(origin.name, encoding=self.engine.file_charset) as fp:
|
||||
with open(origin.name, encoding=self.engine.file_charset) as fp:
|
||||
return fp.read()
|
||||
except IOError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
|
|
|
@@ -1370,7 +1370,7 @@ class FileBasedCacheTests(BaseCacheTests, TestCase):
|
|||
self.assertEqual(cache.get('foo', 'baz'), 'baz')
|
||||
|
||||
def test_get_does_not_ignore_non_enoent_errno_values(self):
|
||||
with mock.patch.object(io, 'open', side_effect=IOError):
|
||||
with mock.patch('builtins.open', side_effect=IOError):
|
||||
with self.assertRaises(IOError):
|
||||
cache.get('foo')
|
||||
|
||||
|
|
|
@@ -1,4 +1,3 @@
|
|||
import io
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
|
@@ -133,7 +132,7 @@ class BasicExtractorTests(ExtractorTests):
|
|||
"""
|
||||
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
|
||||
self.assertTrue(os.path.exists(self.PO_FILE))
|
||||
with io.open(self.PO_FILE, 'r', encoding='utf-8') as fp:
|
||||
with open(self.PO_FILE, 'r', encoding='utf-8') as fp:
|
||||
po_contents = fp.read()
|
||||
# Check two random strings
|
||||
self.assertIn('#. Translators: One-line translator comment #1', po_contents)
|
||||
|
@@ -142,7 +141,7 @@ class BasicExtractorTests(ExtractorTests):
|
|||
def test_comments_extractor(self):
|
||||
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
|
||||
self.assertTrue(os.path.exists(self.PO_FILE))
|
||||
with io.open(self.PO_FILE, 'r', encoding='utf-8') as fp:
|
||||
with open(self.PO_FILE, 'r', encoding='utf-8') as fp:
|
||||
po_contents = fp.read()
|
||||
self.assertNotIn('This comment should not be extracted', po_contents)
|
||||
|
||||
|
@@ -175,7 +174,7 @@ class BasicExtractorTests(ExtractorTests):
|
|||
def test_special_char_extracted(self):
|
||||
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
|
||||
self.assertTrue(os.path.exists(self.PO_FILE))
|
||||
with io.open(self.PO_FILE, 'r', encoding='utf-8') as fp:
|
||||
with open(self.PO_FILE, 'r', encoding='utf-8') as fp:
|
||||
po_contents = fp.read()
|
||||
self.assertMsgId("Non-breaking space\u00a0:", po_contents)
|
||||
|
||||
|
@@ -390,7 +389,7 @@ class BasicExtractorTests(ExtractorTests):
|
|||
shutil.copyfile(BR_PO_BASE + '.pristine', BR_PO_BASE + '.po')
|
||||
management.call_command('makemessages', locale=['pt_BR'], verbosity=0)
|
||||
self.assertTrue(os.path.exists(BR_PO_BASE + '.po'))
|
||||
with io.open(BR_PO_BASE + '.po', 'r', encoding='utf-8') as fp:
|
||||
with open(BR_PO_BASE + '.po', 'r', encoding='utf-8') as fp:
|
||||
po_contents = force_text(fp.read())
|
||||
self.assertMsgStr("Größe", po_contents)
|
||||
|
||||
|
@@ -515,7 +514,7 @@ class CopyPluralFormsExtractorTests(ExtractorTests):
|
|||
"""Ticket #20311."""
|
||||
management.call_command('makemessages', locale=['es'], extensions=['djtpl'], verbosity=0)
|
||||
self.assertTrue(os.path.exists(self.PO_FILE_ES))
|
||||
with io.open(self.PO_FILE_ES, 'r', encoding='utf-8') as fp:
|
||||
with open(self.PO_FILE_ES, 'r', encoding='utf-8') as fp:
|
||||
po_contents = fp.read()
|
||||
found = re.findall(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', po_contents, re.MULTILINE | re.DOTALL)
|
||||
self.assertEqual(1, len(found))
|
||||
|
|
|
@@ -655,7 +655,7 @@ class MakeMigrationsTests(MigrationTestBase):
|
|||
initial_file = os.path.join(migration_dir, "0001_initial.py")
|
||||
self.assertTrue(os.path.exists(initial_file))
|
||||
|
||||
with io.open(initial_file, 'r', encoding='utf-8') as fp:
|
||||
with open(initial_file, 'r', encoding='utf-8') as fp:
|
||||
content = fp.read()
|
||||
self.assertIn('migrations.CreateModel', content)
|
||||
self.assertIn('initial = True', content)
|
||||
|
@@ -798,7 +798,7 @@ class MakeMigrationsTests(MigrationTestBase):
|
|||
initial_file = os.path.join(migration_dir, "0001_initial.py")
|
||||
self.assertTrue(os.path.exists(initial_file))
|
||||
|
||||
with io.open(initial_file, 'r', encoding='utf-8') as fp:
|
||||
with open(initial_file, 'r', encoding='utf-8') as fp:
|
||||
content = fp.read()
|
||||
|
||||
# Remove all whitespace to check for empty dependencies and operations
|
||||
|
@@ -1192,7 +1192,7 @@ class MakeMigrationsTests(MigrationTestBase):
|
|||
migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name))
|
||||
# Check for existing migration file in migration folder
|
||||
self.assertTrue(os.path.exists(migration_file))
|
||||
with io.open(migration_file, "r", encoding="utf-8") as fp:
|
||||
with open(migration_file, "r", encoding="utf-8") as fp:
|
||||
content = fp.read()
|
||||
content = content.replace(" ", "")
|
||||
return content
|
||||
|
@@ -1307,7 +1307,7 @@ class SquashMigrationsTests(MigrationTestBase):
|
|||
call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0)
|
||||
|
||||
squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py")
|
||||
with io.open(squashed_migration_file, "r", encoding="utf-8") as fp:
|
||||
with open(squashed_migration_file, "r", encoding="utf-8") as fp:
|
||||
content = fp.read()
|
||||
self.assertIn("initial = True", content)
|
||||
|
||||
|
@@ -1340,7 +1340,7 @@ class SquashMigrationsTests(MigrationTestBase):
|
|||
interactive=False, verbosity=1, stdout=out)
|
||||
|
||||
squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py")
|
||||
with io.open(squashed_migration_file, "r", encoding="utf-8") as fp:
|
||||
with open(squashed_migration_file, "r", encoding="utf-8") as fp:
|
||||
content = fp.read()
|
||||
self.assertIn(" ('migrations', '0001_initial')", content)
|
||||
self.assertNotIn("initial = True", content)
|
||||
|
|
|
@@ -1,4 +1,3 @@
|
|||
import io
|
||||
import os
|
||||
import re
|
||||
import types
|
||||
|
@@ -269,10 +268,10 @@ def create_path(filename):
|
|||
|
||||
# Add valid and invalid URL tests.
|
||||
# This only tests the validator without extended schemes.
|
||||
with io.open(create_path('valid_urls.txt'), encoding='utf8') as f:
|
||||
with open(create_path('valid_urls.txt'), encoding='utf8') as f:
|
||||
for url in f:
|
||||
TEST_DATA.append((URLValidator(), url.strip(), None))
|
||||
with io.open(create_path('invalid_urls.txt'), encoding='utf8') as f:
|
||||
with open(create_path('invalid_urls.txt'), encoding='utf8') as f:
|
||||
for url in f:
|
||||
TEST_DATA.append((URLValidator(), url.strip(), ValidationError))
|
||||
|
||||
|
|
Loading…
Reference in New Issue