Use with statements to eliminate ResourceWarnings #560

Merged
merged 1 commit into from Sep 23, 2021
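The change is mechanical across the test helpers: every bare open(...) whose handle was previously left for the garbage collector is wrapped in a with statement, so the file is closed deterministically and CPython no longer emits a ResourceWarning when the object is reclaimed. Below is a minimal sketch of the before/after pattern; the helper names and the temporary file are illustrative only (not part of the PyYAML code), and the leak detection relies on CPython's reference-counting behaviour.

import tempfile, warnings

def read_old(path):
    # Old pattern used throughout the tests: the file object is never closed
    # explicitly, so CPython emits a ResourceWarning when it reclaims it.
    return open(path, 'rb').read()

def read_new(path):
    # Pattern this PR switches to: the with statement closes the handle on exit.
    with open(path, 'rb') as file:
        return file.read()

if __name__ == '__main__':
    with tempfile.NamedTemporaryFile(suffix='.yaml', delete=False) as tmp:
        tmp.write(b'key: value\n')

    for reader in (read_old, read_new):
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter('always')
            reader(tmp.name)
        leaks = [w for w in caught if issubclass(w.category, ResourceWarning)]
        print(reader.__name__, '->', len(leaks), 'ResourceWarning(s)')

On CPython this reports one ResourceWarning for the old pattern and none for the new one.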
3 changes: 2 additions & 1 deletion tests/lib/test_appliance.py
@@ -103,7 +103,8 @@ def display(results, verbose):
for filename in filenames:
sys.stdout.write('-'*75+'\n')
sys.stdout.write('%s:\n' % filename)
data = open(filename, 'r', errors='replace').read()
with open(filename, 'r', errors='replace') as file:
data = file.read()
sys.stdout.write(data)
if data and data[-1] != '\n':
sys.stdout.write('\n')
9 changes: 6 additions & 3 deletions tests/lib/test_canonical.py
@@ -2,7 +2,8 @@
import yaml, canonical

def test_canonical_scanner(canonical_filename, verbose=False):
data = open(canonical_filename, 'rb').read()
with open(canonical_filename, 'rb') as file:
data = file.read()
tokens = list(yaml.canonical_scan(data))
assert tokens, tokens
if verbose:
@@ -12,7 +13,8 @@ def test_canonical_scanner(canonical_filename, verbose=False):
test_canonical_scanner.unittest = ['.canonical']

def test_canonical_parser(canonical_filename, verbose=False):
data = open(canonical_filename, 'rb').read()
with open(canonical_filename, 'rb') as file:
data = file.read()
events = list(yaml.canonical_parse(data))
assert events, events
if verbose:
@@ -22,7 +24,8 @@ def test_canonical_parser(canonical_filename, verbose=False):
test_canonical_parser.unittest = ['.canonical']

def test_canonical_error(data_filename, canonical_filename, verbose=False):
data = open(data_filename, 'rb').read()
with open(data_filename, 'rb') as file:
data = file.read()
try:
output = list(yaml.canonical_load_all(data))
except yaml.YAMLError as exc:
9 changes: 6 additions & 3 deletions tests/lib/test_constructor.py
@@ -257,10 +257,12 @@ def test_constructor_types(data_filename, code_filename, verbose=False):
native1 = None
native2 = None
try:
native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader))
with open(data_filename, 'rb') as file:
native1 = list(yaml.load_all(file, Loader=MyLoader))
if len(native1) == 1:
native1 = native1[0]
native2 = _load_code(open(code_filename, 'rb').read())
with open(code_filename, 'rb') as file:
native2 = _load_code(file.read())
try:
if native1 == native2:
return
@@ -284,7 +286,8 @@ def test_constructor_types(data_filename, code_filename, verbose=False):
def test_subclass_blacklist_types(data_filename, verbose=False):
_make_objects()
try:
yaml.load(open(data_filename, 'rb').read(), MyFullLoader)
with open(data_filename, 'rb') as file:
yaml.load(file.read(), MyFullLoader)
except yaml.YAMLError as exc:
if verbose:
print("%s:" % exc.__class__.__name__, exc)
12 changes: 8 additions & 4 deletions tests/lib/test_emitter.py
@@ -15,7 +15,8 @@ def _compare_events(events1, events2):
assert event1.value == event2.value, (event1, event2)

def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
events = list(yaml.parse(open(data_filename, 'rb')))
with open(data_filename, 'rb') as file:
events = list(yaml.parse(file))
output = yaml.emit(events)
if verbose:
print("OUTPUT:")
@@ -26,7 +27,8 @@ def test_emitter_on_data(data_filename, canonical_filename, verbose=False):
test_emitter_on_data.unittest = ['.data', '.canonical']

def test_emitter_on_canonical(canonical_filename, verbose=False):
events = list(yaml.parse(open(canonical_filename, 'rb')))
with open(canonical_filename, 'rb') as file:
events = list(yaml.parse(file))
for canonical in [False, True]:
output = yaml.emit(events, canonical=canonical)
if verbose:
@@ -39,7 +41,8 @@ def test_emitter_on_canonical(canonical_filename, verbose=False):

def test_emitter_styles(data_filename, canonical_filename, verbose=False):
for filename in [data_filename, canonical_filename]:
events = list(yaml.parse(open(filename, 'rb')))
with open(filename, 'rb') as file:
events = list(yaml.parse(file))
for flow_style in [False, True]:
for style in ['|', '>', '"', '\'', '']:
styled_events = []
@@ -86,7 +89,8 @@ def construct_event(self, node):
EventsLoader.add_constructor(None, EventsLoader.construct_event)

def test_emitter_events(events_filename, verbose=False):
events = list(yaml.load(open(events_filename, 'rb'), Loader=EventsLoader))
with open(events_filename, 'rb') as file:
events = list(yaml.load(file, Loader=EventsLoader))
output = yaml.emit(events)
if verbose:
print("OUTPUT:")
16 changes: 10 additions & 6 deletions tests/lib/test_errors.py
@@ -3,7 +3,8 @@

def test_loader_error(error_filename, verbose=False):
try:
list(yaml.load_all(open(error_filename, 'rb'), yaml.FullLoader))
with open(error_filename, 'rb') as file:
list(yaml.load_all(file, yaml.FullLoader))
except yaml.YAMLError as exc:
if verbose:
print("%s:" % exc.__class__.__name__, exc)
@@ -14,7 +15,8 @@ def test_loader_error(error_filename, verbose=False):

def test_loader_error_string(error_filename, verbose=False):
try:
list(yaml.load_all(open(error_filename, 'rb').read(), yaml.FullLoader))
with open(error_filename, 'rb') as file:
list(yaml.load_all(file.read(), yaml.FullLoader))
except yaml.YAMLError as exc:
if verbose:
print("%s:" % exc.__class__.__name__, exc)
@@ -25,7 +27,8 @@ def test_loader_error_string(error_filename, verbose=False):

def test_loader_error_single(error_filename, verbose=False):
try:
yaml.load(open(error_filename, 'rb').read(), yaml.FullLoader)
with open(error_filename, 'rb') as file:
yaml.load(file.read(), yaml.FullLoader)
except yaml.YAMLError as exc:
if verbose:
print("%s:" % exc.__class__.__name__, exc)
@@ -35,8 +38,8 @@ def test_loader_error_single(error_filename, verbose=False):
test_loader_error_single.unittest = ['.single-loader-error']

def test_emitter_error(error_filename, verbose=False):
events = list(yaml.load(open(error_filename, 'rb'),
Loader=test_emitter.EventsLoader))
with open(error_filename, 'rb') as file:
events = list(yaml.load(file, Loader=test_emitter.EventsLoader))
try:
yaml.emit(events)
except yaml.YAMLError as exc:
@@ -48,7 +51,8 @@ def test_emitter_error(error_filename, verbose=False):
test_emitter_error.unittest = ['.emitter-error']

def test_dumper_error(error_filename, verbose=False):
code = open(error_filename, 'rb').read()
with open(error_filename, 'rb') as file:
code = file.read()
try:
import yaml
from io import StringIO
31 changes: 18 additions & 13 deletions tests/lib/test_input_output.py
@@ -3,7 +3,8 @@
import codecs, io, tempfile, os, os.path

def test_unicode_input(unicode_filename, verbose=False):
data = open(unicode_filename, 'rb').read().decode('utf-8')
with open(unicode_filename, 'rb') as file:
data = file.read().decode('utf-8')
value = ' '.join(data.split())
output = yaml.full_load(data)
assert output == value, (output, value)
@@ -23,7 +24,8 @@ def test_unicode_input(unicode_filename, verbose=False):
test_unicode_input.unittest = ['.unicode']

def test_unicode_input_errors(unicode_filename, verbose=False):
data = open(unicode_filename, 'rb').read().decode('utf-8')
with open(unicode_filename, 'rb') as file:
data = file.read().decode('utf-8')
for input in [data.encode('utf-16-be'),
data.encode('utf-16-le'),
codecs.BOM_UTF8+data.encode('utf-16-be'),
@@ -47,7 +49,8 @@ def test_unicode_input_errors(unicode_filename, verbose=False):
test_unicode_input_errors.unittest = ['.unicode']

def test_unicode_output(unicode_filename, verbose=False):
data = open(unicode_filename, 'rb').read().decode('utf-8')
with open(unicode_filename, 'rb') as file:
data = file.read().decode('utf-8')
value = ' '.join(data.split())
for allow_unicode in [False, True]:
data1 = yaml.dump(value, allow_unicode=allow_unicode)
@@ -82,7 +85,8 @@ def test_unicode_output(unicode_filename, verbose=False):
test_unicode_output.unittest = ['.unicode']

def test_file_output(unicode_filename, verbose=False):
data = open(unicode_filename, 'rb').read().decode('utf-8')
with open(unicode_filename, 'rb') as file:
data = file.read().decode('utf-8')
handle, filename = tempfile.mkstemp()
os.close(handle)
try:
@@ -92,14 +96,14 @@ def test_file_output(unicode_filename, verbose=False):
stream = io.BytesIO()
yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True)
data2 = stream.getvalue().decode('utf-16-le')[1:]
stream = open(filename, 'w', encoding='utf-16-le')
yaml.dump(data, stream, allow_unicode=True)
stream.close()
data3 = open(filename, 'r', encoding='utf-16-le').read()
stream = open(filename, 'wb')
yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
stream.close()
data4 = open(filename, 'r', encoding='utf-8').read()
with open(filename, 'w', encoding='utf-16-le') as stream:
yaml.dump(data, stream, allow_unicode=True)
with open(filename, 'r', encoding='utf-16-le') as file:
data3 = file.read()
with open(filename, 'wb') as stream:
yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)
with open(filename, 'r', encoding='utf-8') as file:
data4 = file.read()
assert data1 == data2, (data1, data2)
assert data1 == data3, (data1, data3)
assert data1 == data4, (data1, data4)
@@ -110,7 +114,8 @@ def test_file_output(unicode_filename, verbose=False):
test_file_output.unittest = ['.unicode']

def test_unicode_transfer(unicode_filename, verbose=False):
data = open(unicode_filename, 'rb').read().decode('utf-8')
with open(unicode_filename, 'rb') as file:
data = file.read().decode('utf-8')
for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
input = data
if encoding is not None:
3 changes: 2 additions & 1 deletion tests/lib/test_mark.py
@@ -2,7 +2,8 @@
import yaml

def test_marks(marks_filename, verbose=False):
inputs = open(marks_filename, 'r').read().split('---\n')[1:]
with open(marks_filename, 'r') as file:
inputs = file.read().split('---\n')[1:]
for input in inputs:
index = 0
line = 0
6 changes: 4 additions & 2 deletions tests/lib/test_multi_constructor.py
@@ -29,8 +29,10 @@ class Multi2(yaml.FullLoader):
pass

def test_multi_constructor(input_filename, code_filename, verbose=False):
input = open(input_filename, 'rb').read().decode('utf-8')
native = _load_code(open(code_filename, 'rb').read())
with open(input_filename, 'rb') as file:
input = file.read().decode('utf-8')
with open(code_filename, 'rb') as file:
native = _load_code(file.read())

# default multi constructor for ! and !! tags
Multi1.add_multi_constructor('!', myconstructor1)
12 changes: 8 additions & 4 deletions tests/lib/test_reader.py
@@ -13,18 +13,22 @@ def _run_reader(data, verbose):
raise AssertionError("expected an exception")

def test_stream_error(error_filename, verbose=False):
_run_reader(open(error_filename, 'rb'), verbose)
_run_reader(open(error_filename, 'rb').read(), verbose)
with open(error_filename, 'rb') as file:
_run_reader(file, verbose)
with open(error_filename, 'rb') as file:
_run_reader(file.read(), verbose)
for encoding in ['utf-8', 'utf-16-le', 'utf-16-be']:
try:
data = open(error_filename, 'rb').read().decode(encoding)
with open(error_filename, 'rb') as file:
data = file.read().decode(encoding)
break
except UnicodeDecodeError:
pass
else:
return
_run_reader(data, verbose)
_run_reader(open(error_filename, encoding=encoding), verbose)
with open(error_filename, encoding=encoding) as file:
_run_reader(file, verbose)

test_stream_error.unittest = ['.stream-error']

3 changes: 2 additions & 1 deletion tests/lib/test_recursive.py
@@ -24,7 +24,8 @@ def __setstate__(self, state):

def test_recursive(recursive_filename, verbose=False):
context = globals().copy()
exec(open(recursive_filename, 'rb').read(), context)
with open(recursive_filename, 'rb') as file:
exec(file.read(), context)
value1 = context['value']
output1 = None
value2 = None
3 changes: 2 additions & 1 deletion tests/lib/test_representer.py
@@ -7,7 +7,8 @@ def test_representer_types(code_filename, verbose=False):
test_constructor._make_objects()
for allow_unicode in [False, True]:
for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']:
native1 = test_constructor._load_code(open(code_filename, 'rb').read())
with open(code_filename, 'rb') as file:
native1 = test_constructor._load_code(file.read())
native2 = None
try:
output = yaml.dump(native1, Dumper=test_constructor.MyDumper,
26 changes: 16 additions & 10 deletions tests/lib/test_resolver.py
@@ -6,8 +6,10 @@ def test_implicit_resolver(data_filename, detect_filename, verbose=False):
correct_tag = None
node = None
try:
correct_tag = open(detect_filename, 'r').read().strip()
node = yaml.compose(open(data_filename, 'rb'))
with open(detect_filename, 'r') as file:
correct_tag = file.read().strip()
with open(data_filename, 'rb') as file:
node = yaml.compose(file)
assert isinstance(node, yaml.SequenceNode), node
for scalar in node.value:
assert isinstance(scalar, yaml.ScalarNode), scalar
@@ -58,8 +60,10 @@ def _convert_node(node):

def test_path_resolver_loader(data_filename, path_filename, verbose=False):
_make_path_loader_and_dumper()
nodes1 = list(yaml.compose_all(open(data_filename, 'rb').read(), Loader=MyLoader))
nodes2 = list(yaml.compose_all(open(path_filename, 'rb').read()))
with open(data_filename, 'rb') as file:
nodes1 = list(yaml.compose_all(file.read(), Loader=MyLoader))
with open(path_filename, 'rb') as file:
nodes2 = list(yaml.compose_all(file.read()))
try:
for node1, node2 in zip(nodes1, nodes2):
data1 = _convert_node(node1)
@@ -74,15 +78,17 @@ def test_path_resolver_loader(data_filename, path_filename, verbose=False):
def test_path_resolver_dumper(data_filename, path_filename, verbose=False):
_make_path_loader_and_dumper()
for filename in [data_filename, path_filename]:
output = yaml.serialize_all(yaml.compose_all(open(filename, 'rb')), Dumper=MyDumper)
with open(filename, 'rb') as file:
output = yaml.serialize_all(yaml.compose_all(file), Dumper=MyDumper)
if verbose:
print(output)
nodes1 = yaml.compose_all(output)
nodes2 = yaml.compose_all(open(data_filename, 'rb'))
for node1, node2 in zip(nodes1, nodes2):
data1 = _convert_node(node1)
data2 = _convert_node(node2)
assert data1 == data2, (data1, data2)
with open(data_filename, 'rb') as file:
nodes2 = yaml.compose_all(file)
for node1, node2 in zip(nodes1, nodes2):
data1 = _convert_node(node1)
data2 = _convert_node(node2)
assert data1 == data2, (data1, data2)

test_path_resolver_dumper.unittest = ['.data', '.path']

6 changes: 4 additions & 2 deletions tests/lib/test_sort_keys.py
@@ -3,8 +3,10 @@
import sys

def test_sort_keys(input_filename, sorted_filename, verbose=False):
input = open(input_filename, 'rb').read().decode('utf-8')
sorted = open(sorted_filename, 'rb').read().decode('utf-8')
with open(input_filename, 'rb') as file:
input = file.read().decode('utf-8')
with open(sorted_filename, 'rb') as file:
sorted = file.read().decode('utf-8')
data = yaml.load(input, Loader=yaml.FullLoader)
dump_sorted = yaml.dump(data, default_flow_style=False, sort_keys=True)
dump_unsorted = yaml.dump(data, default_flow_style=False, sort_keys=False)
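ResourceWarning is suppressed by the default warning filters, so a run only reports these leaks when it opts in. A short, generic sketch for checking the behaviour outside the test runner follows; the script is illustrative and not part of the suite.

import warnings

# ResourceWarning is hidden by the default filters; opt in explicitly.
# Running the interpreter with -X dev or -W always::ResourceWarning has
# the same effect from the command line.
warnings.simplefilter('always', ResourceWarning)

with open(__file__, 'rb') as file:   # closed deterministically: no warning
    file.read()

open(__file__, 'rb').read()          # leaked handle: CPython reports a ResourceWarning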