Ensure files get closed when they go out of scope

CPython closes file objects automatically as soon as they become
unreachable, but the language does not guarantee this. Be friendly to
other Python implementations, where finalization may happen much later.
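
As a rough sketch of the pattern this commit applies (the path and the
handle() helper are hypothetical, purely for illustration): bind the
file object in a with statement instead of iterating over the result of
open() directly, so the file is closed deterministically.

    with open(path) as stream:
        for line_number, line in enumerate(stream, 1):
            handle(line_number, line)
    # The file is closed as soon as the with block exits, even if an
    # exception is raised inside it.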

Signed-off-by: Gilles Peskine <Gilles.Peskine@arm.com>
diff --git a/scripts/abi_check.py b/scripts/abi_check.py
index 1f6ac53..f11cdf2 100755
--- a/scripts/abi_check.py
+++ b/scripts/abi_check.py
@@ -249,32 +249,33 @@
         at_paragraph_start = True
         description = None
         full_path = os.path.join(directory, filename)
-        for line_number, line in enumerate(open(full_path), 1):
-            line = line.strip()
-            if not line:
-                at_paragraph_start = True
-                continue
-            if line.startswith('#'):
-                continue
-            if at_paragraph_start:
-                description = line.strip()
-                at_paragraph_start = False
-                continue
-            if line.startswith('depends_on:'):
-                continue
-            # We've reached a test case data line
-            test_case_data = self._normalize_storage_test_case_data(line)
-            if not is_generated:
-                # In manual test data, only look at read tests.
-                function_name = test_case_data.split(':', 1)[0]
-                if 'read' not in function_name.split('_'):
+        with open(full_path) as fd:
+            for line_number, line in enumerate(fd, 1):
+                line = line.strip()
+                if not line:
+                    at_paragraph_start = True
                     continue
-            metadata = SimpleNamespace(
-                filename=filename,
-                line_number=line_number,
-                description=description
-            )
-            storage_tests[test_case_data] = metadata
+                if line.startswith('#'):
+                    continue
+                if at_paragraph_start:
+                    description = line.strip()
+                    at_paragraph_start = False
+                    continue
+                if line.startswith('depends_on:'):
+                    continue
+                # We've reached a test case data line
+                test_case_data = self._normalize_storage_test_case_data(line)
+                if not is_generated:
+                    # In manual test data, only look at read tests.
+                    function_name = test_case_data.split(':', 1)[0]
+                    if 'read' not in function_name.split('_'):
+                        continue
+                metadata = SimpleNamespace(
+                    filename=filename,
+                    line_number=line_number,
+                    description=description
+                )
+                storage_tests[test_case_data] = metadata
 
     @staticmethod
     def _list_generated_test_data_files(git_worktree_path):
diff --git a/scripts/assemble_changelog.py b/scripts/assemble_changelog.py
index b8a63c9..7b036aa 100755
--- a/scripts/assemble_changelog.py
+++ b/scripts/assemble_changelog.py
@@ -407,14 +407,15 @@
     is also present in an output file. This is not perfect but good enough
     for now.
     """
-    generated_output = set(open(generated_output_file, 'r', encoding='utf-8'))
-    for line in open(main_input_file, 'r', encoding='utf-8'):
-        if line not in generated_output:
-            raise LostContent('original file', line)
-    for merged_file in merged_files:
-        for line in open(merged_file, 'r', encoding='utf-8'):
+    with open(generated_output_file, 'r', encoding='utf-8') as fd:
+        generated_output = set(fd)
+        for line in open(main_input_file, 'r', encoding='utf-8'):
             if line not in generated_output:
-                raise LostContent(merged_file, line)
+                raise LostContent('original file', line)
+        for merged_file in merged_files:
+            for line in open(merged_file, 'r', encoding='utf-8'):
+                if line not in generated_output:
+                    raise LostContent(merged_file, line)
 
 def finish_output(changelog, output_file, input_file, merged_files):
     """Write the changelog to the output file.
diff --git a/scripts/mbedtls_dev/macro_collector.py b/scripts/mbedtls_dev/macro_collector.py
index bf82f13..987779d 100644
--- a/scripts/mbedtls_dev/macro_collector.py
+++ b/scripts/mbedtls_dev/macro_collector.py
@@ -18,7 +18,7 @@
 
 import itertools
 import re
-from typing import Dict, Iterable, Iterator, List, Optional, Pattern, Set, Tuple, Union
+from typing import Dict, IO, Iterable, Iterator, List, Optional, Pattern, Set, Tuple, Union
 
 
 class ReadFileLineException(Exception):
@@ -50,12 +50,13 @@
     """
     def __init__(self, filename: str, binary: bool = False) -> None:
         self.filename = filename
+        self.file = None #type: Optional[IO[str]]
         self.line_number = 'entry' #type: Union[int, str]
         self.generator = None #type: Optional[Iterable[Tuple[int, str]]]
         self.binary = binary
     def __enter__(self) -> 'read_file_lines':
-        self.generator = enumerate(open(self.filename,
-                                        'rb' if self.binary else 'r'))
+        self.file = open(self.filename, 'rb' if self.binary else 'r')
+        self.generator = enumerate(self.file)
         return self
     def __iter__(self) -> Iterator[str]:
         assert self.generator is not None
@@ -64,6 +65,8 @@
             yield content
         self.line_number = 'exit'
     def __exit__(self, exc_type, exc_value, exc_traceback) -> None:
+        if self.file is not None:
+            self.file.close()
         if exc_type is not None:
             raise ReadFileLineException(self.filename, self.line_number) \
                 from exc_value
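
Note on usage, not part of the patch: read_file_lines is meant to be used
as a context manager, so callers are unaffected by the new self.file
bookkeeping. A minimal, hypothetical sketch (the header path and the
process_line() callback are made up for illustration):

    with read_file_lines('include/psa/crypto_values.h') as lines:
        for line in lines:
            process_line(line)
    # __exit__ now closes the underlying file, and still reports any
    # error as ReadFileLineException carrying the current line number.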