Browse code

Apply review comments

Signed-off-by: Jack Curtis <[email protected]>
Jack Curtis, 3 years ago
Parent
Current commit
275a4bab00

+ 38 - 19
scripts/build/TestImpactAnalysis/storage_query_tool/local_storage_query_tool.py

@@ -109,22 +109,19 @@ class LocalStorageQueryTool(StorageQueryTool):
             try:
                 json_obj = json.loads(file)
                 if not self._check_object_exists(str(storage_location)):
-                    with open(f"{storage_location}", "w", encoding="UTF-8") as raw_output_file:
-                        json.dump(json_obj, raw_output_file,
-                                ensure_ascii=False, indent=4)
+                    self._write_json_file(json_obj, storage_location)
                 else:
                     logger.info("Cancelling create, as file exists already")
             except json.JSONDecodeError:
                 logger.error("The historic data does not contain valid json")
-        if self._file_type == self.FileType.ZIP:
-            try:
-                if not self._check_object_exists(str(storage_location)):
-                    with open(f"{storage_location}", "wb") as raw_output_file:
-                        raw_output_file.write(file)
-                else:
-                    logger.info("Cancelling create, as file exists already")
-            except OSError as e:
-                logger.error(e)
+        elif self._file_type == self.FileType.ZIP:
+            if not self._check_object_exists(str(storage_location)):
+                self._write_zip_file(file, storage_location)
+            else:
+                logger.info("Cancelling create, as file exists already")
+        else:
+            raise SystemError(
+                "File type not specified or otherwise not passed through to SQT")
 
     def _update(self, file: str, storage_location: str):
         """
@@ -137,19 +134,41 @@ class LocalStorageQueryTool(StorageQueryTool):
             try:
                 json_obj = json.loads(file)
                 if self._check_object_exists(storage_location):
-                    with open(f"{storage_location}", "w", encoding="UTF-8") as raw_output_file:
-                        json.dump(json_obj, raw_output_file,
-                                ensure_ascii=False, indent=4)
+                    self._write_json_file(json_obj, storage_location)
                 else:
                     logger.info("Cancelling update, as file does not exist")
             except json.JSONDecodeError:
                 logger.error("The historic data does not contain valid json")
-        if self._file_type == self.FileType.ZIP:
+        elif self._file_type == self.FileType.ZIP:
             try:
                 if self._check_object_exists(str(storage_location)):
-                    with open(f"{storage_location}", "wb") as raw_output_file:
-                        raw_output_file.write(file)
+                    self._write_zip_file(file, storage_location)
                 else:
                     logger.info("Cancelling create, as file exists already")
             except OSError as e:
-                logger.error(e)
+                logger.error(e)
+
+    def _write_json_file(self, json_obj, storage_location):
+        """
+        Writes the provided json.dump compatible object to the specified location with formatting and UTF-8 encoding.
+        @param json_obj: Object compatible with json.dump to store
+        @param storage_location: Location to store the object.
+        """
+        try:
+            with open(f"{storage_location}", "w", encoding="UTF-8") as raw_output_file:
+                json.dump(json_obj, raw_output_file,
+                          ensure_ascii=False, indent=4)
+        except OSError as e:
+            logger.error(e)
+
+    def _write_zip_file(self, zip_file, storage_location):
+        """
+        Writes the provided zip file to the provided storage location using open in "write binary" mode.
+        @param zip_file: File to store
+        @param storage_location: Location to store file in.
+        """
+        try:
+            with open(f"{storage_location}", "wb") as raw_output_file:
+                raw_output_file.write(zip_file)
+        except OSError as e:
+            logger.error(e)

+ 3 - 1
scripts/build/TestImpactAnalysis/storage_query_tool/s3_storage_query_tool.py

@@ -128,8 +128,10 @@ class S3StorageQueryTool(StorageQueryTool):
                 raise e
             if self._file_type == self.FileType.JSON:
                 self._save_as_json_file(file_stream, destination)
-            if self._file_type == self.FileType.ZIP:
+            elif self._file_type == self.FileType.ZIP:
                 self._save_as_zip_file(file_stream, destination)
+            else:
+                raise SystemError("File type not specified or otherwise not passed through to SQT")
 
     def _save_as_zip_file(self, file_stream, destination: str):
         """

+ 1 - 2
scripts/build/TestImpactAnalysis/tiaf_tools.py

@@ -74,14 +74,13 @@ def parse_args():
 
     parser.add_argument(
         "--file-type",
-        choices=["json","zip"],
+        choices=["json", "zip"],
         help="What file type SQT should expect to be interacting with. Current options are zip and json.",
         required=True
     )
 
     return parser.parse_args()
 
-
 def run(args: dict):
     try:
         if args.get('s3_bucket'):