diff --git a/tests/assets/update_file_name_test/add_media_file_1.jpg b/tests/assets/update_file_name_test/add_media_file_1.jpg new file mode 100644 index 0000000..3ce01d6 Binary files /dev/null and b/tests/assets/update_file_name_test/add_media_file_1.jpg differ diff --git a/tests/assets/update_file_name_test/add_media_file_2.jpg b/tests/assets/update_file_name_test/add_media_file_2.jpg new file mode 100644 index 0000000..3ce01d6 Binary files /dev/null and b/tests/assets/update_file_name_test/add_media_file_2.jpg differ diff --git a/tests/assets/update_file_name_test/create.csv b/tests/assets/update_file_name_test/create.csv new file mode 100644 index 0000000..9bb7e32 --- /dev/null +++ b/tests/assets/update_file_name_test/create.csv @@ -0,0 +1,3 @@ +file,id,title,field_model +add_media_file_1.jpg,1,title1,Collection +add_media_file_2.jpg,2,title2,Collection \ No newline at end of file diff --git a/tests/assets/update_file_name_test/create.yml b/tests/assets/update_file_name_test/create.yml new file mode 100644 index 0000000..19199dc --- /dev/null +++ b/tests/assets/update_file_name_test/create.yml @@ -0,0 +1,7 @@ +task: create +host: https://islandora.traefik.me +username: admin +password: password +input_dir: "tests/assets/update_file_name_test" +input_csv: create.csv +standalone_media_url: true \ No newline at end of file diff --git a/tests/assets/update_file_name_test/update.yml b/tests/assets/update_file_name_test/update.yml new file mode 100644 index 0000000..47928d9 --- /dev/null +++ b/tests/assets/update_file_name_test/update.yml @@ -0,0 +1,7 @@ +task: update_files +host: https://islandora.traefik.me +username: admin +password: password +input_dir: "tests/assets/update_file_name_test" +input_csv: update_files.csv +standalone_media_url: true \ No newline at end of file diff --git a/tests/islandora_tests.py b/tests/islandora_tests.py index badee46..f9d93d8 100644 --- a/tests/islandora_tests.py +++ b/tests/islandora_tests.py @@ -1515,6 +1515,145 @@ def 
tearDown(self): os.remove(self.true_with_additional_files_log_file_path) +class TestUpdateFileName(unittest.TestCase): + + def setUp(self): + self.current_dir = os.path.dirname(os.path.abspath(__file__)) + self.create_config_file_path = os.path.join( + self.current_dir, "assets", "update_file_name_test", "create.yml" + ) + self.create_csv_file_path = os.path.join( + self.current_dir, "assets", "update_file_name_test", "create.csv" + ) + self.create_cmd = ["./workbench", "--config", self.create_config_file_path] + self.nids = list() + self.fids = list() + + self.update_csv_file_path = os.path.join( + self.current_dir, "assets", "update_file_name_test", "update_files.csv" + ) + self.update_config_file_path = os.path.join( + self.current_dir, "assets", "update_file_name_test", "update.yml" + ) + self.update_cmd = ["./workbench", "--config", self.update_config_file_path] + + parser = argparse.ArgumentParser() + parser.add_argument("--config") + parser.add_argument("--check") + parser.add_argument("--get_csv_template") + parser.add_argument("--quick_delete_node") + parser.set_defaults(config=self.create_config_file_path, check=False) + args = parser.parse_args(sys.argv[1:-1]) + workbench_config = WorkbenchConfig(args) + config = workbench_config.get_config() + self.config = config + + self.temp_dir = tempfile.gettempdir() + + def test_update(self): + create_output = subprocess.check_output(self.create_cmd) + self.create_output = create_output.decode().strip() + + create_lines = self.create_output.splitlines() + for line in create_lines: + if "created at" in line: + nid = line.rsplit("/", 1)[-1] + nid = nid.strip(".") + self.nids.append(nid) + self.assertEqual(len(self.nids), 2) + test_file_names = ["add_media_file_1.jpg", "add_media_file_2.jpg"] + + i = 0 + with open(self.create_csv_file_path): + csv_data = workbench_utils.get_csv_data(self.config) + for row in csv_data: + self.test_file_path = os.path.join( + self.current_dir, "assets", "update_file_name_test", 
test_file_names[i] + ) + with open(self.test_file_path): + file_id = workbench_utils.create_file( + self.config, test_file_names[i], "file", row, row["id"] + ) + self.assertIsNot(file_id, False) + self.fids.append(file_id) + i = i + 1 + + # Hit URL to see if file was created + i = 0 + while i <= 1: + file = {} + file_endpoint = ( + self.config["host"] + + "/entity/file/" + + str(self.fids[i]) + + "?_format=json" + ) + file_headers = {"Content-Type": "application/json"} + file_response = workbench_utils.issue_request( + self.config, "GET", file_endpoint, file_headers, file + ) + self.assertEqual(file_response.status_code, 200) + i = i + 1 + + # Write out an update_files CSV + update_csv_file_rows = list() + new_file_names = ["new_file_name_1.jpg", "new_file_name_2.jpg"] + update_csv_file_rows.append("file_id,filename") + i = 0 + while i <= 1: + update_csv_file_rows.append(f"{self.fids[i]},{new_file_names[i]}") + i = i + 1 + with open(self.update_csv_file_path, mode="wt") as update_csv_file: + update_csv_file.write("\n".join(update_csv_file_rows)) + subprocess.check_output(self.update_cmd) + + # Hit URL to see if file was successfully renamed + i = 0 + while i <= 1: + file = {} + file_endpoint = ( + self.config["host"] + + "/entity/file/" + + str(self.fids[i]) + + "?_format=json" + ) + file_headers = {"Content-Type": "application/json"} + file_response = workbench_utils.issue_request( + self.config, "GET", file_endpoint, file_headers, file + ) + self.assertEqual(file_response.status_code, 200) + self.assertEqual( + file_response.json()["filename"][0]["value"], new_file_names[i] + ) + i = i + 1 + + def tearDown(self): + for nid in self.nids: + quick_delete_cmd = [ + "./workbench", + "--config", + self.create_config_file_path, + "--quick_delete_node", + "https://islandora.traefik.me/node/" + nid, + ] + quick_delete_output = subprocess.check_output(quick_delete_cmd) + + self.rollback_file_path = os.path.join( + self.current_dir, "assets", "update_file_name_test", 
"rollback.csv" + ) + if os.path.exists(self.rollback_file_path): + os.remove(self.rollback_file_path) + + self.preprocessed_file_path = os.path.join( + self.temp_dir, "create.csv.preprocessed" + ) + if os.path.exists(self.preprocessed_file_path): + os.remove(self.preprocessed_file_path) + + if os.path.exists(self.update_csv_file_path): + os.remove(self.update_csv_file_path) + + class TestUpdateMediaFields(unittest.TestCase): """Create a couple nodes plus image media, update the media's field_original_name and field_width fields, then confirm they were updated by GETting the media's JSON. diff --git a/workbench b/workbench index 737a544..e1d6834 100755 --- a/workbench +++ b/workbench @@ -3347,6 +3347,83 @@ def update_terms(): pbar(row_position) +def update_files(): + """Update files via PATCH. Note that PATCHing replaces the target field, + so if we are adding an additional value to a multivalued field, we need + to include the existing value(s) in our PATCH. The field classes take + care of preserving existing values in 'append' updates. + """ + message = ( + '"Update Files" (' + + config["update_mode"] + + ") task started using config file " + + args.config + + "." + ) + print(message) + logging.info(message) + + csv_data = get_csv_data(config) + csv_column_headers = csv_data.fieldnames + + if "file_id" not in csv_column_headers: + message = ( + 'For "update_files" tasks, your CSV file must contain a "file_id" column.' + ) + logging.error(message) + sys.exit("Error: " + message) + method = "PATCH" + row_count = 0 + for row in csv_data: + # Delete expired items from request_cache before processing a row. 
+ if config["enable_http_cache"] is True: + requests_cache.delete(expired=True) + file = {} + if "filename" in csv_column_headers: + if len(row["filename"].strip()) != 0: + file["filename"] = [{"value": row["filename"]}] + + file_endpoint = ( + config["host"] + "/entity/file/" + row["file_id"] + "?_format=json" + ) + file_headers = {"Content-Type": "application/json"} + file_response = issue_request( + config, method.upper(), file_endpoint, file_headers, file + ) + + if file_response.status_code == 200: + if config["progress_bar"] is False: + print( + "File " + + config["host"] + + "/entity/file/" + + row["file_id"] + + " updated." + ) + logging.info( + "File %s updated.", + config["host"] + "/entity/file/" + row["file_id"], + ) + else: + if config["progress_bar"] is False: + print( + "File " + + config["host"] + + "/entity/file/" + + row["file_id"] + + " could not be updated." + ) + logging.error( + "File %s could not be updated.", + config["host"] + "/entity/file/" + row["file_id"], + ) + + if config["progress_bar"] is True: + row_count += 1 + row_position = get_percentage(row_count, num_csv_records) + pbar(row_position) + + +def create_redirects(): + """Create new redirect entities via POST.""" + message = '"Create redirects" task started using config file ' + args.config + "."
@@ -3783,6 +3860,8 @@ try: update_media() if config["task"] == "update_terms": update_terms() + if config["task"] == "update_files": + update_files() if config["task"] == "create_redirects": create_redirects() diff --git a/workbench_utils.py b/workbench_utils.py index e6c3d6c..6058739 100644 --- a/workbench_utils.py +++ b/workbench_utils.py @@ -1858,12 +1858,13 @@ def check_input(config, args): "get_media_report_from_view", "update_terms", "create_redirects", + "update_files", ] joiner = ", " if config["task"] not in tasks: message = ( '"task" in your configuration file must be one of "create", "update", "delete", ' - + '"add_media", "update_media", "delete_media", "delete_media_by_node", "create_from_files", "create_terms", "export_csv", "get_data_from_view", "update_terms", or "create_redirects".' + + '"add_media", "update_media", "delete_media", "delete_media_by_node", "create_from_files", "create_terms", "export_csv", "get_data_from_view", "update_terms", "create_redirects", or "update_files".' ) logging.error(message) sys.exit("Error: " + message)