Skip to content

Commit

Permalink
pklファイルの置換完了(NPYファイルとCaimanのエラーWIP) — [EN: pkl-file ID replacement complete (NPY files and Caiman errors still WIP)]
Browse files Browse the repository at this point in the history
  • Loading branch information
tsuchiyama-araya committed Dec 24, 2024
1 parent 11f6a22 commit c039509
Show file tree
Hide file tree
Showing 2 changed files with 84 additions and 14 deletions.
8 changes: 4 additions & 4 deletions frontend/yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -7404,8 +7404,8 @@ internal-slot@^1.0.7, internal-slot@^1.1.0:
integrity sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==
dependencies:
es-errors "^1.3.0"
hasown "^2.0.0"
side-channel "^1.0.4"
hasown "^2.0.2"
side-channel "^1.1.0"

invariant@^2.2.4:
version "2.2.4"
Expand Down Expand Up @@ -7492,7 +7492,7 @@ is-callable@^1.1.3, is-callable@^1.2.7:
resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055"
integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==

is-core-module@^2.13.0, is-core-module@^2.15.1, is-core-module@^2.16.0:
is-core-module@^2.13.0, is-core-module@^2.15.1:
version "2.16.0"
resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.0.tgz#6c01ffdd5e33c49c1d2abfa93334a85cb56bd81c"
integrity sha512-urTSINYfAYgcbLb0yDQ6egFm6h3Mo1DcF9EkyXSRjjzdHbsulg01qhwWuXdOoUBuTkbQ80KDboXa0vFJ+BDH+g==
Expand Down Expand Up @@ -11082,7 +11082,7 @@ resolve@^1.0.0, resolve@^1.1.10, resolve@^1.1.5, resolve@^1.1.7, resolve@^1.14.2
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d"
integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==
dependencies:
is-core-module "^2.16.0"
is-core-module "^2.13.0"
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"

Expand Down
90 changes: 80 additions & 10 deletions studio/app/common/core/experiment/experiment_writer.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
import glob
import os
import pickle
import re
import shutil
from dataclasses import asdict
from datetime import datetime
from typing import Dict

import numpy as np
import yaml
from fastapi import Path

Check failure on line 12 in studio/app/common/core/experiment/experiment_writer.py

View workflow job for this annotation

GitHub Actions / flake8

'fastapi.Path' imported but unused

from studio.app.common.core.experiment.experiment import ExptConfig, ExptFunction
from studio.app.common.core.experiment.experiment_builder import ExptConfigBuilder
Expand Down Expand Up @@ -200,41 +203,108 @@ def __replace_unique_id_in_files(
logger = AppLogger.get_logger()

try:
# Use glob to recursively search for all .yaml files
yaml_files = glob.glob(
# Collect targeted files
targeted_files_yaml = glob.glob(
os.path.join(directory, "**", "*.yaml"), recursive=True
)
targeted_files_npy = glob.glob(
os.path.join(directory, "**", "*.npy"), recursive=True
)
targeted_files_pkl = glob.glob(
os.path.join(directory, "**", "*.pkl"), recursive=True
)

# Define a regex pattern to safely match the unique_id in paths or keys
unique_id_pattern = rf"(\b{re.escape(old_unique_id)}\b)"

for file_path in yaml_files:
# Process .yaml files (unchanged)
for file_path in targeted_files_yaml:
try:
# Read the file content
with open(
file_path, "r", encoding="utf-8", errors="ignore"
) as file:
content = file.read()

# Replace only exact matches of the unique_id with the new one
updated_content, count = re.subn(
unique_id_pattern, new_unique_id, content
)

if count > 0: # If replacements were made
if count > 0:
with open(file_path, "w", encoding="utf-8") as file:
file.write(updated_content)

logger.info(
f"Updated unique_id in {file_path} ({count} replacements)"
)
except Exception as file_error:
logger.warning(f"Failed to process {file_path}: {file_error}")

# Helper function for recursive replacement in complex objects
def replace_ids_recursive(obj):
    """Recursively replace ``old_unique_id`` with ``new_unique_id`` in *obj*.

    Walks dicts (both keys and values), lists, tuples, strings, and
    arbitrary objects exposing ``__dict__``; anything else is returned
    unchanged.  Best-effort: on error the original object is returned
    after logging, never raised.

    Fixes over the previous version:
    - dict *keys* are now processed too (an id used as a key was missed),
    - tuples are rebuilt and traversed instead of passed through,
    - the redundant second manual pass over ``value.__dict__`` is gone
      (the recursive call already descends into nested objects),
    - the per-attribute ``logger.info`` spam inside the recursion is
      removed (it dominated runtime/log volume on large pickles).
    """
    try:
        if isinstance(obj, dict):
            return {
                replace_ids_recursive(key): replace_ids_recursive(value)
                for key, value in obj.items()
            }
        elif isinstance(obj, list):
            return [replace_ids_recursive(item) for item in obj]
        elif isinstance(obj, tuple):
            # Tuples are immutable: rebuild rather than mutate in place.
            return tuple(replace_ids_recursive(item) for item in obj)
        elif isinstance(obj, str):
            # str.replace is a no-op when the id is absent, but the
            # membership check keeps the common case allocation-free.
            if old_unique_id in obj:
                return obj.replace(old_unique_id, new_unique_id)
            return obj
        elif hasattr(obj, "__dict__"):
            # One setattr per attribute; the recursive call handles any
            # depth of nesting, so no extra manual descent is needed.
            for attr, value in vars(obj).items():
                setattr(obj, attr, replace_ids_recursive(value))
            return obj
        else:
            # Ints, floats, None, ndarrays of non-object dtype, etc.
            return obj
    except Exception as e:
        # Best effort: keep the object as-is if replacement fails.
        logger.error(f"Error replacing unique_id in object: {e}")
        return obj

# Process .pkl files
for file_path in targeted_files_pkl:
try:
# Load the pickle file
with open(file_path, "rb") as file:
data = pickle.load(file)

# Replace IDs recursively
updated_data = replace_ids_recursive(data)

# Save the updated data back to the pickle file
with open(file_path, "wb") as file:
pickle.dump(updated_data, file)

logger.info(f"Updated unique_id in {file_path} (.pkl file)")
except Exception as file_error:
logger.warning(f"Failed to process {file_path}: {file_error}")

# Process .npy files
for file_path in targeted_files_npy:
try:
# Load the .npy file
with open(file_path, "rb") as file:
data = np.load(file, allow_pickle=True)

# Replace IDs recursively
updated_data = replace_ids_recursive(data)

# Save the updated data back to the .npy file
with open(file_path, "wb") as file:
np.save(file, updated_data, allow_pickle=True)

logger.info(f"Updated unique_id in {file_path} (.npy file)")
except Exception as file_error:
logger.warning(f"Failed to process {file_path}: {file_error}")

logger.info("All relevant files updated successfully.")
return True

except Exception as e:
logger.error(f"Error replacing unique_id in files: {e}")
return False
Expand Down

0 comments on commit c039509

Please sign in to comment.