Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Upgrade ruff dependency version #1305

Merged
merged 3 commits into from
Jan 10, 2025
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
231 changes: 209 additions & 22 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ python-on-whales = { version = "^0.68.0", optional = false }
requests = { version = ">=2.31", optional = false }
rich = { version = "^13.4.2", optional = false }
rich-click = { version = "^1.6.1", optional = false }
ruff = { version = "^0.4.8", optional = false } # Not a dev dep, needed for chains code gen.
ruff = { version = "^0.9.0", optional = false } # Not a dev dep, needed for chains code gen.
tenacity = { version = "^8.0.1", optional = false }
watchfiles = { version = "^0.19.0", optional = false }

Expand Down
2 changes: 1 addition & 1 deletion truss-chains/examples/audio-transcription/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ async def __aexit__(self, exc_type, exc, tb):
else "No stderr available."
)
raise ChildProcessError(
"FFMPEG hangs after terminating. Stderr:\n" f"{stderr}"
f"FFMPEG hangs after terminating. Stderr:\n{stderr}"
) from e

logging.debug(f"return code={self._process.returncode}.")
Expand Down
3 changes: 1 addition & 2 deletions truss-chains/tests/test_framework.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,7 @@ def run_remote(self) -> str:
return self.chainlet1.run_remote()

match = (
"`chains.depends(Chainlet1)` was used, but not as "
"an argument to the `__init__`"
"`chains.depends(Chainlet1)` was used, but not as an argument to the `__init__`"
)
with pytest.raises(definitions.ChainsRuntimeError, match=re.escape(match)):
with chains.run_local():
Expand Down
6 changes: 3 additions & 3 deletions truss-chains/truss_chains/deployment/deployment_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -552,9 +552,9 @@ def __init__(
non_draft_chainlets = [
chainlet.name for chainlet in deployed_chainlets if not chainlet.is_draft
]
assert not (
non_draft_chainlets
), "If the chain is draft, the oracles must be draft."
assert not (non_draft_chainlets), (
"If the chain is draft, the oracles must be draft."
)

self._chainlet_data = {c.name: c for c in deployed_chainlets}
self._assert_chainlet_names_same(chainlet_names)
Expand Down
10 changes: 5 additions & 5 deletions truss-chains/truss_chains/framework.py
Original file line number Diff line number Diff line change
Expand Up @@ -978,9 +978,9 @@ def _detect_naive_instantiations(
if len(name_parts) > 1:
init_owner_class = name_parts[-2]
elif func_name == _INIT_LOCAL_NAME:
assert (
"init_owner_class" in local_vars
), f"`{_INIT_LOCAL_NAME}` must capture `init_owner_class`"
assert "init_owner_class" in local_vars, (
f"`{_INIT_LOCAL_NAME}` must capture `init_owner_class`"
)
init_owner_class = local_vars["init_owner_class"].__name__

if init_owner_class:
Expand Down Expand Up @@ -1019,8 +1019,8 @@ def _detect_naive_instantiations(
# Everything else is invalid.
location = (
f"{up_frame.filename}:{up_frame.lineno} ({up_frame.function})\n"
f" {up_frame.code_context[0].strip()}" # type: ignore[index]
)
f" {up_frame.code_context[0].strip()}"
) # type: ignore[index]
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Doesn't this fail mypy now?

Copy link
Contributor Author

@nnarayen nnarayen Jan 10, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Synced offline — for now we'll use the semi-hacky `# fmt` directive to avoid this; a follow-up will introduce a slightly different way to generate `location` so we don't have this issue

raise definitions.ChainsRuntimeError(
_instantiation_error_msg(chainlet_descriptor.name, location)
)
Expand Down
3 changes: 1 addition & 2 deletions truss-chains/truss_chains/remote_chainlet/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,6 @@ async def async_response_raise_errors(
response_json = await response.json()
except Exception as e:
raise ValueError(
"Could not get JSON from error response. Status: "
f"`{response.status}`."
f"Could not get JSON from error response. Status: `{response.status}`."
) from e
_handle_response_error(response_json, remote_name, response.status)
38 changes: 21 additions & 17 deletions truss/contexts/image_builder/serving_image_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -311,18 +311,18 @@ def generate_docker_server_nginx_config(build_dir, config):
DOCKER_SERVER_TEMPLATES_DIR, "proxy.conf.jinja"
)

assert (
config.docker_server.predict_endpoint is not None
), "docker_server.predict_endpoint is required to use custom server"
assert (
config.docker_server.server_port is not None
), "docker_server.server_port is required to use custom server"
assert (
config.docker_server.readiness_endpoint is not None
), "docker_server.readiness_endpoint is required to use custom server"
assert (
config.docker_server.liveness_endpoint is not None
), "docker_server.liveness_endpoint is required to use custom server"
assert config.docker_server.predict_endpoint is not None, (
"docker_server.predict_endpoint is required to use custom server"
)
assert config.docker_server.server_port is not None, (
"docker_server.server_port is required to use custom server"
)
assert config.docker_server.readiness_endpoint is not None, (
"docker_server.readiness_endpoint is required to use custom server"
)
assert config.docker_server.liveness_endpoint is not None, (
"docker_server.liveness_endpoint is required to use custom server"
)

nginx_content = nginx_template.render(
server_endpoint=config.docker_server.predict_endpoint,
Expand All @@ -339,9 +339,9 @@ def generate_docker_server_supervisord_config(build_dir, config):
supervisord_template = read_template_from_fs(
DOCKER_SERVER_TEMPLATES_DIR, "supervisord.conf.jinja"
)
assert (
config.docker_server.start_command is not None
), "docker_server.start_command is required to use custom server"
assert config.docker_server.start_command is not None, (
"docker_server.start_command is required to use custom server"
)
supervisord_contents = supervisord_template.render(
start_command=config.docker_server.start_command,
)
Expand Down Expand Up @@ -376,7 +376,9 @@ def prepare_trtllm_encoder_build_dir(self, build_dir: Path):
config.trt_llm
and config.trt_llm.build
and config.trt_llm.build.base_model == TrussTRTLLMModel.ENCODER
), "prepare_trtllm_encoder_build_dir should only be called for encoder tensorrt-llm model"
), (
"prepare_trtllm_encoder_build_dir should only be called for encoder tensorrt-llm model"
)
# TRTLLM has performance degradation with batch size >> 32, so we limit the runtime settings
# runtime batch size may not be higher than what the build settings of the model allow
# to 32 even if the engine.rank0 allows for higher batch_size
Expand Down Expand Up @@ -417,7 +419,9 @@ def prepare_trtllm_decoder_build_dir(self, build_dir: Path):
config.trt_llm
and config.trt_llm.build
and config.trt_llm.build.base_model != TrussTRTLLMModel.ENCODER
), "prepare_trtllm_decoder_build_dir should only be called for decoder tensorrt-llm model"
), (
"prepare_trtllm_decoder_build_dir should only be called for decoder tensorrt-llm model"
)

# trt_llm is treated as an extension at model run time.
self._copy_into_build_dir(
Expand Down
16 changes: 8 additions & 8 deletions truss/remote/baseten/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,10 +148,10 @@ def create_model_from_truss(
config: "{config}",
semver_bump: "{semver_bump}",
client_version: "{client_version}",
is_trusted: {'true' if is_trusted else 'false'},
allow_truss_download: {'true' if allow_truss_download else 'false'},
is_trusted: {"true" if is_trusted else "false"},
allow_truss_download: {"true" if allow_truss_download else "false"},
{f'version_name: "{deployment_name}"' if deployment_name else ""}
{f'model_origin: {origin.value}' if origin else ""}
{f"model_origin: {origin.value}" if origin else ""}
) {{
id,
name,
Expand Down Expand Up @@ -183,8 +183,8 @@ def create_model_version_from_truss(
config: "{config}",
semver_bump: "{semver_bump}",
client_version: "{client_version}",
is_trusted: {'true' if is_trusted else 'false'},
scale_down_old_production: {'false' if preserve_previous_prod_deployment else 'true'},
is_trusted: {"true" if is_trusted else "false"},
scale_down_old_production: {"false" if preserve_previous_prod_deployment else "true"},
{f'name: "{deployment_name}"' if deployment_name else ""}
{f'environment_name: "{environment}"' if environment else ""}
) {{
Expand Down Expand Up @@ -212,9 +212,9 @@ def create_development_model_from_truss(
s3_key: "{s3_key}",
config: "{config}",
client_version: "{client_version}",
is_trusted: {'true' if is_trusted else 'false'},
allow_truss_download: {'true' if allow_truss_download else 'false'},
{f'model_origin: {origin.value}' if origin else ""}
is_trusted: {"true" if is_trusted else "false"},
allow_truss_download: {"true" if allow_truss_download else "false"},
{f"model_origin: {origin.value}" if origin else ""}
) {{
id,
name,
Expand Down
12 changes: 6 additions & 6 deletions truss/templates/trtllm-audio/packages/whisper_trt/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,9 +201,9 @@ async def log_mel_spectrogram(
def remove_tensor_padding(input_tensor, input_tensor_lengths=None, pad_value=0):
if input_tensor.dim() == 2:
# Text tensor case: batch, seq_len
assert torch.all(
input_tensor[:, 0] != pad_value
), "First token in each sequence should not be pad_value"
assert torch.all(input_tensor[:, 0] != pad_value), (
"First token in each sequence should not be pad_value"
)
assert input_tensor_lengths is None

# Create a mask for all non-pad tokens
Expand All @@ -214,9 +214,9 @@ def remove_tensor_padding(input_tensor, input_tensor_lengths=None, pad_value=0):

elif input_tensor.dim() == 3:
# Audio tensor case: batch, seq_len, feature_len
assert (
input_tensor_lengths is not None
), "input_tensor_lengths must be provided for 3D input_tensor"
assert input_tensor_lengths is not None, (
"input_tensor_lengths must be provided for 3D input_tensor"
)
batch_size, seq_len, feature_len = input_tensor.shape

# Initialize a list to collect valid sequences
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,6 @@ def _assert_copied(src_path: str, dest_path: str):
src_file = os.path.join(dirpath, filename)
dest_file = os.path.join(dest_path, rel_path, filename)
assert os.path.exists(dest_file), f"{dest_file} was not copied"
assert filecmp.cmp(
src_file, dest_file, shallow=False
), f"{src_file} and {dest_file} are not the same"
assert filecmp.cmp(src_file, dest_file, shallow=False), (
f"{src_file} and {dest_file} are not the same"
)
6 changes: 3 additions & 3 deletions truss/tests/templates/server/test_model_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,9 +114,9 @@ async def test_trt_llm_truss_init_extension(trt_llm_truss_container_fs, helpers)
call_args[0][0] == "trt_llm"
for call_args in mock_init_extension.call_args_list
)
assert (
called_with_specific_extension
), "Expected extension_name was not called"
assert called_with_specific_extension, (
"Expected extension_name was not called"
)


@pytest.mark.anyio
Expand Down
14 changes: 7 additions & 7 deletions truss/tests/test_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -438,7 +438,7 @@ def test_plugin_paged_context_fmha_check(trtllm_config):
"~/.huggingface/my--model--cache/model",
"foo.git",
"datasets/foo/bar",
".repo_id" "other..repo..id",
".repo_idother..repo..id",
],
)
def test_invalid_hf_repo(trtllm_config, repo):
Expand Down Expand Up @@ -515,9 +515,9 @@ def test_from_dict_spec_dec_trt_llm(should_raise, trtllm_spec_dec_config):
with pytest.raises(ValueError):
TrussConfig.from_dict(test_config)
test_config["trt_llm"]["build"]["speculator"]["checkpoint_repository"] = (
trtllm_spec_dec_config[
"trt_llm"
]["build"]["speculator"]["checkpoint_repository"]
trtllm_spec_dec_config["trt_llm"]["build"]["speculator"][
"checkpoint_repository"
]
)
test_config["trt_llm"]["build"]["plugin_configuration"][
"use_paged_context_fmha"
Expand All @@ -528,9 +528,9 @@ def test_from_dict_spec_dec_trt_llm(should_raise, trtllm_spec_dec_config):
"use_paged_context_fmha"
] = True
test_config["trt_llm"]["build"]["speculator"]["speculative_decoding_mode"] = (
trtllm_spec_dec_config[
"trt_llm"
]["build"]["speculator"]["speculative_decoding_mode"]
trtllm_spec_dec_config["trt_llm"]["build"]["speculator"][
"speculative_decoding_mode"
]
)
test_config["trt_llm"]["build"]["speculator"]["num_draft_tokens"] = None
with pytest.raises(ValueError):
Expand Down
3 changes: 2 additions & 1 deletion truss/tests/test_data/happy.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,10 @@
"\n",
"import numpy as np\n",
"import pandas as pd\n",
"import truss\n",
"from sklearn.linear_model import LogisticRegression\n",
"\n",
"import truss\n",
"\n",
"# 10 rows of 10 features\n",
"features = random.choices(range(10), k=10)\n",
"values = np.random.randint(0, 100, size=(10, 10))\n",
Expand Down
2 changes: 1 addition & 1 deletion truss/tests/util/test_path.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def test_get_ignored_relative_paths():
".git",
".git/HEAD",
"data/should_ignore.txt",
"data.txt" "config.yaml",
"data.txtconfig.yaml",
"model/model.py",
}

Expand Down
2 changes: 1 addition & 1 deletion truss/truss_handle/truss_handle.py
Original file line number Diff line number Diff line change
Expand Up @@ -1027,7 +1027,7 @@ def _try_patch(self):
)
resp = self.patch_container(patch_request)
if "error" in resp:
raise RuntimeError(f'Failed to patch control truss {resp["error"]}')
raise RuntimeError(f"Failed to patch control truss {resp['error']}")
self._store_signature()
return container

Expand Down
Loading