Skip to content

Commit

Permalink
Improve transformers-cli env reporting (huggingface#31003)
Browse files Browse the repository at this point in the history
* Improve `transformers-cli env` reporting

* move the line `"Using GPU in script?": "<fill in>"` into the `if` conditional
statement

* add the same option for NPU
  • Loading branch information
statelesshz authored May 29, 2024
1 parent c3044ec commit c886137
Showing 1 changed file with 9 additions and 1 deletion.
10 changes: 9 additions & 1 deletion src/transformers/commands/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
is_safetensors_available,
is_tf_available,
is_torch_available,
is_torch_npu_available,
)
from . import BaseTransformersCLICommand

Expand Down Expand Up @@ -88,6 +89,7 @@ def run(self):

pt_version = torch.__version__
pt_cuda_available = torch.cuda.is_available()
pt_npu_available = is_torch_npu_available()

tf_version = "not installed"
tf_cuda_available = "NA"
Expand Down Expand Up @@ -129,9 +131,15 @@ def run(self):
"Flax version (CPU?/GPU?/TPU?)": f"{flax_version} ({jax_backend})",
"Jax version": f"{jax_version}",
"JaxLib version": f"{jaxlib_version}",
"Using GPU in script?": "<fill in>",
"Using distributed or parallel set-up in script?": "<fill in>",
}
if pt_cuda_available:
info["Using GPU in script?"] = "<fill in>"
info["GPU type"] = torch.cuda.get_device_name()
elif pt_npu_available:
info["Using NPU in script?"] = "<fill in>"
info["NPU type"] = torch.npu.get_device_name()
info["CANN version"] = torch.version.cann

print("\nCopy-and-paste the text below in your GitHub issue and FILL OUT the two last points.\n")
print(self.format_dict(info))
Expand Down

0 comments on commit c886137

Please sign in to comment.