diff --git a/examples/flax/image-captioning/run_image_captioning_flax.py b/examples/flax/image-captioning/run_image_captioning_flax.py index 4552defb8efc45..348a719857830a 100644 --- a/examples/flax/image-captioning/run_image_captioning_flax.py +++ b/examples/flax/image-captioning/run_image_captioning_flax.py @@ -186,7 +186,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/flax/language-modeling/run_bart_dlm_flax.py b/examples/flax/language-modeling/run_bart_dlm_flax.py index 5c8bf1bbc45dda..6396f4ced99695 100644 --- a/examples/flax/language-modeling/run_bart_dlm_flax.py +++ b/examples/flax/language-modeling/run_bart_dlm_flax.py @@ -172,7 +172,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/flax/language-modeling/run_clm_flax.py b/examples/flax/language-modeling/run_clm_flax.py index 5fe786da7cc5ad..1a0428fdd67039 100755 --- a/examples/flax/language-modeling/run_clm_flax.py +++ b/examples/flax/language-modeling/run_clm_flax.py @@ -173,7 +173,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/flax/language-modeling/run_mlm_flax.py b/examples/flax/language-modeling/run_mlm_flax.py index f3f3c324ecfea6..65f6a2285d9c34 100755 --- a/examples/flax/language-modeling/run_mlm_flax.py +++ b/examples/flax/language-modeling/run_mlm_flax.py @@ -172,7 +172,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/flax/language-modeling/run_t5_mlm_flax.py b/examples/flax/language-modeling/run_t5_mlm_flax.py index a2906c410879b9..0030fc8da66a57 100755 --- a/examples/flax/language-modeling/run_t5_mlm_flax.py +++ b/examples/flax/language-modeling/run_t5_mlm_flax.py @@ -172,7 +172,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/flax/question-answering/run_qa.py b/examples/flax/question-answering/run_qa.py index 0873b19413bfea..1b951e35839816 100644 --- a/examples/flax/question-answering/run_qa.py +++ b/examples/flax/question-answering/run_qa.py @@ -159,7 +159,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/flax/summarization/run_summarization_flax.py b/examples/flax/summarization/run_summarization_flax.py index 856fd6fdb7b36a..c193fe0bc3745a 100644 --- a/examples/flax/summarization/run_summarization_flax.py +++ b/examples/flax/summarization/run_summarization_flax.py @@ -186,7 +186,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/flax/text-classification/run_flax_glue.py b/examples/flax/text-classification/run_flax_glue.py index 7f5524dbb437d6..e0dfab2f52e994 100755 --- a/examples/flax/text-classification/run_flax_glue.py +++ b/examples/flax/text-classification/run_flax_glue.py @@ -105,7 +105,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/flax/token-classification/run_flax_ner.py b/examples/flax/token-classification/run_flax_ner.py index 0a66b5f1990bc9..ad68c0997fed81 100644 --- a/examples/flax/token-classification/run_flax_ner.py +++ b/examples/flax/token-classification/run_flax_ner.py @@ -153,7 +153,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/flax/vision/run_image_classification.py b/examples/flax/vision/run_image_classification.py index 305dd3ac205f0c..3de3c977ab1d46 100644 --- a/examples/flax/vision/run_image_classification.py +++ b/examples/flax/vision/run_image_classification.py @@ -162,7 +162,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/audio-classification/run_audio_classification.py b/examples/pytorch/audio-classification/run_audio_classification.py index 6c2a6cb8803976..9ebd4fb00759f5 100644 --- a/examples/pytorch/audio-classification/run_audio_classification.py +++ b/examples/pytorch/audio-classification/run_audio_classification.py @@ -156,7 +156,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/contrastive-image-text/run_clip.py b/examples/pytorch/contrastive-image-text/run_clip.py index 22b420d856173c..d3c5355f9d07cf 100644 --- a/examples/pytorch/contrastive-image-text/run_clip.py +++ b/examples/pytorch/contrastive-image-text/run_clip.py @@ -90,7 +90,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/pytorch/image-classification/run_image_classification.py b/examples/pytorch/image-classification/run_image_classification.py index f8c2c95f59592e..2d26e42604da03 100644 --- a/examples/pytorch/image-classification/run_image_classification.py +++ b/examples/pytorch/image-classification/run_image_classification.py @@ -145,7 +145,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/image-pretraining/run_mae.py b/examples/pytorch/image-pretraining/run_mae.py index aa8de32d8cb2ed..3ac4106b11acbf 100644 --- a/examples/pytorch/image-pretraining/run_mae.py +++ b/examples/pytorch/image-pretraining/run_mae.py @@ -137,7 +137,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/image-pretraining/run_mim.py b/examples/pytorch/image-pretraining/run_mim.py index f60b21600832e2..7626e8be363253 100644 --- a/examples/pytorch/image-pretraining/run_mim.py +++ b/examples/pytorch/image-pretraining/run_mim.py @@ -157,7 +157,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/pytorch/language-modeling/run_clm.py b/examples/pytorch/language-modeling/run_clm.py index 53052d7671e061..ca992c04562e5e 100755 --- a/examples/pytorch/language-modeling/run_clm.py +++ b/examples/pytorch/language-modeling/run_clm.py @@ -114,7 +114,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/language-modeling/run_mlm.py b/examples/pytorch/language-modeling/run_mlm.py index dcc8bcd3cd955d..b635a7aea69881 100755 --- a/examples/pytorch/language-modeling/run_mlm.py +++ b/examples/pytorch/language-modeling/run_mlm.py @@ -111,7 +111,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/language-modeling/run_plm.py b/examples/pytorch/language-modeling/run_plm.py index 15ff8eb45f6ebb..4a885ee49661fd 100755 --- a/examples/pytorch/language-modeling/run_plm.py +++ b/examples/pytorch/language-modeling/run_plm.py @@ -99,7 +99,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/pytorch/multiple-choice/run_swag.py b/examples/pytorch/multiple-choice/run_swag.py index 5771165cafeb8f..f9df919e1f92da 100755 --- a/examples/pytorch/multiple-choice/run_swag.py +++ b/examples/pytorch/multiple-choice/run_swag.py @@ -83,7 +83,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/question-answering/run_qa.py b/examples/pytorch/question-answering/run_qa.py index cddcb4891beff6..54db2b7bb12d66 100755 --- a/examples/pytorch/question-answering/run_qa.py +++ b/examples/pytorch/question-answering/run_qa.py @@ -83,7 +83,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/question-answering/run_qa_beam_search.py b/examples/pytorch/question-answering/run_qa_beam_search.py index 1c389e43f37759..ce110ae3646362 100755 --- a/examples/pytorch/question-answering/run_qa_beam_search.py +++ b/examples/pytorch/question-answering/run_qa_beam_search.py @@ -82,7 +82,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/pytorch/question-answering/run_seq2seq_qa.py b/examples/pytorch/question-answering/run_seq2seq_qa.py index c3c85b31da2f0d..8ffe114dbb8644 100644 --- a/examples/pytorch/question-answering/run_seq2seq_qa.py +++ b/examples/pytorch/question-answering/run_seq2seq_qa.py @@ -83,7 +83,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/semantic-segmentation/run_semantic_segmentation.py b/examples/pytorch/semantic-segmentation/run_semantic_segmentation.py index 92d07f8f9199bc..bc1bfb2c1c0945 100644 --- a/examples/pytorch/semantic-segmentation/run_semantic_segmentation.py +++ b/examples/pytorch/semantic-segmentation/run_semantic_segmentation.py @@ -246,7 +246,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py b/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py index fdbed2b3ab09a4..36efb44138d9a6 100755 --- a/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py +++ b/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py @@ -233,7 +233,7 @@ class DataTrainingArguments: metadata={ "help": ( "If :obj:`True`, will use the token generated when running" - ":obj:`transformers-cli login` as HTTP bearer authorization for remote files." + ":obj:`huggingface-cli login` as HTTP bearer authorization for remote files." 
) }, ) diff --git a/examples/pytorch/speech-recognition/run_speech_recognition_seq2seq.py b/examples/pytorch/speech-recognition/run_speech_recognition_seq2seq.py index 0ce8ff05508ea9..015c1f0a653222 100755 --- a/examples/pytorch/speech-recognition/run_speech_recognition_seq2seq.py +++ b/examples/pytorch/speech-recognition/run_speech_recognition_seq2seq.py @@ -89,7 +89,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/summarization/run_summarization.py b/examples/pytorch/summarization/run_summarization.py index 78d5b79ca4274a..5d6d5d5c771b3a 100755 --- a/examples/pytorch/summarization/run_summarization.py +++ b/examples/pytorch/summarization/run_summarization.py @@ -103,7 +103,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/text-classification/run_glue.py b/examples/pytorch/text-classification/run_glue.py index 556e1f3bbe66ef..49af0c85568c9b 100755 --- a/examples/pytorch/text-classification/run_glue.py +++ b/examples/pytorch/text-classification/run_glue.py @@ -192,7 +192,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/pytorch/text-classification/run_xnli.py b/examples/pytorch/text-classification/run_xnli.py index 2450d24e3d0169..d4cfc3a77d0b6d 100755 --- a/examples/pytorch/text-classification/run_xnli.py +++ b/examples/pytorch/text-classification/run_xnli.py @@ -156,7 +156,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/token-classification/run_ner.py b/examples/pytorch/token-classification/run_ner.py index 13993e58a450f1..9000b5006e03fa 100755 --- a/examples/pytorch/token-classification/run_ner.py +++ b/examples/pytorch/token-classification/run_ner.py @@ -83,7 +83,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/pytorch/translation/run_translation.py b/examples/pytorch/translation/run_translation.py index a519fa17533591..af1868b25aad35 100755 --- a/examples/pytorch/translation/run_translation.py +++ b/examples/pytorch/translation/run_translation.py @@ -93,7 +93,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/research_projects/layoutlmv3/run_funsd_cord.py b/examples/research_projects/layoutlmv3/run_funsd_cord.py index 66be61dffccf20..866f9a9c1b1163 100644 --- a/examples/research_projects/layoutlmv3/run_funsd_cord.py +++ b/examples/research_projects/layoutlmv3/run_funsd_cord.py @@ -81,7 +81,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/research_projects/mlm_wwm/run_mlm_wwm.py b/examples/research_projects/mlm_wwm/run_mlm_wwm.py index 0afa4135537a85..f14ad5adfeff16 100644 --- a/examples/research_projects/mlm_wwm/run_mlm_wwm.py +++ b/examples/research_projects/mlm_wwm/run_mlm_wwm.py @@ -101,7 +101,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/research_projects/quantization-qdqbert/run_quant_qa.py b/examples/research_projects/quantization-qdqbert/run_quant_qa.py index 97eece4c1d0ac9..5008197b8b845d 100755 --- a/examples/research_projects/quantization-qdqbert/run_quant_qa.py +++ b/examples/research_projects/quantization-qdqbert/run_quant_qa.py @@ -84,7 +84,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_bnb.py b/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_bnb.py index afa3397eb43000..5294e6a4a9aef9 100755 --- a/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_bnb.py +++ b/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_bnb.py @@ -231,7 +231,7 @@ class DataTrainingArguments: metadata={ "help": ( "If :obj:`True`, will use the token generated when running" - ":obj:`transformers-cli login` as HTTP bearer authorization for remote files." + ":obj:`huggingface-cli login` as HTTP bearer authorization for remote files." ) }, ) diff --git a/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_streaming.py b/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_streaming.py index 57f54048a52330..8add8fd20a72d9 100644 --- a/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_streaming.py +++ b/examples/research_projects/robust-speech-event/run_speech_recognition_ctc_streaming.py @@ -234,7 +234,7 @@ class DataTrainingArguments: metadata={ "help": ( "If :obj:`True`, will use the token generated when running" - ":obj:`transformers-cli login` as HTTP bearer authorization for remote files." + ":obj:`huggingface-cli login` as HTTP bearer authorization for remote files." 
) }, ) diff --git a/examples/research_projects/tapex/run_tabfact_with_tapex.py b/examples/research_projects/tapex/run_tabfact_with_tapex.py index 19c21c33948edb..23d094f8992a63 100644 --- a/examples/research_projects/tapex/run_tabfact_with_tapex.py +++ b/examples/research_projects/tapex/run_tabfact_with_tapex.py @@ -175,7 +175,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/research_projects/tapex/run_wikisql_with_tapex.py b/examples/research_projects/tapex/run_wikisql_with_tapex.py index 7573893629c6d6..1d402fa7e8f0e9 100644 --- a/examples/research_projects/tapex/run_wikisql_with_tapex.py +++ b/examples/research_projects/tapex/run_wikisql_with_tapex.py @@ -104,7 +104,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/research_projects/tapex/run_wikitablequestions_with_tapex.py b/examples/research_projects/tapex/run_wikitablequestions_with_tapex.py index 7ffa8f5f91cc43..6f93f9b5166929 100644 --- a/examples/research_projects/tapex/run_wikitablequestions_with_tapex.py +++ b/examples/research_projects/tapex/run_wikitablequestions_with_tapex.py @@ -102,7 +102,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/research_projects/xtreme-s/run_xtreme_s.py b/examples/research_projects/xtreme-s/run_xtreme_s.py index d3e4f5cb38abf9..16fc1ac8a39c32 100644 --- a/examples/research_projects/xtreme-s/run_xtreme_s.py +++ b/examples/research_projects/xtreme-s/run_xtreme_s.py @@ -287,7 +287,7 @@ class DataTrainingArguments: metadata={ "help": ( "If :obj:`True`, will use the token generated when running" - ":obj:`transformers-cli login` as HTTP bearer authorization for remote files." + ":obj:`huggingface-cli login` as HTTP bearer authorization for remote files." ) }, ) diff --git a/examples/tensorflow/language-modeling/run_clm.py b/examples/tensorflow/language-modeling/run_clm.py index 46c8d339d970c3..3f12683d10d997 100755 --- a/examples/tensorflow/language-modeling/run_clm.py +++ b/examples/tensorflow/language-modeling/run_clm.py @@ -114,7 +114,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/tensorflow/language-modeling/run_mlm.py b/examples/tensorflow/language-modeling/run_mlm.py index 46b27dab662519..b421ed8e669c15 100755 --- a/examples/tensorflow/language-modeling/run_mlm.py +++ b/examples/tensorflow/language-modeling/run_mlm.py @@ -115,7 +115,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/tensorflow/multiple-choice/run_swag.py b/examples/tensorflow/multiple-choice/run_swag.py index b09b0e5598f514..6ba35bd0fd2023 100644 --- a/examples/tensorflow/multiple-choice/run_swag.py +++ b/examples/tensorflow/multiple-choice/run_swag.py @@ -157,7 +157,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/tensorflow/question-answering/run_qa.py b/examples/tensorflow/question-answering/run_qa.py index bd233f378a4dc9..91293aefb35f55 100755 --- a/examples/tensorflow/question-answering/run_qa.py +++ b/examples/tensorflow/question-answering/run_qa.py @@ -80,7 +80,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/tensorflow/summarization/run_summarization.py b/examples/tensorflow/summarization/run_summarization.py index 5d0737fdeffbb9..6d4cf99e6782f8 100644 --- a/examples/tensorflow/summarization/run_summarization.py +++ b/examples/tensorflow/summarization/run_summarization.py @@ -101,7 +101,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/tensorflow/text-classification/run_glue.py b/examples/tensorflow/text-classification/run_glue.py index fe7ef66ece129c..9fb0b3f8e43482 100644 --- a/examples/tensorflow/text-classification/run_glue.py +++ b/examples/tensorflow/text-classification/run_glue.py @@ -183,7 +183,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/tensorflow/text-classification/run_text_classification.py b/examples/tensorflow/text-classification/run_text_classification.py index 210a30344dbc0e..b5d19032971c5b 100644 --- a/examples/tensorflow/text-classification/run_text_classification.py +++ b/examples/tensorflow/text-classification/run_text_classification.py @@ -173,7 +173,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/examples/tensorflow/token-classification/run_ner.py b/examples/tensorflow/token-classification/run_ner.py index cd4eea6feeb6dc..caa47e115a4bfa 100644 --- a/examples/tensorflow/token-classification/run_ner.py +++ b/examples/tensorflow/token-classification/run_ner.py @@ -83,7 +83,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." 
) }, diff --git a/examples/tensorflow/translation/run_translation.py b/examples/tensorflow/translation/run_translation.py index 6e12288fd44f52..7f5eb9eb9defb7 100644 --- a/examples/tensorflow/translation/run_translation.py +++ b/examples/tensorflow/translation/run_translation.py @@ -95,7 +95,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) }, diff --git a/scripts/tatoeba/README.md b/scripts/tatoeba/README.md index b86caf51d725b0..7c492ec4f46e2e 100644 --- a/scripts/tatoeba/README.md +++ b/scripts/tatoeba/README.md @@ -57,7 +57,7 @@ To upload all converted models, 2. Login to `transformers-cli` ```bash -transformers-cli login +huggingface-cli login ``` 3. Run the `upload_models` script diff --git a/scripts/tatoeba/upload_models.sh b/scripts/tatoeba/upload_models.sh index 07c21edcbd519e..536eb5bc68c4c4 100755 --- a/scripts/tatoeba/upload_models.sh +++ b/scripts/tatoeba/upload_models.sh @@ -2,7 +2,7 @@ for FILE in converted/*; do model_name=`basename $FILE` - transformers-cli repo create $model_name -y + huggingface-cli repo create $model_name -y git clone https://huggingface.co/Helsinki-NLP/$model_name mv $FILE/* $model_name/ cd $model_name diff --git a/src/transformers/commands/user.py b/src/transformers/commands/user.py index 2f073235d25648..938f4c8ea8b616 100644 --- a/src/transformers/commands/user.py +++ b/src/transformers/commands/user.py @@ -22,9 +22,6 @@ from . import BaseTransformersCLICommand -UPLOAD_MAX_FILES = 15 - - class UserCommands(BaseTransformersCLICommand): @staticmethod def register_subcommand(parser: ArgumentParser): @@ -105,7 +102,7 @@ class LoginCommand(BaseUserCommand): def run(self): print( ANSI.red( - "ERROR! `transformers-cli login` uses an outdated login mechanism " + "ERROR! 
`transformers-cli login` uses an outdated login mechanism " "that is not compatible with the Hugging Face Hub backend anymore. " "Please use `huggingface-cli login instead." ) diff --git a/src/transformers/configuration_utils.py b/src/transformers/configuration_utils.py index b10475127b4fce..b924cec9ae021c 100755 --- a/src/transformers/configuration_utils.py +++ b/src/transformers/configuration_utils.py @@ -463,7 +463,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any diff --git a/src/transformers/dynamic_module_utils.py b/src/transformers/dynamic_module_utils.py index 7baafd214c2558..da1434067cbdf8 100644 --- a/src/transformers/dynamic_module_utils.py +++ b/src/transformers/dynamic_module_utils.py @@ -195,7 +195,7 @@ def get_cached_module_file( 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any @@ -345,7 +345,7 @@ def get_class_from_dynamic_module( 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or `bool`, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any diff --git a/src/transformers/feature_extraction_utils.py b/src/transformers/feature_extraction_utils.py index ec68f355191c1d..394d67a8c5a1a7 100644 --- a/src/transformers/feature_extraction_utils.py +++ b/src/transformers/feature_extraction_utils.py @@ -251,7 +251,7 @@ def from_pretrained( 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any diff --git a/src/transformers/modeling_tf_utils.py b/src/transformers/modeling_tf_utils.py index 1a63d32e4196a0..354bd9592f30cd 100644 --- a/src/transformers/modeling_tf_utils.py +++ b/src/transformers/modeling_tf_utils.py @@ -2096,7 +2096,7 @@ def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): Whether or not to only look at local files (e.g., not try doanloading the model). use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any @@ -2472,8 +2472,8 @@ def push_to_hub( Whether or not the repository created should be private (requires a paying subscription). use_auth_token (`bool` or `str`, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). Will default to `True` if - `repo_url` is not specified. + when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url` + is not specified. max_shard_size (`int` or `str`, *optional*, defaults to `"10GB"`): Only applicable for models. The maximum size for a checkpoint before being sharded. Checkpoints shard will then be each of size lower than this size. 
If expressed as a string, needs to be digits followed diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 8709ec66365c66..771f1d2d5d84c0 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1659,7 +1659,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P Whether or not to only look at local files (i.e., do not try to download the model). use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any diff --git a/src/transformers/models/auto/feature_extraction_auto.py b/src/transformers/models/auto/feature_extraction_auto.py index ed526369df4f38..db581d03d8fb7e 100644 --- a/src/transformers/models/auto/feature_extraction_auto.py +++ b/src/transformers/models/auto/feature_extraction_auto.py @@ -142,7 +142,7 @@ def get_feature_extractor_config( 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any @@ -247,7 +247,7 @@ def from_pretrained(cls, pretrained_model_name_or_path, **kwargs): 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any diff --git a/src/transformers/models/auto/processing_auto.py b/src/transformers/models/auto/processing_auto.py index d81dd19ea23dde..aed7b4b9761373 100644 --- a/src/transformers/models/auto/processing_auto.py +++ b/src/transformers/models/auto/processing_auto.py @@ -135,7 +135,7 @@ def from_pretrained(cls, pretrained_model_name_or_path, **kwargs): 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any diff --git a/src/transformers/models/auto/tokenization_auto.py b/src/transformers/models/auto/tokenization_auto.py index 7a2dc2941fdd09..d8759fd4e7842e 100644 --- a/src/transformers/models/auto/tokenization_auto.py +++ b/src/transformers/models/auto/tokenization_auto.py @@ -357,7 +357,7 @@ def get_tokenizer_config( 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any diff --git a/src/transformers/pipelines/__init__.py b/src/transformers/pipelines/__init__.py index d2a4b663801d78..104726bbd8cc7a 100755 --- a/src/transformers/pipelines/__init__.py +++ b/src/transformers/pipelines/__init__.py @@ -505,7 +505,7 @@ def pipeline( Whether or not to use a Fast tokenizer if possible (a [`PreTrainedTokenizerFast`]). use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). device_map (`str` or `Dict[str, Union[int, str, torch.device]`, *optional*): Sent directly as `model_kwargs` (just a simpler shortcut). When `accelerate` library is present, set `device_map="auto"` to compute the most optimized `device_map` automatically. 
[More diff --git a/src/transformers/tokenization_utils_base.py b/src/transformers/tokenization_utils_base.py index fc1c0ff8da3b32..91537ef46cc864 100644 --- a/src/transformers/tokenization_utils_base.py +++ b/src/transformers/tokenization_utils_base.py @@ -1596,7 +1596,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). local_files_only (`bool`, *optional*, defaults to `False`): Whether or not to only rely on local files and not to attempt to download any files. revision (`str`, *optional*, defaults to `"main"`): diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 2488ab8f690865..1aa086da6721ec 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -716,7 +716,7 @@ def cached_file( 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any @@ -870,7 +870,7 @@ def get_file_from_repo( 'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. 
If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). revision (`str`, *optional*, defaults to `"main"`): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any @@ -983,7 +983,7 @@ def get_list_of_files( identifier allowed by git. use_auth_token (`str` or *bool*, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `~/.huggingface`). local_files_only (`bool`, *optional*, defaults to `False`): Whether or not to only rely on local files and not to attempt to download any files. @@ -1161,8 +1161,8 @@ def push_to_hub( Whether or not the repository created should be private (requires a paying subscription). use_auth_token (`bool` or `str`, *optional*): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). Will default to `True` if - `repo_url` is not specified. + when running `huggingface-cli login` (stored in `~/.huggingface`). Will default to `True` if `repo_url` + is not specified. max_shard_size (`int` or `str`, *optional*, defaults to `"10GB"`): Only applicable for models. The maximum size for a checkpoint before being sharded. Checkpoints shard will then be each of size lower than this size. 
If expressed as a string, needs to be digits followed diff --git a/templates/adding_a_new_example_script/{{cookiecutter.directory_name}}/run_{{cookiecutter.example_shortcut}}.py b/templates/adding_a_new_example_script/{{cookiecutter.directory_name}}/run_{{cookiecutter.example_shortcut}}.py index f07029ec242caa..e7a622edd71527 100755 --- a/templates/adding_a_new_example_script/{{cookiecutter.directory_name}}/run_{{cookiecutter.example_shortcut}}.py +++ b/templates/adding_a_new_example_script/{{cookiecutter.directory_name}}/run_{{cookiecutter.example_shortcut}}.py @@ -118,7 +118,7 @@ class ModelArguments: use_auth_token: bool = field( default=False, metadata={ - "help": "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "help": "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." }, ) diff --git a/tests/sagemaker/scripts/pytorch/run_glue_model_parallelism.py b/tests/sagemaker/scripts/pytorch/run_glue_model_parallelism.py index 534b1656d10f3e..01185fdabac527 100644 --- a/tests/sagemaker/scripts/pytorch/run_glue_model_parallelism.py +++ b/tests/sagemaker/scripts/pytorch/run_glue_model_parallelism.py @@ -181,7 +181,7 @@ class ModelArguments: default=False, metadata={ "help": ( - "Will use the token generated when running `transformers-cli login` (necessary to use this script " + "Will use the token generated when running `huggingface-cli login` (necessary to use this script " "with private models)." ) },