2 changes: 0 additions & 2 deletions docs/source/_toctree.yml
@@ -95,8 +95,6 @@
     title: Model Classes
   - local: model_utils
     title: Model Utilities
-  - local: best_of_n
-    title: Best of N Sampling
   - local: judges
     title: Judges
   - local: callbacks
68 changes: 0 additions & 68 deletions docs/source/best_of_n.md

This file was deleted.
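For reference, best-of-n sampling itself needs no dedicated helper: draw several completions per prompt, score them, and keep the highest-scoring one. The sketch below illustrates that loop with plain `transformers`; it is not the removed `BestOfNSampler` API, and the model id and `reward_fn` are placeholder assumptions.

```python
# Illustrative best-of-n sketch with plain transformers, not the removed
# BestOfNSampler API. The model id and reward_fn below are assumptions.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Qwen/Qwen2.5-0.5B-Instruct"  # placeholder: any causal LM works
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)


def reward_fn(texts: list[str]) -> list[float]:
    # Hypothetical scorer: replace with a reward model or judge of your choice.
    return [float(len(t)) for t in texts]


prompt = "The best thing about open source is"
inputs = tokenizer(prompt, return_tensors="pt")

with torch.no_grad():
    # Sample N candidate completions for the same prompt.
    outputs = model.generate(
        **inputs,
        do_sample=True,
        top_k=50,
        max_new_tokens=32,
        num_return_sequences=4,
    )

candidates = tokenizer.batch_decode(outputs, skip_special_tokens=True)
# Keep the candidate with the highest score.
best = max(zip(candidates, reward_fn(candidates)), key=lambda pair: pair[1])[0]
print(best)
```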

113 changes: 0 additions & 113 deletions tests/test_best_of_n_sampler.py

This file was deleted.

16 changes: 0 additions & 16 deletions tests/test_utils.py
@@ -30,7 +30,6 @@
     DataCollatorForChatML,
     RepeatSampler,
     batch_generation,
-    decode_and_strip_padding,
     entropy_from_logits,
     flush_left,
     flush_right,
@@ -170,21 +169,6 @@ def test_create_peft_config_use_peft_true(self):
             assert getattr(peft_config, arg) == value
 
 
-class TestDecodeAndStripPadding(TrlTestCase):
-    def setup_method(self):
-        self.tokenizer = AutoTokenizer.from_pretrained("trl-internal-testing/tiny-Qwen2ForCausalLM-2.5")
-
-    def test_example_with_padding(self):
-        inputs = self.tokenizer(["Hello world", "Hello"], padding=True, return_tensors="pt")
-        decoded = decode_and_strip_padding(inputs["input_ids"], self.tokenizer)
-        assert decoded == ["Hello world", "Hello"]
-
-    def test_example_without_padding(self):
-        inputs = self.tokenizer(["Hello", "Hello"], padding=False, return_tensors="pt")
-        decoded = decode_and_strip_padding(inputs["input_ids"], self.tokenizer)
-        assert decoded == ["Hello", "Hello"]
-
-
 class TestGenerateModelCard(TrlTestCase):
     def test_full(self):
         model_card = generate_model_card(
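If code still depends on the removed helper, its behavior is straightforward to reproduce with the tokenizer alone, assuming the tokenizer defines a pad token. The sketch below is an assumed equivalent, not necessarily the deleted implementation, and `strip_padding_decode` is a hypothetical name: decode the batch without skipping special tokens, then drop the pad token from each string.

```python
# Assumed equivalent of the removed decode_and_strip_padding helper;
# strip_padding_decode is a hypothetical name, not a TRL API.
import torch
from transformers import AutoTokenizer, PreTrainedTokenizerBase


def strip_padding_decode(input_ids: torch.Tensor, tokenizer: PreTrainedTokenizerBase) -> list[str]:
    decoded = tokenizer.batch_decode(input_ids, skip_special_tokens=False)
    # Remove every occurrence of the pad token from the decoded text.
    return [text.replace(tokenizer.pad_token, "") for text in decoded]


tokenizer = AutoTokenizer.from_pretrained("trl-internal-testing/tiny-Qwen2ForCausalLM-2.5")
batch = tokenizer(["Hello world", "Hello"], padding=True, return_tensors="pt")
print(strip_padding_decode(batch["input_ids"], tokenizer))  # expected: ["Hello world", "Hello"]
```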
2 changes: 0 additions & 2 deletions trl/__init__.py
@@ -53,7 +53,6 @@
         "truncate_dataset",
         "unpair_preference_dataset",
     ],
-    "extras": ["BestOfNSampler"],
     "models": [
         "SUPPORTED_ARCHITECTURES",
         "AutoModelForCausalLMWithValueHead",
@@ -133,7 +132,6 @@
         truncate_dataset,
         unpair_preference_dataset,
     )
-    from .extras import BestOfNSampler
     from .models import (
         SUPPORTED_ARCHITECTURES,
         AutoModelForCausalLMWithValueHead,
16 changes: 0 additions & 16 deletions trl/extras/__init__.py
@@ -11,19 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-from typing import TYPE_CHECKING
-
-from ..import_utils import _LazyModule
-
-
-_import_structure = {
-    "best_of_n_sampler": ["BestOfNSampler"],
-}
-
-if TYPE_CHECKING:
-    from .best_of_n_sampler import BestOfNSampler
-else:
-    import sys
-
-    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
141 changes: 0 additions & 141 deletions trl/extras/best_of_n_sampler.py

This file was deleted.
