From 762cdcd8b82a06780349a40b6c218f42a17d6a4e Mon Sep 17 00:00:00 2001
From: "Jonathan C. McKinney"
Date: Fri, 9 Jun 2023 00:38:34 -0700
Subject: [PATCH] Avoid peft import in global scope, loads bitsandbytes and
 that loads cuda and corrupts parent

---
 generate.py | 3 ++-
 utils.py    | 1 -
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/generate.py b/generate.py
index dc054af07..9d7e5437a 100644
--- a/generate.py
+++ b/generate.py
@@ -33,7 +33,6 @@
 import fire
 import torch
-from peft import PeftModel
 from transformers import GenerationConfig, AutoModel, TextIteratorStreamer
 from accelerate import init_empty_weights, infer_auto_device_map
@@ -710,6 +709,7 @@ def get_model(
             base_model,
             **model_kwargs
         )
+        from peft import PeftModel  # loads cuda, so avoid in global scope
         model = PeftModel.from_pretrained(
             model,
             lora_weights,
@@ -727,6 +727,7 @@ def get_model(
             base_model,
             **model_kwargs
         )
+        from peft import PeftModel  # loads cuda, so avoid in global scope
         model = PeftModel.from_pretrained(
             model,
             lora_weights,
diff --git a/utils.py b/utils.py
index 22e5e2186..f012acafe 100644
--- a/utils.py
+++ b/utils.py
@@ -14,7 +14,6 @@
 import traceback
 import zipfile
 from datetime import datetime
-from enum import Enum
 import filelock
 import requests, uuid
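
For context, the change applies a deferred-import pattern: peft pulls in bitsandbytes, which initializes CUDA on import, so keeping the import inside the code path that actually needs LoRA weights avoids initializing CUDA in the parent process. A minimal sketch of the same idea outside the patch (the helper name load_lora is hypothetical; model, lora_weights, and the keyword arguments mirror the patch):

    def load_lora(model, lora_weights, **kwargs):
        # Deferred import: importing peft here, rather than at module scope,
        # keeps bitsandbytes/CUDA out of processes that never load LoRA weights.
        from peft import PeftModel
        return PeftModel.from_pretrained(model, lora_weights, **kwargs)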