Make OPT policy backward compatible with pre-OPT transformers versions
arashb committed Aug 23, 2022
1 parent 86164c4 commit 7bf011e
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions deepspeed/module_inject/replace_policy.py
@@ -444,13 +444,12 @@ def __init__(self, client_module, inference=True):
         try:
             import transformers
             HFOPTLayerPolicy._orig_layer_class = transformers.models.opt.modeling_opt.OPTDecoderLayer
+            if isinstance(DSPolicy.hf_model_config,
+                          transformers.models.opt.configuration_opt.OPTConfig):
+                self.pre_attn_norm = self.hf_model_config.do_layer_norm_before
         except:
             HFOPTLayerPolicy._orig_layer_class = None
 
-        if isinstance(DSPolicy.hf_model_config,
-                      transformers.models.opt.configuration_opt.OPTConfig):
-            self.pre_attn_norm = self.hf_model_config.do_layer_norm_before
-
     def get_hidden_heads(self):
         return self.client_module.self_attn.embed_dim, \
             self.client_module.self_attn.num_heads
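The change moves the OPT-specific config check inside the existing try/except. On transformers releases that predate OPT support, transformers.models has no opt submodule, so the first attribute lookup raises and the except branch runs; previously, the isinstance check sitting after the except block touched transformers.models.opt again and crashed on those versions. Below is a minimal, self-contained sketch of this version-guard pattern; the class name, constructor signature, and the True default are illustrative stand-ins, not the real DeepSpeed policy.

    # Sketch of the guard pattern applied in this commit (names are stand-ins).
    class HFOPTLayerPolicySketch:
        _orig_layer_class = None

        def __init__(self, hf_model_config):
            self.pre_attn_norm = True  # assumed default when OPT is unavailable
            try:
                import transformers
                # Pre-OPT transformers releases have no models.opt submodule,
                # so this lookup raises and control jumps to the except branch.
                HFOPTLayerPolicySketch._orig_layer_class = \
                    transformers.models.opt.modeling_opt.OPTDecoderLayer
                # The config check also touches transformers.models.opt, so it
                # must sit inside the try block; leaving it after the except
                # block is exactly what broke pre-OPT versions.
                if isinstance(hf_model_config,
                              transformers.models.opt.configuration_opt.OPTConfig):
                    self.pre_attn_norm = hf_model_config.do_layer_norm_before
            except (ImportError, AttributeError):  # the real code uses a bare except
                HFOPTLayerPolicySketch._orig_layer_class = None

Catching the failure once, at the first access to transformers.models.opt, keeps all OPT-dependent setup in a single guarded block, so later OPT-only configuration cannot reintroduce the crash on older transformers versions.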
