Commit
1 parent 3a36ffa · commit 32ad5f8 · Showing 50 changed files with 1,401 additions and 20 deletions.
@@ -0,0 +1,13 @@
# This file is a log of any destructive changes that have been made to the
# turnkey models corpus.

# models_renamed is a dictionary of dictionaries. At the top level, the keys
# are the corpus names. At the leaf level, the keys are original model names
# and the values are the new model names.
# models_deleted is a list of models that have been removed from the corpus

models_renamed:
  transformers:
    funnelbase: funnel_small_base
models_deleted:
  - None
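Read back as YAML, the mapping above nests corpus → {old name: new name}. A minimal sketch of consuming the log (assumes PyYAML; the filename models_changelog.yaml is a guess, and none of this code is part of the commit):

import yaml  # PyYAML, assumed installed

# Print every rename as "corpus: old -> new", then the deletions.
with open("models_changelog.yaml") as f:  # hypothetical filename
    changelog = yaml.safe_load(f)

for corpus, renames in (changelog.get("models_renamed") or {}).items():
    for old_name, new_name in renames.items():
        print(f"{corpus}: {old_name} -> {new_name}")

print("deleted:", changelog.get("models_deleted"))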
@@ -18,4 +18,5 @@ sentence_transformers
scipy
numpy
timm
fvcore
sacremoses
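(Presumably sacremoses was added because several transformers tokenizer families, such as XLM and Marian, require it at runtime; the diff alone does not say which of the new scripts needs it.)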
@@ -0,0 +1,31 @@
# labels: name::dpr_context_encoder author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import DPRContextEncoder, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = DPRContextEncoder.from_pretrained("facebook/dpr-ctx_encoder-single-nq-base")
else:
    config = AutoConfig.from_pretrained("facebook/dpr-ctx_encoder-single-nq-base")
    model = DPRContextEncoder(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
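The all-ones tensors are deliberate dummy inputs: these scripts only need a shape-correct forward pass, not meaningful text. For a quick sanity check with real text, a hedged sketch using the standard transformers tokenizer API (not part of this commit) could look like:

from transformers import AutoTokenizer, DPRContextEncoder
import torch

tokenizer = AutoTokenizer.from_pretrained("facebook/dpr-ctx_encoder-single-nq-base")
model = DPRContextEncoder.from_pretrained("facebook/dpr-ctx_encoder-single-nq-base")
model.eval()

# Tokenize a real passage instead of torch.ones dummies.
inputs = tokenizer("Dense Passage Retrieval encodes passages as vectors.", return_tensors="pt")
with torch.no_grad():
    embedding = model(**inputs).pooler_output  # the dense passage embedding

print(embedding.shape)  # (1, 768) for this base-size encoder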
@@ -0,0 +1,33 @@
# labels: name::dpr_question_encoder author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import DPRQuestionEncoder, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = DPRQuestionEncoder.from_pretrained(
        "facebook/dpr-question_encoder-single-nq-base"
    )
else:
    config = AutoConfig.from_pretrained("facebook/dpr-question_encoder-single-nq-base")
    model = DPRQuestionEncoder(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
@@ -0,0 +1,31 @@
# labels: name::dpr_reader author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import DPRReader, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = DPRReader.from_pretrained("facebook/dpr-reader-single-nq-base")
else:
    config = AutoConfig.from_pretrained("facebook/dpr-reader-single-nq-base")
    model = DPRReader(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
@@ -0,0 +1,31 @@
# labels: name::electra_base author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import ElectraModel, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = ElectraModel.from_pretrained("google/electra-base-discriminator")
else:
    config = AutoConfig.from_pretrained("google/electra-base-discriminator")
    model = ElectraModel(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
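One nuance worth noting: ElectraModel exposes only the encoder's hidden states; the replaced-token-detection head that makes the discriminator checkpoints interesting lives in ElectraForPreTraining. A hedged sketch of exercising that head (standard transformers API, not part of this commit):

from transformers import AutoTokenizer, ElectraForPreTraining
import torch

tokenizer = AutoTokenizer.from_pretrained("google/electra-small-discriminator")
model = ElectraForPreTraining.from_pretrained("google/electra-small-discriminator")
model.eval()

inputs = tokenizer("the quick brown fox jumps over the lazy dog", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # one real-vs-replaced score per token

print(logits.shape)  # (1, sequence_length)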
@@ -0,0 +1,31 @@
# labels: name::electra_generator_base author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import ElectraModel, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = ElectraModel.from_pretrained("google/electra-base-generator")
else:
    config = AutoConfig.from_pretrained("google/electra-base-generator")
    model = ElectraModel(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
@@ -0,0 +1,31 @@
# labels: name::electra_generator_large author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import ElectraModel, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = ElectraModel.from_pretrained("google/electra-large-generator")
else:
    config = AutoConfig.from_pretrained("google/electra-large-generator")
    model = ElectraModel(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
@@ -0,0 +1,31 @@
# labels: name::electra_generator_small author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import ElectraModel, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = ElectraModel.from_pretrained("google/electra-small-generator")
else:
    config = AutoConfig.from_pretrained("google/electra-small-generator")
    model = ElectraModel(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
@@ -0,0 +1,31 @@
# labels: name::electra_large author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import ElectraModel, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = ElectraModel.from_pretrained("google/electra-large-discriminator")
else:
    config = AutoConfig.from_pretrained("google/electra-large-discriminator")
    model = ElectraModel(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
@@ -0,0 +1,31 @@
# labels: name::ernie2_large author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import ErnieModel, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = ErnieModel.from_pretrained("nghuyong/ernie-2.0-large-en")
else:
    config = AutoConfig.from_pretrained("nghuyong/ernie-2.0-large-en")
    model = ErnieModel(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
@@ -0,0 +1,31 @@
# labels: name::ernie3_base author::transformers task::Generative_AI license::apache-2.0
from turnkeyml.parser import parse
from transformers import ErnieModel, AutoConfig
import torch

torch.manual_seed(0)

# Parsing command-line arguments
pretrained, batch_size, max_seq_length = parse(
    ["pretrained", "batch_size", "max_seq_length"]
)

# Model and input configurations
if pretrained:
    model = ErnieModel.from_pretrained("nghuyong/ernie-3.0-base-zh")
else:
    config = AutoConfig.from_pretrained("nghuyong/ernie-3.0-base-zh")
    model = ErnieModel(config)

# Make sure the user's sequence length fits within the model's maximum
assert max_seq_length <= model.config.max_position_embeddings


inputs = {
    "input_ids": torch.ones(batch_size, max_seq_length, dtype=torch.long),
    "attention_mask": torch.ones(batch_size, max_seq_length, dtype=torch.float),
}


# Call model
model(**inputs)
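Every script above ends the same way: build a model/inputs pair and run one forward pass, which is the form a tracer needs. As a hedged illustration of what can be done with such a pair (generic torch.onnx usage, not necessarily turnkeyml's actual pipeline):

import torch

# Assumes `model` and `inputs` as built by any of the scripts above.
model.eval()
torch.onnx.export(
    model,
    (inputs["input_ids"], inputs["attention_mask"]),
    "model.onnx",
    input_names=["input_ids", "attention_mask"],
    output_names=["output"],
    dynamic_axes={"input_ids": {0: "batch"}, "attention_mask": {0: "batch"}},
)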