removed transformers from requirements.txt, added error message

atamyrat 2023-08-21 06:07:29 +03:00
parent 155475a523
commit 0dd82158f6
2 changed files with 9 additions and 2 deletions


@@ -280,7 +280,12 @@ def load_checkpoint(checkpoint):
 
 def load_hf_model(model_path):
 
-    from transformers import AutoModelForCausalLM
+    try:
+        from transformers import AutoModelForCausalLM
+    except ImportError:
+        print("Error: transformers package is required to load huggingface models")
+        print("Please run `pip install transformers` to install it")
+        return None
 
     # load HF model
     hf_model = AutoModelForCausalLM.from_pretrained(model_path)
@@ -357,5 +362,8 @@ if __name__ == "__main__":
     else:
         parser.error("Input model missing: --checkpoint or --hf is required")
 
+    if model is None:
+        parser.error("Can't load input model!")
+
     # export
     model_export(model, args.filepath, args.version)
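
Taken together, the two hunks make transformers an optional dependency: load_hf_model now returns None when the import fails, and the __main__ block turns that None into an argparse error instead of a later traceback. A minimal, self-contained sketch of that pattern (the argparse setup below is illustrative, not the script's full option list):

    import argparse

    def load_hf_model(model_path):
        # guarded import: transformers is only needed for the --hf path
        try:
            from transformers import AutoModelForCausalLM
        except ImportError:
            print("Error: transformers package is required to load huggingface models")
            print("Please run `pip install transformers` to install it")
            return None
        # only reached when transformers is importable
        return AutoModelForCausalLM.from_pretrained(model_path)

    if __name__ == "__main__":
        parser = argparse.ArgumentParser()
        parser.add_argument("--hf", type=str, help="huggingface model path")
        args = parser.parse_args()
        model = load_hf_model(args.hf) if args.hf else None
        if model is None:
            # exits with a usage message instead of a stack trace
            parser.error("Can't load input model!")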

requirements.txt

@@ -5,4 +5,3 @@ sentencepiece==0.1.99
 torch==2.0.1
 tqdm==4.64.1
 wandb==0.15.5
-transformers==4.31.0