@@ -138,7 +138,7 @@ if model_path is None:
         "Model path must be specified either via --model-path argument or MODEL_PATH environment variable"
     )
 
-config = AutoConfig.from_pretrained(model_path)
+config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)
 
 print("Model type: ", config.model_type)
 print("Vocab size: ", config.vocab_size)
@@ -148,8 +148,8 @@ print("BOS token id: ", config.bos_token_id)
 print("EOS token id: ", config.eos_token_id)
 
 print("Loading model and tokenizer using AutoTokenizer:", model_path)
-tokenizer = AutoTokenizer.from_pretrained(model_path)
-config = AutoConfig.from_pretrained(model_path)
+tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
+config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)
 
 if unreleased_model_name:
     model_name_lower = unreleased_model_name.lower()
@@ -171,7 +171,7 @@ if unreleased_model_name:
         exit(1)
     else:
         model = AutoModelForCausalLM.from_pretrained(
-            model_path, device_map="auto", offload_folder="offload"
+            model_path, device_map="auto", offload_folder="offload", trust_remote_code=True
         )
 
 for name, module in model.named_modules():