
Commit

Merge pull request #321 from caikit/revert-314-add_granite_modeling_llama_main

Revert "Add granite modeling llama main"
gkumbhat authored Feb 9, 2024
2 parents b738a99 + 882537d commit 4396e54
Showing 4 changed files with 4 additions and 1,902 deletions.
caikit_nlp/modules/text_generation/peft_prompt_tuning.py (2 additions, 3 deletions)
@@ -440,7 +440,7 @@ def train(
 
             # Remove _name_or_path field as a model can be
             # saved in different location but still same
-            base_model_config.pop("_name_or_path", None)
+            del base_model_config["_name_or_path"]
             error.value_check(
                 "<NLP07232147E>",
                 "_name_or_path" not in base_model_config,
@@ -585,8 +585,7 @@ def load(
         if peft_config.task_type == "CAUSAL_LM":
             # get the transformers Causal LM model
             base_model = AutoModelForCausalLM.from_pretrained(
-                peft_config.base_model_name_or_path,
-                torch_dtype=torch_dtype,
+                peft_config.base_model_name_or_path
             )
             # get the PEFT causal LM model
             model = PeftModel.from_pretrained(base_model, model_config)
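The reverted call above drops the explicit torch_dtype argument, so the base model loads in its default dtype instead of the caller-requested one. A minimal sketch (not part of this commit) of the two variants, assuming the standard transformers API; the model id below is hypothetical:

# Minimal sketch (not part of this commit); "my-org/my-llama-model" is a hypothetical id.
import torch
from transformers import AutoModelForCausalLM

model_id = "my-org/my-llama-model"

# With the argument (pre-revert behaviour): weights are loaded in the requested dtype.
model_fp16 = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16)

# Without it (post-revert behaviour): the framework's default dtype is used.
model_default = AutoModelForCausalLM.from_pretrained(model_id)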
caikit_nlp/resources/pretrained_model/hf_auto_causal_lm.py (0 additions, 5 deletions)
@@ -35,11 +35,6 @@
 
 # Local
 from ...data_model import GenerationTrainRecord, PromptOutputModelType
-
-# Note: Below module is imported to allow loading of fm stack sphinx models
-from ...toolkit.text_generation import (  # pylint: disable=unused-import
-    granite_modeling_llama,
-)
 from ...toolkit.verbalizer_utils import render_verbalizer
 from .base import PretrainedModelBase
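The deleted import above existed only for its side effects, as its comment notes: importing granite_modeling_llama was meant to make that model code loadable, which is why the pylint unused-import suppression was needed. A minimal sketch (not part of this commit) of the general register-at-import-time pattern; the names register_model and GraniteLlama are illustrative, not real caikit_nlp APIs:

# Minimal sketch (not part of this commit): why an "unused" import can matter.
MODEL_REGISTRY = {}

def register_model(name, cls):
    """Record a model class so it can be looked up by name later."""
    MODEL_REGISTRY[name] = cls

class GraniteLlama:
    """Hypothetical placeholder for a custom modeling class."""

# Runs at import time; removing the import also removes this registration.
register_model("granite_llama", GraniteLlama)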
Diffs for the remaining changed files were not loaded on this page.

0 comments on commit 4396e54
