Deprecate unused OpenLlama architecture (#24922)
* Resolve typo in check_repo.py

* Specify encoding when opening modeling files

* Deprecate the OpenLlama architecture

* Add disclaimer pointing to Llama

I'm open to different wordings here

* Match the capitalisation of LLaMA
tomaarsen authored Jul 20, 2023
1 parent 8fd8c8e commit 79444f3
Showing 12 changed files with 64 additions and 419 deletions.
15 changes: 15 additions & 0 deletions docs/source/en/model_doc/open-llama.md
@@ -16,6 +16,21 @@ rendered properly in your Markdown viewer.

# Open-Llama

<Tip warning={true}>

This model is in maintenance mode only, so we won't accept any new PRs changing its code.

If you run into any issues running this model, please reinstall the last version that supported this model: v4.31.0.
You can do so by running the following command: `pip install -U transformers==4.31.0`.

</Tip>

<Tip warning={true}>

This model differs from the [OpenLLaMA models](https://huggingface.co/models?search=openllama) on the Hugging Face Hub, which primarily use the [LLaMA](llama) architecture.

</Tip>

## Overview

The Open-Llama model was proposed in the [Open-Llama project](https://github.com/s-JoL/Open-Llama) by community developer s-JoL.
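Because the classes are still re-exported from the top-level package after the move (see the `src/transformers/__init__.py` diff below), user code that imports them from `transformers` keeps working. A minimal sketch under that assumption; the tiny config values are purely illustrative and `torch` must be installed:

```python
from transformers import OpenLlamaConfig, OpenLlamaForCausalLM

# Deliberately tiny config for illustration; real checkpoints define their own sizes.
config = OpenLlamaConfig(
    vocab_size=1000,
    hidden_size=64,
    intermediate_size=128,
    num_hidden_layers=2,
    num_attention_heads=4,
)
model = OpenLlamaForCausalLM(config)

# After this commit the class resolves to the deprecated subpackage:
print(type(model).__module__)  # transformers.models.deprecated.open_llama.modeling_open_llama
```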
22 changes: 11 additions & 11 deletions src/transformers/__init__.py
@@ -278,6 +278,7 @@
"MCTCTProcessor",
],
"models.deprecated.mmbt": ["MMBTConfig"],
"models.deprecated.open_llama": ["OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenLlamaConfig"],
"models.deprecated.retribert": [
"RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP",
"RetriBertConfig",
@@ -445,7 +446,6 @@
"NystromformerConfig",
],
"models.oneformer": ["ONEFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "OneFormerConfig", "OneFormerProcessor"],
"models.open_llama": ["OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenLlamaConfig"],
"models.openai": ["OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenAIGPTConfig", "OpenAIGPTTokenizer"],
"models.opt": ["OPTConfig"],
"models.owlvit": [
@@ -1536,6 +1536,9 @@
]
)
_import_structure["models.deprecated.mmbt"].extend(["MMBTForClassification", "MMBTModel", "ModalEmbeddings"])
_import_structure["models.deprecated.open_llama"].extend(
["OpenLlamaForCausalLM", "OpenLlamaForSequenceClassification", "OpenLlamaModel", "OpenLlamaPreTrainedModel"]
)
_import_structure["models.deprecated.retribert"].extend(
["RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST", "RetriBertModel", "RetriBertPreTrainedModel"]
)
@@ -2300,9 +2303,6 @@
"OneFormerPreTrainedModel",
]
)
_import_structure["models.open_llama"].extend(
["OpenLlamaForCausalLM", "OpenLlamaForSequenceClassification", "OpenLlamaModel", "OpenLlamaPreTrainedModel"]
)
_import_structure["models.openai"].extend(
[
"OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
@@ -4239,6 +4239,7 @@
MCTCTProcessor,
)
from .models.deprecated.mmbt import MMBTConfig
from .models.deprecated.open_llama import OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenLlamaConfig
from .models.deprecated.retribert import (
RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP,
RetriBertConfig,
@@ -4390,7 +4391,6 @@
from .models.nllb_moe import NLLB_MOE_PRETRAINED_CONFIG_ARCHIVE_MAP, NllbMoeConfig
from .models.nystromformer import NYSTROMFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, NystromformerConfig
from .models.oneformer import ONEFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, OneFormerConfig, OneFormerProcessor
from .models.open_llama import OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenLlamaConfig
from .models.openai import OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenAIGPTConfig, OpenAIGPTTokenizer
from .models.opt import OPTConfig
from .models.owlvit import (
@@ -5334,6 +5334,12 @@
MCTCTPreTrainedModel,
)
from .models.deprecated.mmbt import MMBTForClassification, MMBTModel, ModalEmbeddings
from .models.deprecated.open_llama import (
OpenLlamaForCausalLM,
OpenLlamaForSequenceClassification,
OpenLlamaModel,
OpenLlamaPreTrainedModel,
)
from .models.deprecated.retribert import (
RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
RetriBertModel,
@@ -5954,12 +5960,6 @@
OneFormerModel,
OneFormerPreTrainedModel,
)
from .models.open_llama import (
OpenLlamaForCausalLM,
OpenLlamaForSequenceClassification,
OpenLlamaModel,
OpenLlamaPreTrainedModel,
)
from .models.openai import (
OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
OpenAIGPTDoubleHeadsModel,
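Both halves of this diff follow the file's lazy-import pattern: `_import_structure` records which submodule defines each symbol, and the `TYPE_CHECKING` branch mirrors it for static analysis. A rough sketch of the mechanism, simplified from `transformers.utils._LazyModule` (the real class takes additional arguments such as the module file and spec):

```python
import importlib
from types import ModuleType


class _LazyModule(ModuleType):
    """Simplified sketch: defer submodule imports until attribute access."""

    def __init__(self, name: str, import_structure: dict):
        super().__init__(name)
        # Invert {submodule: [symbol, ...]} into {symbol: submodule}.
        self._class_to_module = {
            symbol: submodule
            for submodule, symbols in import_structure.items()
            for symbol in symbols
        }

    def __getattr__(self, attr: str):
        if attr not in self._class_to_module:
            raise AttributeError(f"module {self.__name__!r} has no attribute {attr!r}")
        # e.g. "OpenLlamaConfig" -> "models.deprecated.open_llama"
        submodule = self._class_to_module[attr]
        module = importlib.import_module(f".{submodule}", self.__name__)
        return getattr(module, attr)
```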
1 change: 0 additions & 1 deletion src/transformers/models/__init__.py
@@ -145,7 +145,6 @@
nllb_moe,
nystromformer,
oneformer,
open_llama,
openai,
opt,
owlvit,
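Dropping `open_llama` from this listing means the submodule is no longer importable from its old location; it now lives one package deeper. A before/after sketch of the direct (non-lazy) import paths:

```python
# New location introduced by this commit:
from transformers.models.deprecated.open_llama import OpenLlamaConfig

# Old location, removed here (now raises ModuleNotFoundError):
# from transformers.models.open_llama import OpenLlamaConfig
```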
1 change: 1 addition & 0 deletions src/transformers/models/auto/configuration_auto.py
@@ -652,6 +652,7 @@
"bort",
"mctct",
"mmbt",
"open_llama",
"retribert",
"tapex",
"trajectory_transformer",
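Adding `"open_llama"` to this deprecated-models list is what lets the auto classes find the relocated module. A sketch of the lookup, loosely following `model_type_to_module_name` in `configuration_auto.py` (simplified; the real function consults its name mappings first):

```python
DEPRECATED_MODELS = [
    "bort",
    "mctct",
    "mmbt",
    "open_llama",
    "retribert",
    "tapex",
    "trajectory_transformer",
    # ... the real list continues
]


def model_type_to_module_name(key: str) -> str:
    """Map a model type such as 'open-llama' to its module name under transformers.models."""
    key = key.replace("-", "_")
    if key in DEPRECATED_MODELS:
        key = f"deprecated.{key}"
    return key


assert model_type_to_module_name("open-llama") == "deprecated.open_llama"
```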
src/transformers/models/{open_llama → deprecated/open_llama}/__init__.py
@@ -13,7 +13,7 @@
# limitations under the License.
from typing import TYPE_CHECKING

from ...utils import (
from ....utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_sentencepiece_available,
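This and the two file sections that follow contain the only code change from the move itself: each relative import gains one leading dot, because the package now sits one directory deeper and needs an extra level to climb back to the `transformers` root. Illustrated as comments:

```python
# Old path: src/transformers/models/open_llama/__init__.py
#   from ...utils import OptionalDependencyNotAvailable   # three dots reach transformers/
# New path: src/transformers/models/deprecated/open_llama/__init__.py
#   from ....utils import OptionalDependencyNotAvailable  # one level deeper, one more dot
```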
src/transformers/models/{open_llama → deprecated/open_llama}/configuration_open_llama.py
@@ -19,8 +19,8 @@
# limitations under the License.
""" Open-Llama model configuration"""

from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ....configuration_utils import PretrainedConfig
from ....utils import logging


logger = logging.get_logger(__name__)
src/transformers/models/{open_llama → deprecated/open_llama}/modeling_open_llama.py
@@ -26,10 +26,10 @@
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
from ...modeling_utils import PreTrainedModel
from ...utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
from ....activations import ACT2FN
from ....modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
from ....modeling_utils import PreTrainedModel
from ....utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
from .configuration_open_llama import OpenLlamaConfig


56 changes: 28 additions & 28 deletions src/transformers/utils/dummy_pt_objects.py
@@ -2396,6 +2396,34 @@ def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaForCausalLM(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaForSequenceClassification(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaModel(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaPreTrainedModel(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None


@@ -5461,34 +5489,6 @@ def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaForCausalLM(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaForSequenceClassification(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaModel(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


class OpenLlamaPreTrainedModel(metaclass=DummyObject):
_backends = ["torch"]

def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])


OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = None


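These dummy classes simply moved within the file; they keep `from transformers import OpenLlamaModel` importable in torch-free environments while failing loudly on use. A condensed sketch of the pattern, with simplified signatures assumed for `DummyObject` and `requires_backends` (the real versions live in `transformers.utils`):

```python
import importlib.util


def requires_backends(obj, backends):
    """Raise ImportError if any required backend (e.g. torch) is missing."""
    name = getattr(obj, "__name__", obj.__class__.__name__)
    missing = [b for b in backends if importlib.util.find_spec(b) is None]
    if missing:
        raise ImportError(f"{name} requires the following backends: {', '.join(missing)}")


class DummyObject(type):
    """Metaclass: touching any public attribute of the class re-checks the backends."""

    def __getattribute__(cls, key):
        if key.startswith("_"):
            return super().__getattribute__(key)
        requires_backends(cls, cls._backends)
```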
