|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import TYPE_CHECKING |
|
|
|
|
|
from transformers.utils import ( |
|
|
OptionalDependencyNotAvailable, |
|
|
_LazyModule, |
|
|
is_sentencepiece_available, |
|
|
is_tokenizers_available, |
|
|
is_torch_available, |
|
|
) |
|
|
|
|
|
|
|
|
# Map of submodule name -> list of public names it exports. `_LazyModule`
# (installed at the bottom of this file) consults this table to defer each
# submodule's import until one of its attributes is first accessed. The
# configuration entry is unconditional; optional-dependency entries
# (tokenizers, modelling code) are appended below only when their backend
# package is installed.
_import_structure = {
    "configuration_llama": [
        "LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "LlamaConfig",
    ],
}
|
|
|
|
|
# Register the slow (SentencePiece-based) tokenizer only when its optional
# dependency is installed; when it is missing the key is simply left out and
# the lazy module reports the tokenizer as unavailable.
if is_sentencepiece_available():
    _import_structure["tokenization_llama"] = ["LlamaTokenizer"]
|
|
|
|
|
# Register the fast (Rust `tokenizers`-backed) tokenizer only when the
# optional `tokenizers` package is installed; otherwise skip it silently.
if is_tokenizers_available():
    _import_structure["tokenization_llama_fast"] = ["LlamaTokenizerFast"]
|
|
|
|
|
# The modelling classes require PyTorch; register them for lazy import only
# when torch is installed.
if is_torch_available():
    _import_structure["modeling_llama"] = [
        "LlamaForCausalLM",
        "LlamaModel",
        "LlamaPreTrainedModel",
        "LlamaForSequenceClassification",
    ]
|
|
|
|
|
|
|
|
if TYPE_CHECKING:
    # Static-analysis path: perform the real imports so type checkers and
    # IDEs can resolve every public name eagerly. These imports never run
    # at runtime.
    from .configuration_llama import LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP, LlamaConfig

    if is_sentencepiece_available():
        from .tokenization_llama import LlamaTokenizer

    if is_tokenizers_available():
        from .tokenization_llama_fast import LlamaTokenizerFast

    if is_torch_available():
        from .modeling_llama import (
            LlamaForCausalLM,
            LlamaForSequenceClassification,
            LlamaModel,
            LlamaPreTrainedModel,
        )

else:
    # Runtime path: replace this module object with a lazy proxy that defers
    # each submodule import (per `_import_structure`) until one of its
    # attributes is first accessed.
    import sys

    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
|
|
|