Commit

model addition (imports etc)
eustlb authored and Eustache Le Bihan committed Dec 15, 2024
1 parent 461f210 commit 22dbaae
Showing 9 changed files with 153 additions and 2 deletions.
2 changes: 2 additions & 0 deletions docs/source/en/_toctree.yml
@@ -490,6 +490,8 @@
title: mLUKE
- local: model_doc/mobilebert
title: MobileBERT
- local: model_doc/moonshine
title: moonshine
- local: model_doc/mpnet
title: MPNet
- local: model_doc/mpt
108 changes: 108 additions & 0 deletions docs/source/en/model_doc/moonshine.md
@@ -0,0 +1,108 @@
<!--Copyright 2024 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
⚠️ Note that this file is in Markdown but contains specific syntax for our doc-builder (similar to MDX) that may not be
rendered properly in your Markdown viewer.
-->

# moonshine

## Overview

The moonshine model was proposed in [<INSERT PAPER NAME HERE>](<INSERT PAPER LINK HERE>) by <INSERT AUTHORS HERE>.
<INSERT SHORT SUMMARY HERE>

The abstract from the paper is the following:

*<INSERT PAPER ABSTRACT HERE>*

Tips:

<INSERT TIPS ABOUT MODEL HERE>

This model was contributed by [INSERT YOUR HF USERNAME HERE](https://huggingface.co/<INSERT YOUR HF USERNAME HERE>).
The original code can be found [here](<INSERT LINK TO GITHUB REPO HERE>).


## MoonshineConfig

[[autodoc]] MoonshineConfig
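
The snippet below is not part of this commit; it is a minimal sketch of the usual config-then-model pattern, assuming the default `MoonshineConfig` values are enough to build a small random-weight model.

```python
from transformers import MoonshineConfig, MoonshineModel

# Build a configuration with default values (assumed here to be sensible defaults)
configuration = MoonshineConfig()

# Instantiate a randomly initialized model from that configuration
model = MoonshineModel(configuration)

# The configuration can be read back from the model
configuration = model.config
```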

## MoonshineTokenizer

[[autodoc]] MoonshineTokenizer
- set_prefix_tokens
- build_inputs_with_special_tokens
- get_special_tokens_mask
- create_token_type_ids_from_sequences
- save_vocabulary
- batch_decode
- decode
- basic_normalize
- normalize

## MoonshineTokenizerFast

[[autodoc]] MoonshineTokenizerFast
- set_prefix_tokens
- build_inputs_with_special_tokens
- get_special_tokens_mask
- create_token_type_ids_from_sequences
- save_vocabulary
- batch_decode
- decode
- basic_normalize
- normalize

## MoonshineFeatureExtractor

[[autodoc]] MoonshineFeatureExtractor
- __call__

## MoonshineProcessor

[[autodoc]] MoonshineProcessor
- __call__
- from_pretrained
- save_pretrained
- batch_decode
- decode

<frameworkcontent>
<pt>

## MoonshineModel

[[autodoc]] MoonshineModel
- forward
- _mask_input_features

## MoonshineForConditionalGeneration

[[autodoc]] MoonshineForConditionalGeneration
- forward
- generate
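
A minimal transcription sketch, not taken from this commit: the placeholder checkpoint name, the 16 kHz mono input, and the `sampling_rate` argument are assumptions modeled on other speech-to-text models in the library.

```python
import torch
from transformers import MoonshineForConditionalGeneration, MoonshineProcessor

# Placeholder checkpoint name; replace with a real Moonshine checkpoint.
checkpoint = "<ORG>/<MOONSHINE-CHECKPOINT>"
processor = MoonshineProcessor.from_pretrained(checkpoint)
model = MoonshineForConditionalGeneration.from_pretrained(checkpoint)

# One second of silence stands in for real speech (mono, 16 kHz assumed).
audio = torch.zeros(16000)

inputs = processor(audio, sampling_rate=16000, return_tensors="pt")
generated_ids = model.generate(**inputs)
print(processor.batch_decode(generated_ids, skip_special_tokens=True))
```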

## MoonshineForCausalLM

[[autodoc]] MoonshineForCausalLM
- forward

## MoonshineForAudioClassification

[[autodoc]] MoonshineForAudioClassification
- forward

</pt>
<tf>
22 changes: 22 additions & 0 deletions src/transformers/__init__.py
@@ -463,6 +463,7 @@
"models.gpt_bigcode": ["GPTBigCodeConfig"],
"models.gpt_neo": ["GPTNeoConfig"],
"models.gpt_neox": ["GPTNeoXConfig"],
"models.moonshine": ["MoonshineConfig"],
"models.gpt_neox_japanese": ["GPTNeoXJapaneseConfig"],
"models.gpt_sw3": [],
"models.gptj": ["GPTJConfig"],
@@ -1081,6 +1082,7 @@
_import_structure["models.gemma"].append("GemmaTokenizerFast")
_import_structure["models.gpt2"].append("GPT2TokenizerFast")
_import_structure["models.gpt_neox"].append("GPTNeoXTokenizerFast")
_import_structure["models.moonshine"].append("MoonshineTokenizer")
_import_structure["models.gpt_neox_japanese"].append("GPTNeoXJapaneseTokenizer")
_import_structure["models.herbert"].append("HerbertTokenizerFast")
_import_structure["models.layoutlm"].append("LayoutLMTokenizerFast")
@@ -2360,6 +2362,16 @@
"GPTNeoXPreTrainedModel",
]
)
_import_structure["models.moonshine"].extend(
[
"MoonshineForCausalLM",
"MoonshineForQuestionAnswering",
"MoonshineForSequenceClassification",
"MoonshineForTokenClassification",
"MoonshineModel",
"MoonshinePreTrainedModel",
]
)
_import_structure["models.gpt_neox_japanese"].extend(
[
"GPTNeoXJapaneseForCausalLM",
@@ -5339,6 +5351,7 @@
)
from .models.gpt_neo import GPTNeoConfig
from .models.gpt_neox import GPTNeoXConfig
from .models.moonshine import MoonshineConfig
from .models.gpt_neox_japanese import (
GPTNeoXJapaneseConfig,
)
@@ -6009,6 +6022,7 @@
from .models.gemma import GemmaTokenizerFast
from .models.gpt2 import GPT2TokenizerFast
from .models.gpt_neox import GPTNeoXTokenizerFast
from .models.moonshine import MoonshineTokenizer
from .models.gpt_neox_japanese import GPTNeoXJapaneseTokenizer
from .models.herbert import HerbertTokenizerFast
from .models.layoutlm import LayoutLMTokenizerFast
@@ -7102,6 +7116,14 @@
GPTNeoXModel,
GPTNeoXPreTrainedModel,
)
from .models.moonshine import (
MoonshineForCausalLM,
MoonshineForQuestionAnswering,
MoonshineForSequenceClassification,
MoonshineForTokenClassification,
MoonshineModel,
MoonshinePreTrainedModel,
)
from .models.gpt_neox_japanese import (
GPTNeoXJapaneseForCausalLM,
GPTNeoXJapaneseModel,
1 change: 1 addition & 0 deletions src/transformers/models/__init__.py
@@ -103,6 +103,7 @@
gpt_bigcode,
gpt_neo,
gpt_neox,
moonshine,
gpt_neox_japanese,
gpt_sw3,
gptj,
2 changes: 2 additions & 0 deletions src/transformers/models/auto/configuration_auto.py
@@ -121,6 +121,7 @@
("gpt_bigcode", "GPTBigCodeConfig"),
("gpt_neo", "GPTNeoConfig"),
("gpt_neox", "GPTNeoXConfig"),
("moonshine", "MoonshineConfig"),
("gpt_neox_japanese", "GPTNeoXJapaneseConfig"),
("gptj", "GPTJConfig"),
("gptsan-japanese", "GPTSanJapaneseConfig"),
@@ -425,6 +426,7 @@
("gpt_bigcode", "GPTBigCode"),
("gpt_neo", "GPT Neo"),
("gpt_neox", "GPT NeoX"),
("moonshine", "moonshine"),
("gpt_neox_japanese", "GPT NeoX Japanese"),
("gptj", "GPT-J"),
("gptsan-japanese", "GPTSAN-japanese"),
6 changes: 6 additions & 0 deletions src/transformers/models/auto/modeling_auto.py
@@ -118,6 +118,7 @@
("gpt_bigcode", "GPTBigCodeModel"),
("gpt_neo", "GPTNeoModel"),
("gpt_neox", "GPTNeoXModel"),
("moonshine", "MoonshineModel"),
("gpt_neox_japanese", "GPTNeoXJapaneseModel"),
("gptj", "GPTJModel"),
("gptsan-japanese", "GPTSanJapaneseForConditionalGeneration"),
@@ -408,6 +409,7 @@
("gpt_bigcode", "GPTBigCodeForCausalLM"),
("gpt_neo", "GPTNeoForCausalLM"),
("gpt_neox", "GPTNeoXForCausalLM"),
("moonshine", "MoonshineForCausalLM"),
("gpt_neox_japanese", "GPTNeoXJapaneseForCausalLM"),
("gptj", "GPTJForCausalLM"),
("gptsan-japanese", "GPTSanJapaneseForConditionalGeneration"),
@@ -494,6 +496,7 @@
("gpt_bigcode", "GPTBigCodeForCausalLM"),
("gpt_neo", "GPTNeoForCausalLM"),
("gpt_neox", "GPTNeoXForCausalLM"),
("moonshine", "MoonshineForCausalLM"),
("gpt_neox_japanese", "GPTNeoXJapaneseForCausalLM"),
("gptj", "GPTJForCausalLM"),
("granite", "GraniteForCausalLM"),
@@ -951,6 +954,7 @@
("gpt_bigcode", "GPTBigCodeForSequenceClassification"),
("gpt_neo", "GPTNeoForSequenceClassification"),
("gpt_neox", "GPTNeoXForSequenceClassification"),
("moonshine", "MoonshineForSequenceClassification"),
("gptj", "GPTJForSequenceClassification"),
("ibert", "IBertForSequenceClassification"),
("jamba", "JambaForSequenceClassification"),
@@ -1039,6 +1043,7 @@
("gpt2", "GPT2ForQuestionAnswering"),
("gpt_neo", "GPTNeoForQuestionAnswering"),
("gpt_neox", "GPTNeoXForQuestionAnswering"),
("moonshine", "MoonshineForQuestionAnswering"),
("gptj", "GPTJForQuestionAnswering"),
("ibert", "IBertForQuestionAnswering"),
("layoutlmv2", "LayoutLMv2ForQuestionAnswering"),
@@ -1142,6 +1147,7 @@
("gpt_bigcode", "GPTBigCodeForTokenClassification"),
("gpt_neo", "GPTNeoForTokenClassification"),
("gpt_neox", "GPTNeoXForTokenClassification"),
("moonshine", "MoonshineForTokenClassification"),
("ibert", "IBertForTokenClassification"),
("layoutlm", "LayoutLMForTokenClassification"),
("layoutlmv2", "LayoutLMv2ForTokenClassification"),
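Not part of the diff: a sketch of what the auto-class entries above enable once the model is registered, assuming the default `MoonshineConfig` can instantiate a small random-weight model.

```python
from transformers import AutoConfig, AutoModel, AutoModelForCausalLM

# "moonshine" now resolves through the auto mappings added in this file.
config = AutoConfig.for_model("moonshine")            # -> MoonshineConfig
model = AutoModel.from_config(config)                 # -> MoonshineModel
lm_model = AutoModelForCausalLM.from_config(config)   # -> MoonshineForCausalLM
print(type(model).__name__, type(lm_model).__name__)
```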
1 change: 1 addition & 0 deletions src/transformers/models/auto/tokenization_auto.py
@@ -311,6 +311,7 @@
("mluke", ("MLukeTokenizer" if is_sentencepiece_available() else None, None)),
("mobilebert", ("MobileBertTokenizer", "MobileBertTokenizerFast" if is_tokenizers_available() else None)),
("moshi", (None, "PreTrainedTokenizerFast" if is_tokenizers_available() else None)),
("moonshine", (None, "PreTrainedTokenizerFast" if is_tokenizers_available() else None)),
("mpnet", ("MPNetTokenizer", "MPNetTokenizerFast" if is_tokenizers_available() else None)),
("mpt", (None, "GPTNeoXTokenizerFast" if is_tokenizers_available() else None)),
("mra", ("RobertaTokenizer", "RobertaTokenizerFast" if is_tokenizers_available() else None)),
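Likewise not part of the diff: with the mapping above, `AutoTokenizer` should return a fast tokenizer for Moonshine checkpoints when the `tokenizers` library is installed; the checkpoint name is a placeholder.

```python
from transformers import AutoTokenizer

# Placeholder checkpoint; a hub repo with model_type "moonshine" and a
# tokenizer.json is expected to resolve to a PreTrainedTokenizerFast instance.
tokenizer = AutoTokenizer.from_pretrained("<ORG>/<MOONSHINE-CHECKPOINT>")
print(type(tokenizer).__name__)
```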
13 changes: 11 additions & 2 deletions src/transformers/models/moonshine/__init__.py
@@ -30,9 +30,13 @@
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_gemma2"] = [
_import_structure["modeling_moonshine"] = [
"MoonshineForConditionalGeneration",
"MoonshineModel",
"MoonshinePreTrainedModel",
]


if TYPE_CHECKING:
from .configuration_moonshine import MoonshineConfig

@@ -42,7 +46,12 @@
except OptionalDependencyNotAvailable:
pass
else:
pass
from .modeling_moonshine import (
MoonshineForConditionalGeneration,
MoonshineModel,
MoonshinePreTrainedModel,
)


else:
import sys
Empty file.
