Page Menu
Home
c4science
Search
Configure Global Search
Log In
Files
F65026357
__init__.py
No One
Temporary
Actions
Download File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Subscribers
None
File Metadata
Details
File Info
Storage
Attached
Created
Fri, May 31, 04:47
Size
3 KB
Mime Type
text/x-python
Expires
Sun, Jun 2, 04:47 (2 d)
Engine
blob
Format
Raw Data
Handle
17991631
Attached To
R11484 ADDI
__init__.py
View Options
# flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING

# Availability probes and the lazy-module base class come from the shared
# transformers utilities three package levels up.
from ...file_utils import _BaseLazyModule, is_tf_available, is_tokenizers_available, is_torch_available
_import_structure
=
{
"configuration_distilbert"
:
[
"DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP"
,
"DistilBertConfig"
],
"tokenization_distilbert"
:
[
"DistilBertTokenizer"
],
}
# Register the fast (Rust-backed) tokenizer only when the optional
# `tokenizers` package is installed.
if is_tokenizers_available():
    _import_structure["tokenization_distilbert_fast"] = ["DistilBertTokenizerFast"]
# Register the PyTorch model classes only when `torch` is installed.
if is_torch_available():
    _import_structure["modeling_distilbert"] = [
        "DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "DistilBertForMaskedLM",
        "DistilBertForMultipleChoice",
        "DistilBertForQuestionAnswering",
        "DistilBertForSequenceClassification",
        "DistilBertForTokenClassification",
        "DistilBertModel",
        "DistilBertPreTrainedModel",
    ]
# Register the TensorFlow model classes only when TensorFlow is installed.
if is_tf_available():
    _import_structure["modeling_tf_distilbert"] = [
        "TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFDistilBertForMaskedLM",
        "TFDistilBertForMultipleChoice",
        "TFDistilBertForQuestionAnswering",
        "TFDistilBertForSequenceClassification",
        "TFDistilBertForTokenClassification",
        "TFDistilBertMainLayer",
        "TFDistilBertModel",
        "TFDistilBertPreTrainedModel",
    ]
# At type-check time, expose the real symbols so static analyzers and IDEs can
# resolve them; at runtime, replace this module in sys.modules with a lazy
# proxy that defers each submodule import until one of its attributes is
# actually requested.
if TYPE_CHECKING:
    from .configuration_distilbert import DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, DistilBertConfig
    from .tokenization_distilbert import DistilBertTokenizer

    if is_tokenizers_available():
        from .tokenization_distilbert_fast import DistilBertTokenizerFast

    if is_torch_available():
        from .modeling_distilbert import (
            DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            DistilBertForMaskedLM,
            DistilBertForMultipleChoice,
            DistilBertForQuestionAnswering,
            DistilBertForSequenceClassification,
            DistilBertForTokenClassification,
            DistilBertModel,
            DistilBertPreTrainedModel,
        )

    if is_tf_available():
        from .modeling_tf_distilbert import (
            TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFDistilBertForMaskedLM,
            TFDistilBertForMultipleChoice,
            TFDistilBertForQuestionAnswering,
            TFDistilBertForSequenceClassification,
            TFDistilBertForTokenClassification,
            TFDistilBertMainLayer,
            TFDistilBertModel,
            TFDistilBertPreTrainedModel,
        )

else:
    import importlib
    import os
    import sys

    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """

        # Inherit this file's identity so the proxy looks like the package it replaces.
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]

        def _get_module(self, module_name: str):
            # Relative import anchored at this package's own name.
            return importlib.import_module("." + module_name, self.__name__)

    # Swap the module object registered for this package with the lazy proxy;
    # subsequent attribute access goes through _LazyModule.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
Event Timeline
Log In to Comment