Skip to content

Commit bb2a6c3

Browse files
Commit bb2a6c3, titled "quality check"
Signed-off-by: Raphael Glon <[email protected]>
1 parent: 3daa1ad

File tree

4 files changed

+6
-8
lines changed

4 files changed

+6
-8
lines changed

src/huggingface_inference_toolkit/heavy_utils.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
from typing import Optional, Union
88

99
from huggingface_hub import HfApi, login, snapshot_download
10-
1110
from transformers import WhisperForConditionalGeneration, pipeline
1211
from transformers.file_utils import is_tf_available, is_torch_available
1312
from transformers.pipelines import Pipeline
@@ -17,15 +16,15 @@
1716
is_diffusers_available,
1817
)
1918
from huggingface_inference_toolkit.logging import logger
19+
from huggingface_inference_toolkit.optimum_utils import (
20+
get_optimum_neuron_pipeline,
21+
is_optimum_neuron_available,
22+
)
2023
from huggingface_inference_toolkit.sentence_transformers_utils import (
2124
get_sentence_transformers_pipeline,
2225
is_sentence_transformers_available,
2326
)
2427
from huggingface_inference_toolkit.utils import create_artifact_filter
25-
from huggingface_inference_toolkit.optimum_utils import (
26-
get_optimum_neuron_pipeline,
27-
is_optimum_neuron_available,
28-
)
2928

3029

3130
def load_repository_from_hf(
@@ -185,4 +184,4 @@ def get_pipeline(
185184
hf_pipeline.model.config.forced_decoder_ids = hf_pipeline.tokenizer.get_decoder_prompt_ids(
186185
language="english", task="transcribe"
187186
)
188-
return hf_pipeline # type: ignore
187+
return hf_pipeline # type: ignore

src/huggingface_inference_toolkit/idle.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
import signal
66
import time
77

8-
98
LOG = logging.getLogger(__name__)
109

1110
LAST_START = None

src/huggingface_inference_toolkit/utils.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
from huggingface_inference_toolkit.const import HF_DEFAULT_PIPELINE_NAME, HF_MODULE_NAME
66
from huggingface_inference_toolkit.logging import logger
77

8-
98
_optimum_available = importlib.util.find_spec("optimum") is not None
109

1110

src/huggingface_inference_toolkit/webservice_starlette.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
from huggingface_inference_toolkit.serialization.base import ContentType
2828
from huggingface_inference_toolkit.serialization.json_utils import Jsoner
2929
from huggingface_inference_toolkit.utils import convert_params_to_int_or_bool
30+
3031
# _load_repository_from_hf,
3132
# convert_params_to_int_or_bool,
3233
# )

0 commit comments

Comments (0)