Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
Properly guard PyTorch stuff (#23452)
* Properly guard PyTorch stuff
* [all-test]
* [all-test] Fix model imports as well
* Making sure StoppingCriteria is always defined
* [all-test]
parent: ffad4f1373
commit: 167aa76cfa
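The point of the change is that the module stays importable when PyTorch is not installed: symbols that pull in torch are only imported behind an is_torch_available() check, with a fallback binding so the names still exist. A minimal sketch of that pattern (simplified, using absolute imports instead of the relative imports in the diff below; not a verbatim copy of the changed file):

    # Sketch of the optional-dependency guard applied in this commit.
    from transformers.utils import is_torch_available

    if is_torch_available():
        # Only reached when PyTorch is installed; these imports require torch.
        from transformers.generation import StoppingCriteria, StoppingCriteriaList
        from transformers.models.auto import AutoModelForCausalLM
    else:
        # Keep the name defined so module-level references to StoppingCriteria
        # do not raise NameError in a torch-free environment.
        StoppingCriteria = object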
@@ -24,9 +24,8 @@ from typing import Dict
 import requests
 from huggingface_hub import HfFolder, hf_hub_download, list_spaces
 
-from ..generation import StoppingCriteria, StoppingCriteriaList
-from ..models.auto import AutoModelForCausalLM, AutoTokenizer
-from ..utils import is_openai_available, logging
+from ..models.auto import AutoTokenizer
+from ..utils import is_openai_available, is_torch_available, logging
 from .base import TASK_MAPPING, TOOL_CONFIG_FILE, Tool, load_tool, supports_remote
 from .prompts import CHAT_MESSAGE_PROMPT, CHAT_PROMPT_TEMPLATE, RUN_PROMPT_TEMPLATE
 from .python_interpreter import evaluate
@@ -38,6 +37,12 @@ logger = logging.get_logger(__name__)
 if is_openai_available():
     import openai
 
+if is_torch_available():
+    from ..generation import StoppingCriteria, StoppingCriteriaList
+    from ..models.auto import AutoModelForCausalLM
+else:
+    StoppingCriteria = object
+
 _tools_are_initialized = False
 
 
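Why the StoppingCriteria = object fallback ("Making sure StoppingCriteria is always defined"): a class statement evaluates its base classes at import time, so any module-level subclass of StoppingCriteria would raise NameError if the name were only imported under the torch guard. A hedged sketch of the failure mode being avoided (the subclass name and body here are hypothetical, not code from this commit):

    from transformers.utils import is_torch_available

    if is_torch_available():
        from transformers.generation import StoppingCriteria
    else:
        StoppingCriteria = object  # keeps the class definition below importable

    class StopWordCriteria(StoppingCriteria):
        """Illustrative criteria: stop once the decoded text ends with a stop word."""

        def __init__(self, stop_word, tokenizer):
            self.stop_word = stop_word
            self.tokenizer = tokenizer

        def __call__(self, input_ids, scores, **kwargs):
            # Only ever called during generation, i.e. when torch is installed.
            text = self.tokenizer.decode(input_ids[0])
            return text.endswith(self.stop_word)

Without the fallback, merely importing a module containing such a subclass would fail in a torch-free environment, even if generation is never invoked.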