Example of how to run inference on this with optimum (optimum[onnxruntime])?

#4
by ernestyalumni

I'm getting the following error when running `ORTFluxPipeline.from_pretrained(...)`.
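For reference, this is roughly how I'm invoking it (a minimal sketch; the import path and the local directory are my own setup, not taken from an official example):

```python
from optimum.onnxruntime import ORTFluxPipeline

# Local clone of black-forest-labs/FLUX.1-dev-onnx (path is illustrative).
model_path = "/Data1/Models/Diffusion/black-forest-labs/FLUX.1-dev-onnx"
pipeline = ORTFluxPipeline.from_pretrained(model_path)
```

The relevant part of the traceback: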

```
library_name = TasksManager.infer_library_from_model(

/usr/local/lib/python3.10/dist-packages/optimum/exporters/tasks.py:2064: in infer_library_from_model
library_name = cls._infer_library_from_model_name_or_path(


cls = <class 'optimum.exporters.tasks.TasksManager'>, model_name_or_path = '/Data1/Models/Diffusion/black-forest-labs/FLUX.1-dev-onnx', subfolder = '', revision = None
cache_dir = '/root/.cache/huggingface/hub', token = None

@classmethod
def _infer_library_from_model_name_or_path(
    cls,
    model_name_or_path: Union[str, Path],
    subfolder: str = "",
    revision: Optional[str] = None,
    cache_dir: str = HUGGINGFACE_HUB_CACHE,
    token: Optional[Union[bool, str]] = None,
):
    """
    Infers the library from the model name or path.

    Args:
        model_name_or_path (`str`):
            The model to infer the task from. This can either be the name of a repo on the HuggingFace Hub, or a path
            to a local directory containing the model.
        subfolder (`str`, defaults to `""`):
            In case the model files are located inside a subfolder of the model directory / repo on the Hugging
            Face Hub, you can specify the subfolder name here.
        revision (`Optional[str]`, *optional*, defaults to `None`):
            Revision is the specific model version to use. It can be a branch name, a tag name, or a commit id.
        cache_dir (`Optional[str]`, *optional*):
            Path to a directory in which a downloaded pretrained model weights have been cached if the standard cache should not be used.
        token (`Optional[Union[bool,str]]`, defaults to `None`):
            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
            when running `huggingface-cli login` (stored in `huggingface_hub.constants.HF_TOKEN_PATH`).

    Returns:
        `str`: The library name automatically detected from the model repo.
    """

    inferred_library_name = None

    all_files, _ = TasksManager.get_model_files(
        model_name_or_path,
        subfolder=subfolder,
        cache_dir=cache_dir,
        revision=revision,
        token=token,
    )

    if "model_index.json" in all_files:
        inferred_library_name = "diffusers"
    elif (
        any(file_path.startswith("sentence_") for file_path in all_files)
        or "config_sentence_transformers.json" in all_files
    ):
        inferred_library_name = "sentence_transformers"
    elif "config.json" in all_files:
        kwargs = {
            "subfolder": subfolder,
            "revision": revision,
            "cache_dir": cache_dir,
            "token": token,
        }
        # We do not use PretrainedConfig.from_pretrained which has unwanted warnings about model type.
        config_dict, kwargs = PretrainedConfig.get_config_dict(model_name_or_path, **kwargs)
        model_config = PretrainedConfig.from_dict(config_dict, **kwargs)

        if hasattr(model_config, "pretrained_cfg") or hasattr(model_config, "architecture"):
            inferred_library_name = "timm"
        elif hasattr(model_config, "_diffusers_version"):
            inferred_library_name = "diffusers"
        else:
            inferred_library_name = "transformers"

    if inferred_library_name is None:
        raise ValueError(
            "The library name could not be automatically inferred. If using the command-line, please provide the argument --library {transformers,diffusers,timm,sentence_transformers}. Example: `--library diffusers`."
        )

E ValueError: The library name could not be automatically inferred. If using the command-line, please provide the argument --library {transformers,diffusers,timm,sentence_transformers}. Example: `--library diffusers`.

/usr/local/lib/python3.10/dist-packages/optimum/exporters/tasks.py:2026: ValueError
```

The model_path I'm passing is something like "black-forest-labs/FLUX.1-dev-onnx" (a local clone, as the traceback shows). From the quoted source, the "diffusers" branch is only taken when model_index.json appears among the top-level files, so it looks like TasksManager.get_model_files isn't finding one in my directory. A quick check, as sketched below:
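As a sanity check (plain standard library, nothing optimum-specific; the path is illustrative), this verifies whether model_index.json is visible at the top level of the local directory, since that is the file the "diffusers" branch keys on:

```python
import os

# Illustrative local path; substitute your own clone location.
model_path = "/Data1/Models/Diffusion/black-forest-labs/FLUX.1-dev-onnx"

# Per the quoted source, TasksManager infers "diffusers" only when
# model_index.json is at the top level, or when a top-level config.json
# carries a _diffusers_version field.
print("model_index.json present:",
      os.path.isfile(os.path.join(model_path, "model_index.json")))
print("top-level files:", sorted(os.listdir(model_path)))
```

If model_index.json is missing, none of the inference branches match and you get exactly this ValueError.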
