Skip to content

Commit

Permalink
unblock wwb cli usage despite broken onnx (#1297)
Browse files Browse the repository at this point in the history
together with huggingface/optimum-intel#1048 and
huggingface/optimum#2114, this allows me to run wwb with
already-exported models on the Windows platform
  • Loading branch information
eaidova authored Dec 5, 2024
1 parent 0b81108 commit e8c1df8
Showing 1 changed file with 1 addition and 12 deletions.
13 changes: 1 addition & 12 deletions tools/who_what_benchmark/whowhatbench/wwb.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,7 @@
import pandas as pd
from datasets import load_dataset
from diffusers import DiffusionPipeline
from optimum.exporters.tasks import TasksManager
from optimum.intel import OVPipelineForText2Image
from optimum.intel.openvino import OVModelForCausalLM, OVModelForVisualCausalLM
from optimum.utils import NormalizedConfigManager, NormalizedTextConfig
from optimum.intel.openvino import OVModelForCausalLM, OVModelForVisualCausalLM, OVPipelineForText2Image
from PIL import Image
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer, AutoProcessor, AutoModel, AutoModelForVision2Seq
import openvino as ov
Expand All @@ -26,14 +23,6 @@
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

TasksManager._SUPPORTED_MODEL_TYPE["stablelm-epoch"] = (
TasksManager._SUPPORTED_MODEL_TYPE["llama"]
)
NormalizedConfigManager._conf["stablelm-epoch"] = NormalizedTextConfig.with_args(
num_layers="num_hidden_layers",
num_attention_heads="num_attention_heads",
)


class GenAIModelWrapper:
"""
Expand Down

0 comments on commit e8c1df8

Please sign in to comment.