Commit c838d16

replaced suggested models

NoushNabi committed Mar 18, 2024
1 parent 45dafa2 commit c838d16

Showing 2 changed files with 4 additions and 11 deletions.
7 changes: 1 addition & 6 deletions .github/workflows/test_openvino_examples.yml
@@ -44,9 +44,4 @@ jobs:
- name: Test examples
run: |
python examples/openvino/test_examples.py
#python examples/openvino/audio-classification/run_audio_classification.py --model_name_or_path facebook/wav2vec2-base --nncf_compression_config examples/openvino/audio-classification/configs/wav2vec2-base-qat.json --dataset_name superb --dataset_config_name ks --max_train_samples 10 --max_eval_samples 2 --output_dir /tmp/qat-wav2vec2-base-ft-keyword-spotting --overwrite_output_dir --remove_unused_columns False --do_train --do_eval --learning_rate 3e-5 --max_length_seconds 1 --attention_mask False --warmup_ratio 0.1 --num_train_epochs 1 --gradient_accumulation_steps 1 --dataloader_num_workers 1 --logging_strategy steps --logging_steps 1 --evaluation_strategy epoch --save_strategy epoch --load_best_model_at_end False --seed 42
#python examples/openvino/text-classification/run_glue.py --model_name_or_path sshleifer/tiny-distilbert-base-cased-distilled-squad --task_name sst2 --max_train_samples 10 --max_eval_samples 2 --output_dir /tmp/qat-bert-base-ft-sst2 --overwrite_output_dir --do_train --do_eval --max_seq_length 128 --learning_rate 1e-5 --optim adamw_torch --num_train_epochs 1 --logging_steps 1 --evaluation_strategy steps --eval_steps 1 --save_strategy epoch --seed 42
#python examples/openvino/question-answering/run_qa.py --model_name_or_path sshleifer/tiny-distilbert-base-cased-distilled-squad --dataset_name squad --do_train --do_eval --max_train_samples 10 --max_eval_samples 2 --learning_rate 3e-5 --num_train_epochs 1 --max_seq_length 384 --doc_stride 128 --output_dir /tmp/outputs_squad/ --overwrite_output_dir
#python examples/openvino/image-classification/run_image_classification.py --model_name_or_path nateraw/vit-base-beans --dataset_name beans --max_train_samples 10 --max_eval_samples 2 --remove_unused_columns False --do_train --do_eval --learning_rate 2e-5 --num_train_epochs 1 --logging_strategy steps --logging_steps 1 --evaluation_strategy epoch --save_strategy epoch --save_total_limit 1 --seed 1337 --output_dir /tmp/beans_outputs/
python examples/openvino/test_examples.py
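
Taken together with the hunk header (@@ -44,9 +44,4 @@), this change appears to strip the step down to the single consolidated test script. A minimal sketch of the resulting "Test examples" step, assuming the commented-out example commands and the duplicated invocation are the lines being removed (not copied verbatim from the repository):

    # Assumed final state of the step after this commit (sketch, not verbatim)
    - name: Test examples
      run: |
        python examples/openvino/test_examples.py
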
8 changes: 3 additions & 5 deletions examples/openvino/test_examples.py
@@ -17,8 +17,6 @@
import unittest
from unittest.mock import patch

#os.environ["CUDA_VISIBLE_DEVICES"] = ""

SRC_DIRS = [
os.path.join(os.path.dirname(__file__), dirname)
for dirname in [
@@ -42,7 +40,7 @@ def test_audio_classification(self):
with tempfile.TemporaryDirectory() as tmp_dir:
test_args = f"""
run_audio_classification.py
--model_name_or_path facebook/wav2vec2-base
--model_name_or_path hf-internal-testing/tiny-random-Wav2Vec2Model
--nncf_compression_config examples/openvino/audio-classification/configs/wav2vec2-base-qat.json
--dataset_name superb
--dataset_config_name ks
@@ -99,7 +97,7 @@ def test_text_classification(self):
with tempfile.TemporaryDirectory() as tmp_dir:
test_args = f"""
run_glue.py
--model_name_or_path sshleifer/tiny-distilbert-base-cased-distilled-squad
--model_name_or_path hf-internal-testing/tiny-random-DistilBertForSequenceClassification
--task_name sst2
--max_train_samples 10
--max_eval_samples 2
@@ -125,7 +123,7 @@ def test_question_answering(self):
with tempfile.TemporaryDirectory() as tmp_dir:
test_args = f"""
run_qa.py
--model_name_or_path sshleifer/tiny-distilbert-base-cased-distilled-squad
--model_name_or_path hf-internal-testing/tiny-random-DistilBertForQuestionAnswering
--dataset_name squad
--do_train
--do_eval
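
The test_args blocks above follow the pattern commonly used by transformers-style example tests: the example directories are appended to sys.path, each run_*.py script is imported as a module, and its main() is invoked with sys.argv patched to the argument list. Below is a minimal sketch of that pattern; the directory subset, the --output_dir flag, and the main() entry point are inferred from the excerpts, not copied from the file.

    # Illustrative sketch of the test pattern suggested by the excerpts above;
    # not the actual contents of examples/openvino/test_examples.py.
    import os
    import sys
    import tempfile
    import unittest
    from unittest.mock import patch

    SRC_DIRS = [
        os.path.join(os.path.dirname(__file__), dirname)
        for dirname in ["text-classification"]  # assumed subset of the real list
    ]
    sys.path.extend(SRC_DIRS)

    import run_glue  # importable once its directory is on sys.path


    class ExamplesTests(unittest.TestCase):
        def test_text_classification(self):
            with tempfile.TemporaryDirectory() as tmp_dir:
                test_args = f"""
                    run_glue.py
                    --model_name_or_path hf-internal-testing/tiny-random-DistilBertForSequenceClassification
                    --task_name sst2
                    --max_train_samples 10
                    --max_eval_samples 2
                    --do_train
                    --do_eval
                    --output_dir {tmp_dir}
                    """.split()
                # Patch argv so the script parses these arguments as if it had
                # been launched from the command line, then run its entry point.
                with patch.object(sys, "argv", test_args):
                    run_glue.main()
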