
Commit

reformatted (#1128)
zhangsibo1129 authored Dec 23, 2023
1 parent e0dec27 commit 8f5b492
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions examples/scripts/ppo_multi_adapter.py
@@ -21,8 +21,9 @@
 from tqdm import tqdm
 from transformers import AutoTokenizer, BitsAndBytesConfig, HfArgumentParser
 
-from trl import AutoModelForCausalLMWithValueHead, PPOConfig, PPOTrainer, is_xpu_available
+from trl import AutoModelForCausalLMWithValueHead, PPOConfig, PPOTrainer
 from trl.core import LengthSampler
+from trl.import_utils import is_npu_available, is_xpu_available
 
 
 input_min_text_length = 6
@@ -82,7 +83,7 @@ def tokenize(example):
 )
 model = AutoModelForCausalLMWithValueHead.from_pretrained(
     script_args.model_name,
-    device_map={"": "xpu:0"} if is_xpu_available() else {"": 0},
+    device_map={"": "xpu:0"} if is_xpu_available() else {"": "npu:0"} if is_npu_available() else {"": 0},
     peft_config=lora_config,
     quantization_config=nf4_config,
     reward_adapter=script_args.rm_adapter,
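For reference, the device selection introduced by this diff can be read as the following standalone pattern. This is a minimal sketch, assuming the is_xpu_available and is_npu_available helpers from trl.import_utils as imported above; the resulting device_map is the value passed to AutoModelForCausalLMWithValueHead.from_pretrained in the example script.

# Accelerator-aware device_map selection, mirroring the conditional expression in the diff.
from trl.import_utils import is_npu_available, is_xpu_available

if is_xpu_available():
    device_map = {"": "xpu:0"}  # place the whole model on the first Intel XPU
elif is_npu_available():
    device_map = {"": "npu:0"}  # place the whole model on the first Ascend NPU
else:
    device_map = {"": 0}        # default: device index 0 (e.g. cuda:0)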
