update readme

Author: zR
Date: 2024-06-20 21:00:46 +08:00
Commit: 5722878e25 (parent 6b2c1930ee)
3 changed files with 2 additions and 3 deletions, under basic_demo and finetune_demo

@@ -100,7 +100,7 @@ python trans_web_demo.py
 + Use Batch inference.
 ```shell
-python cli_batch_request_demo.py
+python trans_batch_demo.py
 ```
 ### Use vLLM backend code

@@ -105,7 +105,7 @@ python trans_web_demo.py
 + Use Batch inference.
 ```shell
-python cli_batch_request_demo.py
+python trans_batch_demo.py
 ```
 ### Use vLLM backend code
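
Both README hunks point readers to `trans_batch_demo.py` for batch inference with the transformers backend. As context, here is a minimal sketch of batched generation with the Hugging Face transformers API. It is not the repository's trans_batch_demo.py; the checkpoint name, prompts, and generation settings are illustrative assumptions.

```python
# Minimal sketch of batched generation with the transformers backend.
# NOT the repository's trans_batch_demo.py: the checkpoint name, prompts,
# and generation settings below are illustrative assumptions.
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_PATH = "THUDM/glm-4-9b-chat"  # assumed checkpoint

tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH, trust_remote_code=True, device_map="auto"
).eval()

# One chat-formatted prompt per request, padded together into a single batch.
conversations = [
    [{"role": "user", "content": "What is batch inference?"}],
    [{"role": "user", "content": "Name one advantage of the vLLM backend."}],
]
prompts = [
    tokenizer.apply_chat_template(conv, tokenize=False, add_generation_prompt=True)
    for conv in conversations
]
tokenizer.padding_side = "left"  # left padding keeps generation aligned to the prompt end
inputs = tokenizer(prompts, return_tensors="pt", padding=True).to(model.device)

outputs = model.generate(**inputs, max_new_tokens=256, do_sample=False)
# Drop the prompt tokens before decoding each reply.
replies = tokenizer.batch_decode(
    outputs[:, inputs["input_ids"].shape[1]:], skip_special_tokens=True
)
for reply in replies:
    print(reply.strip())
```

The vLLM backend covered in the next README section typically reaches higher throughput for the same workload, since it schedules requests continuously instead of padding them into one fixed batch.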

@@ -260,7 +260,6 @@ def process_batch(
        loss_masks = [False, False]
        for message in conv:
            message = process_message(message)
            loss_mask_val = False if message['role'] in ('system', 'user', 'observation') else True
            new_input_ids = tokenizer.apply_chat_template([message], tokenize=True, return_dict=False)[2:]
            new_loss_masks = [loss_mask_val] * len(new_input_ids)
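
The hunk above is the per-message loop that accumulates token ids and a parallel list of loss masks: the `[2:]` slice drops the template's two leading special tokens, which the loop has already seeded into `input_ids` as `[151331, 151333]`, and only assistant turns get `loss_mask_val = True`. As a rough illustration of what those masks are for, here is a small, self-contained sketch (not the actual continuation of finetune.py) that converts them into labels using the conventional `-100` ignore index.

```python
# Illustrative sketch only: finetune.py's actual label construction is not
# shown in this hunk, so the function below is an assumed reconstruction of
# how per-token loss masks are commonly turned into training labels.
def build_labels(input_ids: list[int], loss_masks: list[bool]) -> list[int]:
    # Tokens from system/user/observation turns (mask == False) are replaced
    # with -100, the ignore index of PyTorch's cross-entropy loss, so only
    # assistant tokens contribute to the fine-tuning loss.
    return [tok if keep else -100 for tok, keep in zip(input_ids, loss_masks)]

# Two prefix tokens and a user turn are masked; the assistant turn is kept.
input_ids = [151331, 151333, 1001, 1002, 2001, 2002, 2003]
loss_masks = [False, False, False, False, True, True, True]
print(build_labels(input_ids, loss_masks))
# [-100, -100, -100, -100, 2001, 2002, 2003]
```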