update readme
parent 6b2c1930ee
commit 5722878e25
@@ -100,7 +100,7 @@ python trans_web_demo.py

+ Use Batch inference.

```shell
python cli_batch_request_demo.py
python trans_batch_demo.py
```

### Use vLLM backend code

@@ -105,7 +105,7 @@ python trans_web_demo.py

+ Use Batch inference.

```shell
python cli_batch_request_demo.py
python trans_batch_demo.py
```

### Use vLLM backend code

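The batch demos above are driven entirely from the shell. As a rough sketch of what batched chat generation tends to look like with Hugging Face transformers (the checkpoint name, prompts, and generation settings below are illustrative assumptions, not taken from cli_batch_request_demo.py or trans_batch_demo.py; a recent transformers release is assumed for batched apply_chat_template):

```python
# Minimal sketch of batched generation with Hugging Face transformers.
# Checkpoint, prompts, and generation settings are assumptions for illustration.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_PATH = "THUDM/glm-4-9b-chat"  # assumed checkpoint; replace with your own

tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH, torch_dtype=torch.bfloat16, device_map="auto", trust_remote_code=True
)

prompts = ["Hello, who are you?", "Summarize batch inference in one sentence."]
batch = [[{"role": "user", "content": p}] for p in prompts]

# apply_chat_template can tokenize a whole batch of conversations at once;
# left padding keeps each prompt adjacent to its generated continuation.
tokenizer.padding_side = "left"
inputs = tokenizer.apply_chat_template(
    batch,
    add_generation_prompt=True,
    tokenize=True,
    padding=True,
    return_tensors="pt",
    return_dict=True,
).to(model.device)

with torch.no_grad():
    outputs = model.generate(**inputs, max_new_tokens=128)

# Strip the prompt tokens before decoding each reply.
replies = tokenizer.batch_decode(
    outputs[:, inputs["input_ids"].shape[1]:], skip_special_tokens=True
)
for reply in replies:
    print(reply)
```

Left padding matters here: it keeps every prompt flush against its continuation, so slicing off the shared prompt length recovers only the generated replies.
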
@@ -260,7 +260,6 @@ def process_batch(
    loss_masks = [False, False]
    for message in conv:
        message = process_message(message)

        loss_mask_val = False if message['role'] in ('system', 'user', 'observation') else True
        new_input_ids = tokenizer.apply_chat_template([message], tokenize=True, return_dict=False)[2:]
        new_loss_masks = [loss_mask_val] * len(new_input_ids)
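The hunk above builds a per-message loss mask so that only assistant tokens contribute to the fine-tuning loss. As a minimal, self-contained sketch of that idea (the accumulation into input_ids/loss_masks and the -100 ignore index mirror common practice and are assumptions, not lines from this commit):

```python
# Sketch of how per-message loss masks typically become training labels.
# Variable names mirror the hunk above; the helper and the -100 ignore
# index are assumptions, not the repository's code.
from typing import Dict, List


def build_labels(input_ids: List[int], loss_masks: List[bool]) -> Dict[str, List[int]]:
    # Tokens whose mask is False (system/user/observation turns) are excluded
    # from the loss by replacing them with the ignore index -100.
    labels = [tok if keep else -100 for tok, keep in zip(input_ids, loss_masks)]
    return {"input_ids": input_ids, "labels": labels}


# Toy accumulation mirroring the loop in process_batch: each message
# contributes new_input_ids and a matching run of new_loss_masks.
input_ids: List[int] = []
loss_masks: List[bool] = []
for new_input_ids, loss_mask_val in [([101, 102, 103], False), ([201, 202], True)]:
    new_loss_masks = [loss_mask_val] * len(new_input_ids)
    input_ids += new_input_ids
    loss_masks += new_loss_masks

print(build_labels(input_ids, loss_masks))
# {'input_ids': [101, 102, 103, 201, 202], 'labels': [-100, -100, -100, 201, 202]}
```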