---
language:
- zh
tags:
- bert
license: "apache-2.0"
---
# Please use 'Bert'-related functions to load this model!
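A minimal loading sketch with the BERT classes from Hugging Face `transformers` is shown below; the repository ID `hfl/chinese-bert-wwm` is an assumption for illustration and should be replaced with the actual ID of this model card if it differs.

```python
# Minimal sketch: load the model with BERT-specific classes, as the heading
# above requests. "hfl/chinese-bert-wwm" is an assumed repository ID.
# Requires PyTorch for return_tensors="pt".
from transformers import BertTokenizer, BertModel

tokenizer = BertTokenizer.from_pretrained("hfl/chinese-bert-wwm")  # assumed repo ID
model = BertModel.from_pretrained("hfl/chinese-bert-wwm")          # assumed repo ID

# Encode a short Chinese sentence and run a forward pass.
inputs = tokenizer("使用语言模型来预测下一个词的概率。", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (batch, seq_len, hidden_size)
```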
## Chinese BERT with Whole Word Masking

To further accelerate Chinese natural language processing, we provide **Chinese pre-trained BERT with Whole Word Masking**.
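With whole word masking, when any character of a segmented Chinese word is chosen for masking, every character of that word is masked together rather than independently. The toy sketch below (a hypothetical `whole_word_mask` helper, not the authors' training code) illustrates the idea on pre-segmented input.

```python
# Toy illustration of Whole Word Masking on pre-segmented Chinese text.
# This is a simplified sketch of the idea, not the actual pre-training code.
import random

def whole_word_mask(words, mask_prob=0.15, mask_token="[MASK]"):
    """words: a pre-segmented Chinese sentence, e.g. ["使用", "语言", "模型"]."""
    tokens = []
    for word in words:
        if random.random() < mask_prob:
            # Mask every character of the selected word at once,
            # instead of masking individual characters independently.
            tokens.extend([mask_token] * len(word))
        else:
            tokens.extend(list(word))
    return tokens

print(whole_word_mask(["使用", "语言", "模型", "来", "预测"], mask_prob=0.5))
```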
**[Pre-Training with Whole Word Masking for Chinese BERT](https://arxiv.org/abs/1906.08101)**
Yiming Cui, Wanxiang Che, Ting Liu, Bing Qin, Ziqing Yang, Shijin Wang, Guoping Hu

This repository is developed based on https://github.com/google-research/bert

You may also be interested in:

- Chinese BERT series: https://github.com/ymcui/Chinese-BERT-wwm
- Chinese MacBERT: https://github.com/ymcui/MacBERT
- Chinese ELECTRA: https://github.com/ymcui/Chinese-ELECTRA
- Chinese XLNet: https://github.com/ymcui/Chinese-XLNet
- Knowledge Distillation Toolkit - TextBrewer: https://github.com/airaria/TextBrewer

More resources by HFL: https://github.com/ymcui/HFL-Anthology
## Citation
If you find the technical reports or resources useful, please cite the following technical reports in your paper.

- Primary: https://arxiv.org/abs/2004.13922
```
@inproceedings{cui-etal-2020-revisiting,
    title = "Revisiting Pre-Trained Models for {C}hinese Natural Language Processing",
    author = "Cui, Yiming and
      Che, Wanxiang and
      Liu, Ting and
      Qin, Bing and
      Wang, Shijin and
      Hu, Guoping",
    booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing: Findings",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://www.aclweb.org/anthology/2020.findings-emnlp.58",
    pages = "657--668",
}
```
- Secondary: https://arxiv.org/abs/1906.08101
```
@article{chinese-bert-wwm,
    title={Pre-Training with Whole Word Masking for Chinese BERT},
    author={Cui, Yiming and Che, Wanxiang and Liu, Ting and Qin, Bing and Yang, Ziqing and Wang, Shijin and Hu, Guoping},
    journal={arXiv preprint arXiv:1906.08101},
    year={2019}
}
```