Instructions to use Lowin/chinese-bigbird-tiny-1024 with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use Lowin/chinese-bigbird-tiny-1024 with Transformers:
# Use a pipeline as a high-level helper
from transformers import pipeline
pipe = pipeline("feature-extraction", model="Lowin/chinese-bigbird-tiny-1024")

# Load model directly
from transformers import AutoTokenizer, AutoModel
tokenizer = AutoTokenizer.from_pretrained("Lowin/chinese-bigbird-tiny-1024")
model = AutoModel.from_pretrained("Lowin/chinese-bigbird-tiny-1024")
- Notebooks
- Google Colab
- Kaggle
import jieba_fast
from transformers import BertTokenizer
from transformers import BigBirdModel
class JiebaTokenizer(BertTokenizer):
    """BertTokenizer variant that pre-segments text with jieba before WordPiece.

    Each segment produced by the pre-tokenizer that is already present in the
    vocabulary is emitted as a single token; any other segment falls back to
    the standard ``BertTokenizer._tokenize`` sub-word (WordPiece) split.
    """

    def __init__(
        self, pre_tokenizer=lambda x: jieba_fast.cut(x, HMM=False), *args, **kwargs
    ):
        """Initialize the tokenizer.

        Args:
            pre_tokenizer: Callable mapping a string to an iterable of word
                segments. Defaults to ``jieba_fast.cut`` with HMM disabled.
            *args, **kwargs: Forwarded unchanged to ``BertTokenizer.__init__``.
        """
        super().__init__(*args, **kwargs)
        self.pre_tokenizer = pre_tokenizer

    def _tokenize(self, text, *args, **kwargs):
        """Tokenize *text*: whole vocab words stay intact, the rest is WordPieced."""
        split_tokens = []
        # Use a distinct loop variable instead of shadowing the `text` parameter.
        for word in self.pre_tokenizer(text):
            if word in self.vocab:
                # The whole segment is a known vocabulary entry — keep it as one token.
                split_tokens.append(word)
            else:
                # Unknown segment: fall back to the parent WordPiece tokenizer.
                split_tokens.extend(super()._tokenize(word))
        return split_tokens
# Hub repository id of the pretrained Chinese BigBird checkpoint.
_CHECKPOINT = 'Lowin/chinese-bigbird-tiny-1024'

# Load the pretrained encoder and its jieba-aware tokenizer from the same checkpoint.
model = BigBirdModel.from_pretrained(_CHECKPOINT)
tokenizer = JiebaTokenizer.from_pretrained(_CHECKPOINT)
- Downloads last month
- 6