fix(README): remove trust_remote_code requirement from tokenizer snippet

Jonathan Tow 2024-03-01 07:35:30 +00:00 committed by system
parent a7a1fb8a83
commit db5a120c4d

@@ -30,7 +30,7 @@ Get started generating text with `Stable LM 2 1.6B` by using the following code
```python
from transformers import AutoModelForCausalLM, AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b", trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b")
model = AutoModelForCausalLM.from_pretrained(
"stabilityai/stablelm-2-1_6b",
torch_dtype="auto",
@@ -54,7 +54,7 @@ print(tokenizer.decode(tokens[0], skip_special_tokens=True))
```python
from transformers import AutoModelForCausalLM, AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b", trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b")
model = AutoModelForCausalLM.from_pretrained(
"stabilityai/stablelm-2-1_6b",
torch_dtype="auto",