Update README.md
README.md
@@ -30,8 +30,8 @@ In addition, Doge uses Dynamic Mask Attention as sequence transformation and can
 ```python
 >>> from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig, TextStreamer

->>> tokenizer = AutoTokenizer.from_pretrained("wubingheng/
->>> model = AutoModelForCausalLM.from_pretrained("wubingheng/
+>>> tokenizer = AutoTokenizer.from_pretrained("wubingheng/Doge-197M-Medical-SFT")
+>>> model = AutoModelForCausalLM.from_pretrained("wubingheng/Doge-197M-Medical-SFT", trust_remote_code=True)

 >>> generation_config = GenerationConfig(
 ...     max_new_tokens=256,
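
For reference, a minimal sketch of how the updated snippet plugs into a full generation call. Only the imports, the two `from_pretrained` lines, and `max_new_tokens=256` come from the diff; the prompt, the `TextStreamer` wiring, and the closing of the `GenerationConfig` call are assumptions for illustration:

```python
>>> from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig, TextStreamer

>>> tokenizer = AutoTokenizer.from_pretrained("wubingheng/Doge-197M-Medical-SFT")
>>> model = AutoModelForCausalLM.from_pretrained("wubingheng/Doge-197M-Medical-SFT", trust_remote_code=True)

>>> generation_config = GenerationConfig(
...     max_new_tokens=256,
... )

>>> # Stream decoded tokens to stdout as they are generated (assumed usage)
>>> streamer = TextStreamer(tokenizer, skip_prompt=True)

>>> prompt = "What are common symptoms of iron deficiency?"  # hypothetical prompt
>>> inputs = tokenizer(prompt, return_tensors="pt")
>>> outputs = model.generate(**inputs, generation_config=generation_config, streamer=streamer)
```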