Update README.md
README.md CHANGED
@@ -52,10 +52,18 @@ This is a small multilingual language model based on a Transformer architecture
```python
from transformers import AutoTokenizer, AutoModelForCausalLM

-tokenizer = AutoTokenizer.from_pretrained("Duino/Darija-GPT")
-model = AutoModelForCausalLM.from_pretrained("Duino/Darija-GPT")
+tokenizer = AutoTokenizer.from_pretrained("Duino/Darija-GPT")
+model = AutoModelForCausalLM.from_pretrained("Duino/Darija-GPT")

-
+prompt_text = "هذا نموذج لغوي صغير"  # Example prompt in Arabic/Darija
+input_ids = tokenizer.encode(prompt_text, return_tensors="pt").to(model.device)
+
+# Generate text (adjust max_length, temperature, top_p as needed)
+output = model.generate(input_ids, max_new_tokens=50, temperature=0.9, top_p=0.9)
+
+generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
+print("Prompt:", prompt_text)
+print("Generated text:", generated_text)
```

## Training Plot
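One caveat about the `generate()` call added in this diff: in Hugging Face `transformers`, `temperature` and `top_p` only take effect when sampling is enabled, and `generate()` defaults to greedy decoding, which ignores them. The sketch below assumes the same `Duino/Darija-GPT` checkpoint and prompt as the snippet above and simply adds `do_sample=True` so those parameters actually apply:

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

# Same checkpoint as in the README snippet above.
tokenizer = AutoTokenizer.from_pretrained("Duino/Darija-GPT")
model = AutoModelForCausalLM.from_pretrained("Duino/Darija-GPT")

prompt_text = "هذا نموذج لغوي صغير"  # roughly: "this is a small language model"
input_ids = tokenizer.encode(prompt_text, return_tensors="pt").to(model.device)

# do_sample=True switches generate() from greedy decoding to sampling,
# so temperature and top_p actually shape the output distribution.
output = model.generate(
    input_ids,
    max_new_tokens=50,
    do_sample=True,
    temperature=0.9,
    top_p=0.9,
)

print(tokenizer.decode(output[0], skip_special_tokens=True))
```

Because sampling is stochastic, repeated calls can return different continuations; seeding the RNG first (for example with `torch.manual_seed`) makes a run reproducible.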