Update README.md #2
by mioskomi - opened

README.md CHANGED
```diff
@@ -30,14 +30,18 @@ pip install transformers accelerate peft
 
 Load the model.
 ```python
-from transformers import AutoTokenizer, AutoModelForCausalLM
+from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
 from peft import PeftModel, PeftConfig
 
 repo_id = "stefan-m-lenz/Mistral-Nemo-ICDOPS-QA-2024"
 config = PeftConfig.from_pretrained(repo_id, device_map="auto")
-model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, device_map="auto")
+quantization_config = BitsAndBytesConfig(load_in_8bit=True)
+model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path,
+                                             device_map="auto",
+                                             quantization_config=quantization_config)
 model = PeftModel.from_pretrained(model, repo_id, device_map="auto")
-tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path,
+tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path,
+                                          device_map="auto")
 
 # Test input
 test_input = "Was ist der ICD-10-Code für die Tumordiagnose „Bronchialkarzinom, Hauptbronchus“?"
```
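Not part of the diff itself: below is a minimal inference sketch showing how the loaded `model` and `tokenizer` from the snippet above might be used on `test_input` (which asks, in German, "What is the ICD-10 code for the tumor diagnosis 'bronchial carcinoma, main bronchus'?"). The plain-text prompt, `max_new_tokens` value, and greedy decoding are illustrative assumptions, since the prompt format used during fine-tuning is not shown here; note also that 8-bit loading via `BitsAndBytesConfig(load_in_8bit=True)` requires the `bitsandbytes` package to be installed.

```python
# Minimal sketch, not from the README: assumes the "Load the model." snippet
# above has been run, so `model`, `tokenizer`, and `test_input` are defined.
# Prompt format and generation settings here are illustrative assumptions.
import torch

# Tokenize the test question and move it to the model's device.
inputs = tokenizer(test_input, return_tensors="pt").to(model.device)

with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=20, do_sample=False)

# Decode only the newly generated tokens, i.e. the model's answer.
answer = tokenizer.decode(output_ids[0][inputs["input_ids"].shape[-1]:],
                          skip_special_tokens=True)
print(answer)
```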