djoneslocker committed
Commit 5ae5de7 · verified · 1 Parent(s): 658b28b

Update README.md

Files changed (1):
  1. README.md +21 -3
README.md CHANGED
@@ -4,13 +4,31 @@ Make sure you have the transformers library installed:
 
 ## Load a Model from the Hub: Use the from_pretrained method to load the model and its tokenizer.
 ```from transformers import AutoTokenizer, AutoModel
+from transformers import AutoTokenizer, AutoModelForSequenceClassification
+import torch
 
-# Specify the model name
-model_name = "model_name"
+# Replace 'your-username/your-model-name' with your actual model path
+model_name = "your-username/your-model-name"
 
 # Load the tokenizer and model
 tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModel.from_pretrained(model_name)
+model = AutoModelForSequenceClassification.from_pretrained(model_name)
+
+# Tokenize the input text
+input_text = "Hello, how are you?"
+inputs = tokenizer(input_text, return_tensors="pt")
+
+# Run the model
+with torch.no_grad():
+    outputs = model(**inputs)
+
+# Process the outputs
+logits = outputs.logits
+predictions = torch.argmax(logits, dim=-1)
+
+print(f"Predicted class: {predictions.item()}")
+
+
 ```
 
 ## Then what to do
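
The updated snippet stops at printing the raw class index. As one possible follow-up for the "Then what to do" section, here is a minimal sketch that maps that index back to a human-readable label. It reuses the placeholder model path from the diff and assumes the checkpoint was fine-tuned for sequence classification, so its config carries an id2label mapping:

```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

# Placeholder path, as in the diff -- substitute your own Hub repo id
model_name = "your-username/your-model-name"

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

inputs = tokenizer("Hello, how are you?", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Map the winning index back to a label. This assumes the checkpoint was saved
# with real label names in config.json; otherwise id2label only contains
# generic "LABEL_<n>" entries.
predicted_id = logits.argmax(dim=-1).item()
label = model.config.id2label.get(predicted_id, f"LABEL_{predicted_id}")
print(f"Predicted class: {predicted_id} ({label})")
```

The id2label dictionary is read from the model's config.json, so the printed label reflects whatever names were set when the model was pushed to the Hub.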