Loin123 committed on
Commit
1780d64
·
verified ·
1 Parent(s): 3c2a68d

Create AI.py

Browse files
Files changed (1) hide show
  1. AI.py +25 -0
AI.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import AutoModelForCausalLM, AutoTokenizer
2
+
3
+ model_name = "nvidia/AceMath-72B-Instruct"
4
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
5
+ model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto", device_map="auto")
6
+
7
+ prompt = "Jen enters a lottery by picking $4$ distinct numbers from $S=\\{1,2,3,\\cdots,9,10\\}.$ $4$ numbers are randomly chosen from $S.$ She wins a prize if at least two of her numbers were $2$ of the randomly chosen numbers, and wins the grand prize if all four of her numbers were the randomly chosen numbers. The probability of her winning the grand prize given that she won a prize is $\\tfrac{m}{n}$ where $m$ and $n$ are relatively prime positive integers. Find $m+n$."
8
+ messages = [{"role": "user", "content": prompt}]
9
+
10
+ text = tokenizer.apply_chat_template(
11
+ messages,
12
+ tokenize=False,
13
+ add_generation_prompt=True
14
+ )
15
+ model_inputs = tokenizer([text], return_tensors="pt").to("cuda")
16
+
17
+ generated_ids = model.generate(
18
+ **model_inputs,
19
+ max_new_tokens=2048
20
+ )
21
+ generated_ids = [
22
+ output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
23
+ ]
24
+
25
+ response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]