chbsaikiran committed on
Commit
1ee31ef
·
1 Parent(s): 561a912

using smaller size model binary

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -30,10 +30,10 @@ config.rope_theta = 10000.0
30
  model = LlamaForCausalLM(config)
31
  device = "cpu"
32
  model_id = "chbsaikiran/smollm2_135M_model"
33
- checkpoint_path = hf_hub_download(repo_id=model_id, filename="model_bin.pt")
34
 
35
  checkpoint = torch.load(checkpoint_path, map_location=device)
36
- model.load_state_dict(checkpoint['model_state_dict'])
37
  model.to(device)
38
  model.eval()
39
 
 
30
  model = LlamaForCausalLM(config)
31
  device = "cpu"
32
  model_id = "chbsaikiran/smollm2_135M_model"
33
+ checkpoint_path = hf_hub_download(repo_id=model_id, filename="model_state_dict.pt")
34
 
35
  checkpoint = torch.load(checkpoint_path, map_location=device)
36
+ model.load_state_dict(checkpoint)
37
  model.to(device)
38
  model.eval()
39