File size: 1,206 Bytes
d6f5b34
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5f96dea
 
d6f5b34
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
---
license: apache-2.0
datasets:
- jkhedri/psychology-dataset
---

**Usage**
```python
from llama_cpp import Llama
from typing import Optional
import time
from huggingface_hub import hf_hub_download

def generate_prompt(input_text: str, instruction: Optional[str] = None) -> str:
    """Build the model prompt: a Question/Answer scaffold, optionally
    preceded by an '### Instruction:' section when *instruction* is given."""
    qa_block = f"### Question: {input_text}\n\n### Answer: "
    if instruction is None:
        return qa_block
    return f"### Instruction: {instruction}\n\n{qa_block}"

# Set up the download parameters for the GGUF checkpoint on the Hugging Face Hub
repo_id = "vdpappu/gemma2_psychologist_2_gguf"
filename = "gemma2_psychologist_2.gguf"
local_dir = "."  # save the weights into the current working directory

# Fetch the model file (requires network on first run; cached thereafter)
downloaded_file_path = hf_hub_download(repo_id=repo_id, filename=filename, local_dir=local_dir)
print(f"File downloaded to: {downloaded_file_path}")

# Load the model 
llm = Llama(model_path=downloaded_file_path)
question = "I feel lonely. What should I do?"
prompt = generate_prompt(input_text=question)

# Run one generation and time it
start = time.time()
output = llm(prompt, 
             temperature=0.7,
             top_p=0.9,
             top_k=50,
             repeat_penalty=1.5,
             max_tokens=200, 
             stop=["Question:","<eos>"])  # stop when a new question or end-of-sequence token appears
end = time.time()
print(f"Inference time: {end-start:.2f} seconds \n")
# llama_cpp returns an OpenAI-style completion dict; print the generated text
print(output['choices'][0]['text'])
```