Tonic committed on
Commit
2ca300b
1 Parent(s): a1c598c

refactor main for utils.py

Browse files
Files changed (2) hide show
  1. app.py +5 -12
  2. utils.py +31 -0
app.py CHANGED
@@ -14,28 +14,21 @@ import gradio as gr
14
  from huggingface_hub import InferenceClient
15
  import openai
16
  from openai import OpenAI
17
- from globalvars import API_BASE, API_KEY, intention_prompt
18
  from dotenv import load_dotenv
 
 
 
19
 
20
  os.environ['PYTORCH_CUDA_ALLOC_CONF'] = 'max_split_size_mb:30'
21
  os.environ['CUDA_LAUNCH_BLOCKING'] = '1'
22
  os.environ['CUDA_CACHE_DISABLE'] = '1'
23
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
24
 
25
- def load_env_variables():
26
- # Load the .env file
27
- load_dotenv()
28
-
29
- # Retrieve the environment variables
30
- hf_token = os.getenv('HF_TOKEN')
31
- yi_token = os.getenv('YI_TOKEN')
32
-
33
- return hf_token, yi_token
34
 
35
  hf_token, yi_token = load_env_variables()
36
 
37
-
38
-
39
  ## use instruct embeddings
40
  # Load the tokenizer and model
41
  tokenizer = AutoTokenizer.from_pretrained('nvidia/NV-Embed-v1', token = hf_token , trust_remote_code=True)
 
14
  from huggingface_hub import InferenceClient
15
  import openai
16
  from openai import OpenAI
17
+ from globalvars import API_BASE, intention_prompt, tasks
18
  from dotenv import load_dotenv
19
+ import re
20
+ from utils import load_env_variables
21
+
22
 
23
  os.environ['PYTORCH_CUDA_ALLOC_CONF'] = 'max_split_size_mb:30'
24
  os.environ['CUDA_LAUNCH_BLOCKING'] = '1'
25
  os.environ['CUDA_CACHE_DISABLE'] = '1'
26
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
27
 
28
+ ### Utils
 
 
 
 
 
 
 
 
29
 
30
  hf_token, yi_token = load_env_variables()
31
 
 
 
32
  ## use instruct embeddings
33
  # Load the tokenizer and model
34
  tokenizer = AutoTokenizer.from_pretrained('nvidia/NV-Embed-v1', token = hf_token , trust_remote_code=True)
utils.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ from dotenv import load_dotenv
3
+ import re
4
+ import os
5
+ from globalvars import tasks
6
+
7
def load_env_variables():
    """Return the (HF_TOKEN, YI_TOKEN) pair from the environment.

    Loads a local ``.env`` file first (if one exists) so the tokens can be
    supplied either via the shell environment or via python-dotenv.
    Either element may be ``None`` when the variable is unset.
    """
    # Merge any .env entries into os.environ before reading.
    load_dotenv()
    return os.getenv('HF_TOKEN'), os.getenv('YI_TOKEN')
16
+
17
def parse_and_route(example_output: str):
    """Extract the task flagged ``true`` in *example_output* and route it.

    Scans the model output for the first occurrence of ``"<task>": true``
    and looks the task name up in the module-level ``tasks`` mapping.

    Args:
        example_output: Raw model output expected to contain a JSON-like
            fragment such as ``{"summarize": true, ...}``.

    Returns:
        ``{task_name: tasks[task_name]}`` when the flagged task is known,
        ``{task_name: "Task description not found"}`` when it is not in
        ``tasks``, or the string
        ``"No true task found in the example output"`` when nothing matched.
        (NOTE(review): the mixed dict/str return type is preserved for
        backward compatibility with existing callers.)
    """
    # \s* (was \s?) tolerates any amount of whitespace after the colon,
    # e.g. pretty-printed JSON with aligned values; \s? missed those.
    pattern = r'"(\w+)":\s*true'

    # Only the first true task is honored, matching the original behavior.
    match = re.search(pattern, example_output)

    if match:
        true_task = match.group(1)
        if true_task in tasks:
            return {true_task: tasks[true_task]}
        else:
            return {true_task: "Task description not found"}
    else:
        return "No true task found in the example output"