teddyllm committed on
Commit
ef2d982
1 Parent(s): 4dd4347

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -12,7 +12,7 @@ from folium.plugins import Fullscreen
12
  from geopy.geocoders import Nominatim
13
  from collections import OrderedDict
14
  from geopy.adapters import AioHTTPAdapter
15
- from huggingface_hub import InferenceClient
16
 
17
  from examples import (
18
  description_sf,
@@ -24,8 +24,8 @@ from examples import (
24
  trip_examples
25
  )
26
 
27
- repo_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
28
- llm_client = InferenceClient(model=repo_id, timeout=180, token=os.getenv("hf_token"))
29
  end_sequence = "I hope that helps!"
30
 
31
  def generate_key_points(text):
@@ -49,7 +49,7 @@ Now begin. You can make the descriptions a bit more verbose than in the examples
49
 
50
  Description: {text}
51
  Thought:"""
52
- return llm_client.text_generation(prompt, max_new_tokens=2000, stream=True, stop_sequences=[end_sequence])
53
 
54
 
55
  def parse_llm_output(output):
 
12
  from geopy.geocoders import Nominatim
13
  from collections import OrderedDict
14
  from geopy.adapters import AioHTTPAdapter
15
+ from langchain_nvidia_ai_endpoints import ChatNVIDIA
16
 
17
  from examples import (
18
  description_sf,
 
24
  trip_examples
25
  )
26
 
27
+ repo_id = "meta/llama-3.1-405b-instruct"
28
+ llm_client = ChatNVIDIA(model=repo_id)
29
  end_sequence = "I hope that helps!"
30
 
31
  def generate_key_points(text):
 
49
 
50
  Description: {text}
51
  Thought:"""
52
+ return llm_client.invoke(prompt).content
53
 
54
 
55
  def parse_llm_output(output):