zeeshan391 committed
Commit 70e9eac
1 Parent(s): 258f49c

updated app.py

Files changed (1): app.py +4 -3
app.py CHANGED
@@ -64,7 +64,7 @@ Total pages in storie are seven each page have one short paragraph and dont ask
 to create another adventure soon!
 """
 
-prompt_template = ChatPromptTemplate.from_messages([("system", system), ("human", "{text}")])
+# prompt_template = ChatPromptTemplate.from_messages([("system", system), ("human", "{text}")])
 
 @app.get("/")
 def read_root():
@@ -79,8 +79,9 @@ async def generate_story(story_request: StoryRequest):
     - **Theme:** {story_request.theme}
     - **Details Provided:** {story_request.txt}
     """
+    prompt_template = ChatPromptTemplate.from_messages([("system", system), ("human", story)])
 
-    final_prompt = prompt_template.format(text=story)
+    # final_prompt = prompt_template.format(text=story)
 
     # Create the LLMChain
     # chain = LLMChain(llm=llm, prompt=prompt_template)
@@ -94,7 +95,7 @@ async def generate_story(story_request: StoryRequest):
     # response = chain.invoke(final_prompt)
     # async for s in chain.astream(final_prompt):
     #     print(s.content, end="", flush=True)
-    response = await chain.ainvoke(final_prompt)
+    response = await chain.ainvoke()
 
     if not response:
         raise HTTPException(status_code=500, detail="Failed to generate the story")
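
For context, a minimal sketch of how the prompt construction introduced in this commit is typically wired to a chat model and awaited. Everything not visible in the hunks above is an assumption: the ChatOpenAI model, the LCEL pipeline (prompt_template | llm), and the empty input mapping passed to ainvoke (the prompt no longer declares a "{text}" variable, but Runnable.ainvoke still expects an input argument).

# Hypothetical, self-contained sketch; names that do not appear in the diff
# (ChatOpenAI, llm, chain, run) are assumptions, not the actual app.py code.
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-4o-mini")  # placeholder model name

system = "You are a story writer for children."  # stands in for the long system prompt in app.py
story = "Create a story.\n- Theme: space\n- Details Provided: a curious robot"  # stands in for the formatted request text

# As in new line 82: the rendered story text becomes the human message directly,
# so there is no "{text}" placeholder and no prompt_template.format(...) step.
# Note: any literal "{" left in `story` would be parsed as a template variable
# by from_messages, so the string should already be fully rendered here.
prompt_template = ChatPromptTemplate.from_messages([("system", system), ("human", story)])
chain = prompt_template | llm  # assumed LCEL chain; app.py's real `chain` is defined outside these hunks

async def run() -> str:
    # Runnable.ainvoke takes an input mapping; it is empty because the prompt
    # has no remaining template variables.
    response = await chain.ainvoke({})
    return response.content

Because the prompt is now built per request from fixed message strings, the earlier final_prompt = prompt_template.format(text=story) step becomes redundant, which is why this commit comments it out.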