fikriazain committed on
Commit
9c9bd93
·
1 Parent(s): 8fd8ae7

add name field

Browse files
Files changed (2) hide show
  1. bot/panda_bot.py +6 -3
  2. server.py +4 -3
bot/panda_bot.py CHANGED
@@ -23,7 +23,7 @@ Patient Care Focus: As an assistant, Panda should regularly check in on the pati
23
 
24
  Patient Information:
25
 
26
- - Patient Name: '{username}'
27
  - Hemodialysis Schedule 1: {schedule_one}
28
  - Hemodialysis Schedule 2: {schedule_two}
29
  - Liquid Intake: {liquid_intake}
@@ -120,6 +120,7 @@ class CustomPromptTemplate(StringPromptTemplate):
120
  blood_pressure: str
121
  food_intake: str
122
  current_time: str
 
123
 
124
  def format(self, **kwargs) -> str:
125
  # Get the intermediate steps (AgentAction, Observation tuples)
@@ -146,6 +147,7 @@ class CustomPromptTemplate(StringPromptTemplate):
146
  kwargs["blood_pressure"] = self.blood_pressure
147
  kwargs["food_intake"] = self.food_intake
148
  kwargs["current_time"] = self.current_time
 
149
 
150
  return self.template.format(**kwargs)
151
 
@@ -197,7 +199,7 @@ class PandaBot:
197
  # os.environ["LANGCHAIN_PROJECT"] = 'LLM Patient Monitoring'
198
 
199
 
200
- def query(self, input_text: str, username: str, memory: any, schedule_one:any, schedule_two:any, liquid_intake:any, temperature:any, blood_pressure:any, food_intake:any, current_time:any) -> str:
201
  prompt = CustomPromptTemplate(
202
  input_variables=["input", "intermediate_steps", "chat_history"],
203
  template=template,
@@ -210,7 +212,8 @@ class PandaBot:
210
  temperature=temperature,
211
  blood_pressure=blood_pressure,
212
  food_intake=food_intake,
213
- current_time=current_time)
 
214
 
215
  self.memory = memory
216
 
 
23
 
24
  Patient Information:
25
 
26
+ - Patient Name: '{name}'
27
  - Hemodialysis Schedule 1: {schedule_one}
28
  - Hemodialysis Schedule 2: {schedule_two}
29
  - Liquid Intake: {liquid_intake}
 
120
  blood_pressure: str
121
  food_intake: str
122
  current_time: str
123
+ name: str
124
 
125
  def format(self, **kwargs) -> str:
126
  # Get the intermediate steps (AgentAction, Observation tuples)
 
147
  kwargs["blood_pressure"] = self.blood_pressure
148
  kwargs["food_intake"] = self.food_intake
149
  kwargs["current_time"] = self.current_time
150
+ kwargs["name"] = self.name
151
 
152
  return self.template.format(**kwargs)
153
 
 
199
  # os.environ["LANGCHAIN_PROJECT"] = 'LLM Patient Monitoring'
200
 
201
 
202
+ def query(self, input_text: str, username: str, memory: any, schedule_one:any, schedule_two:any, liquid_intake:any, temperature:any, blood_pressure:any, food_intake:any, current_time:any, name:any) -> str:
203
  prompt = CustomPromptTemplate(
204
  input_variables=["input", "intermediate_steps", "chat_history"],
205
  template=template,
 
212
  temperature=temperature,
213
  blood_pressure=blood_pressure,
214
  food_intake=food_intake,
215
+ current_time=current_time,
216
+ name=name)
217
 
218
  self.memory = memory
219
 
server.py CHANGED
@@ -43,7 +43,7 @@ Output:
43
  # llm.memory.chat_memory.add_ai_message(AIMessage(response))
44
  # return jsonify({"response": translate})
45
 
46
- def simulate_llm_query(user_input, username, schedule_one, schedule_two, liquid_intake, temperature, blood_pressure, food_intake):
47
  """
48
  Simulates querying a language model.
49
  Replace this function's logic with actual LLM querying.
@@ -53,7 +53,7 @@ def simulate_llm_query(user_input, username, schedule_one, schedule_two, liquid_
53
 
54
  now = datetime.now()
55
  current_time = now.strftime("%A, %I:%M %p")
56
- return llm.query(user_input, username, memory, schedule_one, schedule_two, liquid_intake, temperature, blood_pressure, food_intake, current_time)
57
 
58
  @app.route('/query', methods=['POST'])
59
  def query_llm():
@@ -66,13 +66,14 @@ def query_llm():
66
  temperature = str(data.get('temperature'))
67
  blood_pressure = str(data.get('blood_pressure'))
68
  food_intake = str(data.get('food_intake'))
 
69
 
70
  input_translate = GoogleTranslator(source='id', target='en').translate(user_input)
71
 
72
  if not user_input:
73
  return jsonify({"error": "No input provided"}), 400
74
 
75
- response = simulate_llm_query(input_translate, username, schedule_one, schedule_two, liquid_intake, temperature, blood_pressure, food_intake)
76
 
77
  output_translate = GoogleTranslator(source='en', target='id').translate(response)
78
 
 
43
  # llm.memory.chat_memory.add_ai_message(AIMessage(response))
44
  # return jsonify({"response": translate})
45
 
46
+ def simulate_llm_query(user_input, username, schedule_one, schedule_two, liquid_intake, temperature, blood_pressure, food_intake, name):
47
  """
48
  Simulates querying a language model.
49
  Replace this function's logic with actual LLM querying.
 
53
 
54
  now = datetime.now()
55
  current_time = now.strftime("%A, %I:%M %p")
56
+ return llm.query(user_input, username, memory, schedule_one, schedule_two, liquid_intake, temperature, blood_pressure, food_intake, current_time, name)
57
 
58
  @app.route('/query', methods=['POST'])
59
  def query_llm():
 
66
  temperature = str(data.get('temperature'))
67
  blood_pressure = str(data.get('blood_pressure'))
68
  food_intake = str(data.get('food_intake'))
69
+ name = str(data.get('name'))
70
 
71
  input_translate = GoogleTranslator(source='id', target='en').translate(user_input)
72
 
73
  if not user_input:
74
  return jsonify({"error": "No input provided"}), 400
75
 
76
+ response = simulate_llm_query(input_translate, username, schedule_one, schedule_two, liquid_intake, temperature, blood_pressure, food_intake, name)
77
 
78
  output_translate = GoogleTranslator(source='en', target='id').translate(response)
79