khurrameycon committed on
Commit 0cef40a · verified · 1 Parent(s): c0658b2

Update app.py

Files changed (1)
  1. app.py +20 -1
app.py CHANGED
@@ -164,12 +164,31 @@ def llm_chat_response(text: str, image_url: Optional[str] = None) -> str:
         ]
 
         logger.info("Sending request to model...")
+        logger.info(f"Request payload: {messages}")
+
         completion = client.chat.completions.create(
             model="meta-llama/Llama-3.2-11B-Vision-Instruct",
             messages=messages,
             max_tokens=500
         )
-        return completion.choices[0].message['content']
+
+        logger.info(f"Response received: {completion}")
+
+        # Check the structure of the response and extract content
+        if hasattr(completion, 'choices') and len(completion.choices) > 0:
+            message = completion.choices[0].message
+            # Handle different response formats
+            if isinstance(message, dict) and 'content' in message:
+                return message['content']
+            elif hasattr(message, 'content'):
+                return message.content
+            else:
+                logger.error(f"Unexpected message format: {message}")
+                return str(message)
+        else:
+            logger.error(f"Unexpected completion format: {completion}")
+            return str(completion)
+
     except Exception as e:
         logger.error(f"Error in llm_chat_response: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
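
The new branching covers the two response shapes the chat-completions client may hand back: an attribute-style message object or a plain dict. Below is a minimal, self-contained sketch of that extraction logic with stand-in response objects instead of a real client.chat.completions.create() call; the helper name extract_content and the SimpleNamespace fakes are illustrative and not part of app.py.

from types import SimpleNamespace

def extract_content(completion) -> str:
    # Mirrors the handling added in this commit: dict access first,
    # then attribute access, with str() fallbacks for unexpected shapes.
    if hasattr(completion, 'choices') and len(completion.choices) > 0:
        message = completion.choices[0].message
        if isinstance(message, dict) and 'content' in message:
            return message['content']      # dict-style message
        if hasattr(message, 'content'):
            return message.content         # object-style message
        return str(message)                # unknown message shape
    return str(completion)                 # unknown completion shape

# Object-style completion, as an OpenAI-compatible client returns
obj_style = SimpleNamespace(
    choices=[SimpleNamespace(message=SimpleNamespace(content="hi there"))])
# Same completion, but with a dict-style message payload
dict_style = SimpleNamespace(
    choices=[SimpleNamespace(message={'content': "hi there"})])

assert extract_content(obj_style) == "hi there"
assert extract_content(dict_style) == "hi there"

Checking the dict shape before attribute access preserves the original return completion.choices[0].message['content'] behavior while also accepting object-style responses, and the str() fallbacks keep the endpoint from raising on an unexpected payload.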