@@ -198,8 +198,14 @@ def process_visual_qa(input_params,status_variables,filename):
        local_file_path = download_file(bucket_name,filename)
        base64_images = encode_image_to_base64(local_file_path,filename)
        status_variables['answer'] = generate_vision_answer_bedrock(_qa_llm,base64_images, qa_modelId,decoded_question)
-        status_variables['jobstatus'] = JobStatus.DONE.status
-        streaming = input_params.get("streaming", False)
+        if (status_variables['answer'] is None):
+            status_variables['answer'] = JobStatus.ERROR_PREDICTION.status
+            error = JobStatus.ERROR_PREDICTION.get_message()
+            status_variables['answer'] = error.decode("utf-8")
+            status_variables['jobstatus'] = JobStatus.ERROR_PREDICTION.status
+        else:
+            status_variables['jobstatus'] = JobStatus.DONE.status
+            streaming = input_params.get("streaming", False)

    else:
        logger.error('Invalid Model , cannot load LLM , returning..')
@@ -253,7 +259,7 @@ def generate_vision_answer_sagemaker(_qa_llm,input_params,decoded_question,statu

    return status_variables

-def generate_vision_answer_bedrock(bedrock_client,base64_images, model_id,decoded_question):
+def generate_vision_answer_bedrock(bedrock_client,base64_images,model_id,decoded_question):
    system_prompt = ""
    # use system prompt for fine tuning the performance
    # system_prompt= """
@@ -293,10 +299,15 @@ def generate_vision_answer_bedrock(bedrock_client,base64_images, model_id,decode
    }

    body = json.dumps({'messages': [messages],**claude_config, "system": system_prompt})
-    response = bedrock_client.invoke_model(
-        body=body, modelId=model_id, accept="application/json",
-        contentType="application/json"
-    )
+    try:
+        response = bedrock_client.invoke_model(
+            body=body, modelId=model_id, accept="application/json",
+            contentType="application/json"
+        )
+    except Exception as err:
+        logger.exception(f'Error occurred , Reason :{err}')
+        return None
+
    response = json.loads(response['body'].read().decode('utf-8'))

    formated_response = response['content'][0]['text']
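
The invoke_model call above serializes a messages list and a claude_config dict that are built earlier in generate_vision_answer_bedrock and fall outside this diff. Assuming the standard Anthropic Messages format that Claude 3 models on Amazon Bedrock expect, the payload that ends up in body would look roughly like the sketch below; the variable values are placeholders, not the repository's actual code.

    import json

    # Placeholder stand-ins for values the surrounding function provides.
    base64_image = "<base64-encoded JPEG produced by encode_image_to_base64>"
    decoded_question = "What is shown in this picture?"
    system_prompt = ""

    # Anthropic Messages API payload as expected by Claude 3 models on Bedrock:
    # a single user turn mixing an image block (base64 source) and a text block.
    messages = {
        "role": "user",
        "content": [
            {
                "type": "image",
                "source": {"type": "base64", "media_type": "image/jpeg", "data": base64_image},
            },
            {"type": "text", "text": decoded_question},
        ],
    }
    claude_config = {"anthropic_version": "bedrock-2023-05-31", "max_tokens": 1000}

    # Same serialization as in the hunk above.
    body = json.dumps({'messages': [messages], **claude_config, "system": system_prompt})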
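
The failure branch added in process_visual_qa assumes that JobStatus exposes a per-state status string and a get_message() helper that returns bytes, which is why the handler decodes the message with UTF-8 before storing it as the answer. A minimal sketch of that assumed interface follows; the member values are illustrative, not the repository's actual definition.

    from enum import Enum

    class JobStatus(Enum):
        # Illustrative members only; the real enum lives elsewhere in the repository.
        DONE = ("Done", b"Job completed")
        ERROR_PREDICTION = ("Error", b"Not able to generate an answer, please check the logs")

        @property
        def status(self) -> str:
            # Short state string stored in status_variables['jobstatus'].
            return self.value[0]

        def get_message(self) -> bytes:
            # Bytes payload, hence the .decode("utf-8") call in the handler.
            return self.value[1]

    # Mirrors the failure path added in process_visual_qa.
    error = JobStatus.ERROR_PREDICTION.get_message()
    answer = error.decode("utf-8")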