modules/py_ml: Remove input/output callbacks.

iabdalkader 2024-07-07 21:47:21 +03:00
parent 70b89f4744
commit 93e17a3a8d
3 changed files with 15 additions and 36 deletions
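Summary of the change: input/output marshalling no longer goes through backend callbacks. The backend's inference entry point shrinks to a single-argument call, and py_ml.c now fills the input tensors and collects the output tensors directly around it. In prototype form (both signatures taken from the diff below):

    // Before: the backend drove input/output conversion through caller-supplied callbacks.
    int ml_backend_run_inference(py_ml_model_obj_t *model,
                                 ml_backend_input_callback_t input_callback,
                                 void *input_arg,
                                 ml_backend_output_callback_t output_callback,
                                 void *output_arg);

    // After: the backend only runs the interpreter; marshalling lives in py_ml.c.
    int ml_backend_run_inference(py_ml_model_obj_t *model);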

View File

@@ -278,21 +278,14 @@ int ml_backend_init_model(py_ml_model_obj_t *model) {
     return 0;
 }
 
-int ml_backend_run_inference(py_ml_model_obj_t *model,
-                             ml_backend_input_callback_t input_callback,
-                             void *input_arg,
-                             ml_backend_output_callback_t output_callback,
-                             void *output_arg) {
+int ml_backend_run_inference(py_ml_model_obj_t *model) {
     RegisterDebugLogCallback(ml_backend_log_handler);
     ml_backend_state_t *state = (ml_backend_state_t *) model->state;
 
-    input_callback(model, input_arg);
-
     if (state->interpreter->Invoke() != kTfLiteOk) {
         mp_raise_msg(&mp_type_ValueError, MP_ERROR_TEXT("Invoke failed"));
     }
 
-    output_callback(model, output_arg);
-
     return 0;
 }
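With the callbacks gone, the backend's only job during inference is to invoke its interpreter; it no longer needs to know how the front end stores or converts data. A minimal sketch of what another backend would have to provide under the new contract (the my_engine_* names below are placeholders, not part of this repository):

    // Hypothetical alternative backend against the new single-argument contract.
    // Input tensors are assumed to already be populated by py_ml_process_input()
    // before this function is called, and outputs are read back afterwards.
    int ml_backend_run_inference(py_ml_model_obj_t *model) {
        my_engine_state_t *state = (my_engine_state_t *) model->state;  // placeholder state type
        if (my_engine_invoke(state) != 0) {                             // placeholder invoke call
            mp_raise_msg(&mp_type_ValueError, MP_ERROR_TEXT("Invoke failed"));
        }
        return 0;
    }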

View File

@@ -37,9 +37,8 @@ static size_t py_ml_tuple_sum(mp_obj_tuple_t *o) {
     return size;
 }
 
-// TF Input/Output callback functions.
-static void py_ml_input_callback(py_ml_model_obj_t *model, void *arg) {
-    mp_obj_list_t *input_list = MP_OBJ_TO_PTR(*((mp_obj_t *) arg));
+static void py_ml_process_input(py_ml_model_obj_t *model, mp_obj_t arg) {
+    mp_obj_list_t *input_list = MP_OBJ_TO_PTR(arg);
     for (size_t i = 0; i < model->inputs_size; i++) {
         void *input_buffer = ml_backend_get_input(model, i);
@@ -102,7 +101,7 @@ static void py_ml_input_callback(py_ml_model_obj_t *model, void *arg) {
     }
 }
 
-static void py_ml_output_callback(py_ml_model_obj_t *model, void *arg) {
+static mp_obj_t py_ml_process_output(py_ml_model_obj_t *model) {
     mp_obj_list_t *output_list = MP_OBJ_TO_PTR(mp_obj_new_list(model->outputs_size, NULL));
     for (size_t i = 0; i < model->outputs_size; i++) {
         void *model_output = ml_backend_get_output(model, i);
@@ -131,7 +130,7 @@ static void py_ml_output_callback(py_ml_model_obj_t *model, void *arg) {
         }
         output_list->items[i] = MP_OBJ_FROM_PTR(output);
     }
-    *((mp_obj_t *) arg) = MP_OBJ_FROM_PTR(output_list);
+    return MP_OBJ_FROM_PTR(output_list);
 }
 
 // TF Model Object.
@@ -160,25 +159,22 @@ static mp_obj_t py_ml_model_predict(uint n_args, const mp_obj_t *pos_args, mp_ma
     py_ml_model_obj_t *model = MP_OBJ_TO_PTR(pos_args[0]);
-    mp_obj_t input_data = pos_args[1];
-    ml_backend_input_callback_t input_callback = py_ml_input_callback;
-    mp_obj_t output_data;
-    ml_backend_output_callback_t output_callback = py_ml_output_callback;
 
-    if (!MP_OBJ_IS_TYPE(input_data, &mp_type_list)) {
+    if (!MP_OBJ_IS_TYPE(pos_args[1], &mp_type_list)) {
         mp_raise_msg(&mp_type_ValueError, MP_ERROR_TEXT("Unsupported input type. Expected a list"));
     }
 
-    ml_backend_run_inference(model, input_callback, &input_data, output_callback, &output_data);
+    py_ml_process_input(model, pos_args[1]);
+    ml_backend_run_inference(model);
+    mp_obj_t output = py_ml_process_output(model);
 
     if (args[ARG_callback].u_obj != mp_const_none) {
         // Pass model, inputs, outputs to the post-processing callback.
-        mp_obj_t fargs[3] = { MP_OBJ_FROM_PTR(model), pos_args[1], output_data };
-        output_data = mp_call_function_n_kw(args[ARG_callback].u_obj, 3, 0, fargs);
+        mp_obj_t fargs[3] = { MP_OBJ_FROM_PTR(model), pos_args[1], output };
+        output = mp_call_function_n_kw(args[ARG_callback].u_obj, 3, 0, fargs);
     }
 
-    return output_data;
+    return output;
 }
 static MP_DEFINE_CONST_FUN_OBJ_KW(py_ml_model_predict_obj, 2, py_ml_model_predict);
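Seen end to end, the predict path in py_ml.c now boils down to three direct calls instead of one call that takes two callbacks and two out-parameters. A condensed view of just the lines touched by this patch (the surrounding argument handling is elided):

    py_ml_process_input(model, pos_args[1]);        // copy the Python input list into the model's input tensors
    ml_backend_run_inference(model);                // invoke the backend interpreter; raises ValueError on failure
    mp_obj_t output = py_ml_process_output(model);  // collect the output tensors into a new Python list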

View File

@@ -33,22 +33,12 @@ typedef struct py_ml_model_obj {
 // Initialize a model.
 int ml_backend_init_model(py_ml_model_obj_t *model);
 
-// Callback to populate the model input data.
-typedef void (*ml_backend_input_callback_t) (py_ml_model_obj_t *model, void *arg);
-// Callback to get the model output data.
-typedef void (*ml_backend_output_callback_t) (py_ml_model_obj_t *model, void *arg);
+// Run inference.
+int ml_backend_run_inference(py_ml_model_obj_t *model);
 
 // Return an input tensor by index.
 void *ml_backend_get_input(py_ml_model_obj_t *model, size_t index);
 
 // Return an output tensor by index.
 void *ml_backend_get_output(py_ml_model_obj_t *model, size_t index);
 
-// Run inference.
-int ml_backend_run_inference(py_ml_model_obj_t *model,
-                             ml_backend_input_callback_t input_callback, // Callback to populate the model input data.
-                             void *input_data, // User data structure passed to input callback.
-                             ml_backend_output_callback_t output_callback, // Callback to use the model output data.
-                             void *output_data); // User data structure passed to output callback.
-
 #endif // __PY_ML_H__
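For reference, the backend interface left in py_ml.h after this change consists of the four declarations below, reassembled from the hunk above (spacing is approximate):

    // Initialize a model.
    int ml_backend_init_model(py_ml_model_obj_t *model);
    // Run inference.
    int ml_backend_run_inference(py_ml_model_obj_t *model);
    // Return an input tensor by index.
    void *ml_backend_get_input(py_ml_model_obj_t *model, size_t index);
    // Return an output tensor by index.
    void *ml_backend_get_output(py_ml_model_obj_t *model, size_t index);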