[FFmpeg-devel] [PATCH 2/2] lavfi/dnn_backend_ov: Rename RequestItem to OVRequestItem
Guo, Yejun
yejun.guo at intel.com
Wed Jul 21 16:04:43 EEST 2021
> -----Original Message-----
> From: ffmpeg-devel <ffmpeg-devel-bounces at ffmpeg.org> On Behalf Of
> Shubhanshu Saxena
> Sent: July 12, 2021 0:15
> To: ffmpeg-devel at ffmpeg.org
> Cc: Shubhanshu Saxena <shubhanshu.e01 at gmail.com>
> Subject: [FFmpeg-devel] [PATCH 2/2] lavfi/dnn_backend_ov: Rename
> RequestItem to OVRequestItem
>
> Rename RequestItem to OVRequestItem in the OpenVINO backend to avoid
> confusion.
>
> Signed-off-by: Shubhanshu Saxena <shubhanshu.e01 at gmail.com>
> ---
> libavfilter/dnn/dnn_backend_openvino.c | 24 ++++++++++++------------
> 1 file changed, 12 insertions(+), 12 deletions(-)
>
> diff --git a/libavfilter/dnn/dnn_backend_openvino.c b/libavfilter/dnn/dnn_backend_openvino.c
> index b340859c12..f8d548feaf 100644
> --- a/libavfilter/dnn/dnn_backend_openvino.c
> +++ b/libavfilter/dnn/dnn_backend_openvino.c
> @@ -54,18 +54,18 @@ typedef struct OVModel{
> ie_core_t *core;
> ie_network_t *network;
> ie_executable_network_t *exe_network;
> - SafeQueue *request_queue; // holds RequestItem
> + SafeQueue *request_queue; // holds OVRequestItem
> Queue *task_queue; // holds TaskItem
> Queue *inference_queue; // holds InferenceItem
> } OVModel;
>
> // one request for one call to openvino
> -typedef struct RequestItem {
> +typedef struct OVRequestItem {
> ie_infer_request_t *infer_request;
> InferenceItem **inferences;
> uint32_t inference_count;
> ie_complete_call_back_t callback;
> -} RequestItem;
> +} OVRequestItem;
>
> #define APPEND_STRING(generated_string, iterate_string) \
>     generated_string = generated_string ? av_asprintf("%s %s", generated_string, iterate_string) : \
> @@ -111,7 +111,7 @@ static int get_datatype_size(DNNDataType dt)
> }
> }
>
> -static DNNReturnType fill_model_input_ov(OVModel *ov_model, RequestItem *request)
> +static DNNReturnType fill_model_input_ov(OVModel *ov_model, OVRequestItem *request)
> {
> dimensions_t dims;
> precision_e precision;
> @@ -198,7 +198,7 @@ static void infer_completion_callback(void *args)
> dimensions_t dims;
> precision_e precision;
> IEStatusCode status;
> - RequestItem *request = args;
> + OVRequestItem *request = args;
> InferenceItem *inference = request->inferences[0];
> TaskItem *task = inference->task;
> OVModel *ov_model = task->model;
> @@ -381,7 +381,7 @@ static DNNReturnType init_model_ov(OVModel *ov_model, const char *input_name, co
> }
>
> for (int i = 0; i < ctx->options.nireq; i++) {
> - RequestItem *item = av_mallocz(sizeof(*item));
> + OVRequestItem *item = av_mallocz(sizeof(*item));
> if (!item) {
> goto err;
> }
> @@ -422,7 +422,7 @@ err:
> return DNN_ERROR;
> }
>
> -static DNNReturnType execute_model_ov(RequestItem *request, Queue *inferenceq)
> +static DNNReturnType execute_model_ov(OVRequestItem *request, Queue *inferenceq)
> {
> IEStatusCode status;
> DNNReturnType ret;
> @@ -639,7 +639,7 @@ static DNNReturnType get_output_ov(void *model, const char *input_name, int inpu
> OVModel *ov_model = model;
> OVContext *ctx = &ov_model->ctx;
> TaskItem task;
> - RequestItem *request;
> + OVRequestItem *request;
> AVFrame *in_frame = NULL;
> AVFrame *out_frame = NULL;
> IEStatusCode status;
> @@ -779,7 +779,7 @@ DNNReturnType ff_dnn_execute_model_ov(const DNNModel *model, DNNExecBaseParams *
> OVModel *ov_model = model->model;
> OVContext *ctx = &ov_model->ctx;
> TaskItem task;
> - RequestItem *request;
> + OVRequestItem *request;
>
> if (ff_check_exec_params(ctx, DNN_OV, model->func_type, exec_params) != 0) {
> return DNN_ERROR;
> @@ -827,7 +827,7 @@ DNNReturnType ff_dnn_execute_model_async_ov(const DNNModel *model, DNNExecBasePa
> {
> OVModel *ov_model = model->model;
> OVContext *ctx = &ov_model->ctx;
> - RequestItem *request;
> + OVRequestItem *request;
> TaskItem *task;
> DNNReturnType ret;
>
> @@ -904,7 +904,7 @@ DNNReturnType ff_dnn_flush_ov(const DNNModel *model)
> {
> OVModel *ov_model = model->model;
> OVContext *ctx = &ov_model->ctx;
> - RequestItem *request;
> + OVRequestItem *request;
> IEStatusCode status;
> DNNReturnType ret;
>
> @@ -943,7 +943,7 @@ void ff_dnn_free_model_ov(DNNModel **model)
> if (*model){
> OVModel *ov_model = (*model)->model;
> while (ff_safe_queue_size(ov_model->request_queue) != 0) {
> - RequestItem *item = ff_safe_queue_pop_front(ov_model->request_queue);
> + OVRequestItem *item = ff_safe_queue_pop_front(ov_model->request_queue);
> if (item && item->infer_request) {
> ie_infer_request_free(&item->infer_request);
> }
> --
Thanks, will push soon.
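
For reference, a quick sketch of the struct as it reads after this rename (member names are taken from the hunk above; the inline comments are only a reading of their roles, not part of the patch):

    // one request for one call to openvino
    typedef struct OVRequestItem {
        ie_infer_request_t *infer_request;  // OpenVINO inference request handle
        InferenceItem **inferences;         // inference items batched into this request
        uint32_t inference_count;           // number of entries in inferences[]
        ie_complete_call_back_t callback;   // completion callback for async execution
    } OVRequestItem;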