FFmpeg
dnn_backend_common.h
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 /**
20  * @file
21  * DNN common functions for different backends.
22  */
23 
24 #ifndef AVFILTER_DNN_DNN_BACKEND_COMMON_H
25 #define AVFILTER_DNN_DNN_BACKEND_COMMON_H
26 
27 #include "queue.h"
28 #include "../dnn_interface.h"
29 #include "libavutil/thread.h"
30 
31 #define DNN_BACKEND_COMMON_OPTIONS \
32  { "nireq", "number of request", OFFSET(options.nireq), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, FLAGS }, \
33  { "async", "use DNN async inference", OFFSET(options.async), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
34 
35 // one task for one function call from dnn interface
36 typedef struct TaskItem {
37  void *model; // model for the backend
38  AVFrame *in_frame; // input frame for the task
39  AVFrame *out_frame; // output frame filled after inference
40  const char *input_name; // name of the model input
41  const char **output_names; // names of the model outputs
42  uint8_t async; // non-zero for async execution
43  uint8_t do_ioproc; // non-zero to run input/output processing
44  uint32_t nb_output; // number of entries in output_names
45  uint32_t inference_todo; // inferences scheduled for this task
46  uint32_t inference_done; // inferences completed so far
47 } TaskItem;
48 
49 // one task might have multiple inferences
50 typedef struct LastLevelTaskItem {
51  TaskItem *task; // the task this inference item belongs to
52  uint32_t bbox_index;
53 } LastLevelTaskItem;
54 
55 /**
56  * Common Async Execution Mechanism for the DNN Backends.
57  */
58 typedef struct DNNAsyncExecModule {
59  /**
60  * Synchronous inference function for the backend
61  * with corresponding request item as the argument.
62  */
63  int (*start_inference)(void *request);
64 
65  /**
66  * Completion Callback for the backend.
67  * Expected argument type of callback must match that
68  * of the inference function.
69  */
70  void (*callback)(void *args);
71 
72  /**
73  * Argument for the execution functions.
74  * i.e. Request item for the backend.
75  */
76  void *args;
77 #if HAVE_PTHREAD_CANCEL
78  pthread_t thread_id;
79  pthread_attr_t thread_attr;
80 #endif
81 } DNNAsyncExecModule;
82 
83 int ff_check_exec_params(void *ctx, DNNBackendType backend, DNNFunctionType func_type, DNNExecBaseParams *exec_params);
84 
85 /**
86  * Fill the Task for Backend Execution. It should be called after
87  * checking execution parameters using ff_check_exec_params.
88  *
89  * @param task pointer to the allocated task
 90  * @param exec_params pointer to execution parameters
91  * @param backend_model void pointer to the backend model
92  * @param async flag for async execution. Must be 0 or 1
93  * @param do_ioproc flag for IO processing. Must be 0 or 1
94  *
95  * @returns 0 if successful or error code otherwise.
96  */
97 int ff_dnn_fill_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int async, int do_ioproc);
98 
99 /**
100  * Join the Async Execution thread and set module pointers to NULL.
101  *
102  * @param async_module pointer to DNNAsyncExecModule module
103  *
104  * @returns 0 if successful or error code otherwise.
105  */
106 int ff_dnn_async_module_cleanup(DNNAsyncExecModule *async_module);
107 
108 /**
 109  * Start asynchronous inference routine for the backend
110  * model on a detached thread. It calls the completion callback
111  * after the inference completes. Completion callback and inference
112  * function must be set before calling this function.
113  *
114  * If POSIX threads aren't supported, the execution rolls back
115  * to synchronous mode, calling completion callback after inference.
116  *
117  * @param ctx pointer to the backend context
118  * @param async_module pointer to DNNAsyncExecModule module
119  *
120  * @returns 0 on the start of async inference or error code otherwise.
121  */
122 int ff_dnn_start_inference_async(void *ctx, DNNAsyncExecModule *async_module);
123 
124 /**
125  * Extract input and output frame from the Task Queue after
126  * asynchronous inference.
127  *
128  * @param task_queue pointer to the task queue of the backend
129  * @param in double pointer to the input frame
130  * @param out double pointer to the output frame
131  *
132  * @retval DAST_EMPTY_QUEUE if task queue is empty
133  * @retval DAST_NOT_READY if inference not completed yet.
134  * @retval DAST_SUCCESS if result successfully extracted
135  */
136 DNNAsyncStatusType ff_dnn_get_result_common(Queue *task_queue, AVFrame **in, AVFrame **out);
137 
138 /**
139  * Allocate input and output frames and fill the Task
140  * with execution parameters.
141  *
142  * @param task pointer to the allocated task
143  * @param exec_params pointer to execution parameters
144  * @param backend_model void pointer to the backend model
145  * @param input_height height of input frame
146  * @param input_width width of input frame
147  * @param ctx pointer to the backend context
148  *
149  * @returns 0 if successful or error code otherwise.
150  */
151 int ff_dnn_fill_gettingoutput_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int input_height, int input_width, void *ctx);
152 
153 #endif
out
FILE * out
Definition: movenc.c:54
thread.h
DNNAsyncExecModule
Common Async Execution Mechanism for the DNN Backends.
Definition: dnn_backend_common.h:58
DNNFunctionType
DNNFunctionType
Definition: dnn_interface.h:52
ff_dnn_start_inference_async
int ff_dnn_start_inference_async(void *ctx, DNNAsyncExecModule *async_module)
Start asynchronous inference routine for the backend model on a detached thread.
Definition: dnn_backend_common.c:111
LastLevelTaskItem
Definition: dnn_backend_common.h:50
LastLevelTaskItem::bbox_index
uint32_t bbox_index
Definition: dnn_backend_common.h:52
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:325
TaskItem
Definition: dnn_backend_common.h:36
DNNAsyncExecModule::callback
void(* callback)(void *args)
Completion Callback for the backend.
Definition: dnn_backend_common.h:70
TaskItem::model
void * model
Definition: dnn_backend_common.h:37
Queue
Linear double-ended data structure.
Definition: queue.c:33
ff_dnn_fill_gettingoutput_task
int ff_dnn_fill_gettingoutput_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int input_height, int input_width, void *ctx)
Allocate input and output frames and fill the Task with execution parameters.
Definition: dnn_backend_common.c:162
LastLevelTaskItem::task
TaskItem * task
Definition: dnn_backend_common.h:51
pthread_attr_t
void pthread_attr_t
Definition: os2threads.h:51
ctx
AVFormatContext * ctx
Definition: movenc.c:48
TaskItem::inference_todo
uint32_t inference_todo
Definition: dnn_backend_common.h:45
ff_dnn_fill_task
int ff_dnn_fill_task(TaskItem *task, DNNExecBaseParams *exec_params, void *backend_model, int async, int do_ioproc)
Fill the Task for Backend Execution.
Definition: dnn_backend_common.c:56
TaskItem::in_frame
AVFrame * in_frame
Definition: dnn_backend_common.h:38
ff_dnn_async_module_cleanup
int ff_dnn_async_module_cleanup(DNNAsyncExecModule *async_module)
Join the Async Execution thread and set module pointers to NULL.
Definition: dnn_backend_common.c:92
TaskItem::async
uint8_t async
Definition: dnn_backend_common.h:42
TaskItem::inference_done
uint32_t inference_done
Definition: dnn_backend_common.h:46
DNNBackendType
DNNBackendType
Definition: dnn_interface.h:35
queue.h
pthread_t
Definition: os2threads.h:44
ff_check_exec_params
int ff_check_exec_params(void *ctx, DNNBackendType backend, DNNFunctionType func_type, DNNExecBaseParams *exec_params)
Definition: dnn_backend_common.c:29
ff_dnn_get_result_common
DNNAsyncStatusType ff_dnn_get_result_common(Queue *task_queue, AVFrame **in, AVFrame **out)
Extract input and output frame from the Task Queue after asynchronous inference.
Definition: dnn_backend_common.c:142
DNNAsyncExecModule::start_inference
int(* start_inference)(void *request)
Synchronous inference function for the backend with corresponding request item as the argument.
Definition: dnn_backend_common.h:63
DNNAsyncExecModule::args
void * args
Argument for the execution functions.
Definition: dnn_backend_common.h:76
TaskItem::output_names
const char ** output_names
Definition: dnn_backend_common.h:41
TaskItem::out_frame
AVFrame * out_frame
Definition: dnn_backend_common.h:39
TaskItem::input_name
const char * input_name
Definition: dnn_backend_common.h:40
DNNExecBaseParams
Definition: dnn_interface.h:67
TaskItem::do_ioproc
uint8_t do_ioproc
Definition: dnn_backend_common.h:43
int
int
Definition: ffmpeg_filter.c:153
DNNAsyncStatusType
DNNAsyncStatusType
Definition: dnn_interface.h:45
TaskItem::nb_output
uint32_t nb_output
Definition: dnn_backend_common.h:44