#define INTERPOLATE_METHOD(name) \
    static uint8_t name(float x, float y, const uint8_t *src, \
                        int width, int height, int stride, uint8_t def)

#define PIXEL(img, x, y, w, h, stride, def) \
    ((x) < 0 || (y) < 0) ? (def) : \
    (((x) >= (w) || (y) >= (h)) ? (def) : \
    img[(x) + (y) * (stride)])

/**
 * Bilinear interpolation: x_f/y_f and x_c/y_c are the integer coordinates
 * bracketing the sampling point (x, y); v1..v4 are the four surrounding
 * pixels fetched with PIXEL().
 */
INTERPOLATE_METHOD(interpolate_bilinear)
{
    int x_c, x_f, y_c, y_f;
    ...
    return (v1*(x - x_f)*(y - y_f) + v2*((x - x_f)*(y_c - y)) +
            v3*(x_c - x)*(y - y_f) + v4*((x_c - x)*(y_c - y)));
}
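
/*
 * Worked example (not part of transform.c): sampling midway between the four
 * pixels of a hypothetical 2x2 patch with the bilinear method above. At
 * (0.5, 0.5) each of the four weights in the return expression is 0.25, so
 * the result is the plain average (10 + 20 + 30 + 40) / 4 = 25. The function
 * name and test data are illustrative only.
 */
static void bilinear_example(void)
{
    static const uint8_t patch[4] = {
        10, 20,   /* row 0 */
        30, 40,   /* row 1 */
    };
    /* width = 2, height = 2, stride = 2, def = 0 */
    uint8_t v = interpolate_bilinear(0.5f, 0.5f, patch, 2, 2, 2, 0);   /* 25 */
    (void)v;
}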

/**
 * Biquadratic interpolation: v1..v4 are the four pixels around (x, y),
 * fetched with PIXEL() as in interpolate_bilinear, then blended with the
 * sqrt-based weights f1..f4 and normalized by the sum of the weights.
 */
INTERPOLATE_METHOD(interpolate_biquadratic)
{
    int x_c, x_f, y_c, y_f;
    ...
    f1 = 1 - sqrt((x_c - x) * (y_c - y));
    f2 = 1 - sqrt((x_c - x) * (y - y_f));
    f3 = 1 - sqrt((x - x_f) * (y_c - y));
    f4 = 1 - sqrt((x - x_f) * (y - y_f));
    return (v1 * f1 + v2 * f2 + v3 * f3 + v4 * f4) / (f1 + f2 + f3 + f4);
}

/* Fill the 3x3 affine matrix (9 floats, row-major) from the x/y shift, the
 * rotation angle and the per-axis scale factors: */
    matrix[0] = scale_x * cos(angle);
    matrix[1] = -sin(angle);
    matrix[2] = x_shift;
    matrix[3] = -matrix[1];
    matrix[4] = scale_y * cos(angle);
    matrix[5] = y_shift;

/* Element-wise helpers on the 9-float matrices: */

    /* result = m1 + m2 */
    for (i = 0; i < 9; i++)
        result[i] = m1[i] + m2[i];

    /* result = m1 - m2 */
    for (i = 0; i < 9; i++)
        result[i] = m1[i] - m2[i];

    /* result = m1 * scalar */
    for (i = 0; i < 9; i++)
        result[i] = m1[i] * scalar;
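
/*
 * Sketch (not FFmpeg API): the element-wise operations above are enough to
 * blend two 9-float motion matrices, e.g. to smooth a per-frame camera
 * transform against a running average before applying it. The function name
 * and the alpha parameter are illustrative only.
 */
static void blend_matrix(const float *prev, const float *cur, float alpha,
                         float *result)
{
    int i;
    /* result = prev * (1 - alpha) + cur * alpha, element by element */
    for (i = 0; i < 9; i++)
        result[i] = prev[i] * (1.0f - alpha) + cur[i] * alpha;
}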

/* Core of the affine transform: map each destination pixel back through the
 * 3x3 matrix and interpolate it from the source plane. src and dst are the
 * source and destination planes with src_stride and dst_stride bytes per
 * line; interpolate selects the interpolation method and fill how samples
 * that fall outside the source are substituted. */
    int x, y;
    float x_s, y_s;
    uint8_t def = 0;
    uint8_t (*func)(float, float, const uint8_t *, int, int, int, uint8_t) = NULL;

    switch(interpolate) {
        case INTERPOLATE_NEAREST:
            func = interpolate_nearest;
            break;
        case INTERPOLATE_BILINEAR:
            func = interpolate_bilinear;
            break;
        case INTERPOLATE_BIQUADRATIC:
            func = interpolate_biquadratic;
            break;
    }

    for (y = 0; y < height; y++) {
        for(x = 0; x < width; x++) {
            /* map the destination pixel back into the source plane */
            x_s = x * matrix[0] + y * matrix[1] + matrix[2];
            y_s = x * matrix[3] + y * matrix[4] + matrix[5];

            /* def is the sample used when (x_s, y_s) is outside the source */
            switch(fill) {
                case FILL_ORIGINAL:
                    def = src[y * src_stride + x];
                    break;
                case FILL_CLAMP:
                    y_s = av_clipf(y_s, 0, height - 1);
                    x_s = av_clipf(x_s, 0, width - 1);
                    def = src[(int)y_s * src_stride + (int)x_s];
                    break;
                case FILL_MIRROR:
                    y_s = avpriv_mirror(y_s, height - 1);
                    x_s = avpriv_mirror(x_s, width - 1);
                    def = src[(int)y_s * src_stride + (int)x_s];
            }

            dst[y * dst_stride + x] =
                func(x_s, y_s, src, width, height, src_stride, def);
        }
    }
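
/*
 * Usage sketch (not part of transform.c): filling the 9-float matrix for a
 * 5-degree rotation, an (8, 4) pixel shift and unity scale, then mapping one
 * destination pixel back into the source exactly as the loop above does.
 * The function name is illustrative; cosf()/sinf() come from <math.h>, which
 * the code above already relies on for cos(), sin() and sqrt().
 */
static void mapping_example(void)
{
    float m[9], x_s, y_s;
    const float angle = 0.0872665f;   /* 5 degrees in radians */

    m[0] =  cosf(angle); m[1] = -sinf(angle); m[2] = 8.0f;
    m[3] =  sinf(angle); m[4] =  cosf(angle); m[5] = 4.0f;
    m[6] = 0.0f;         m[7] = 0.0f;         m[8] = 1.0f;

    /* destination pixel (100, 50) samples the source near (103.3, 62.5) */
    x_s = 100 * m[0] + 50 * m[1] + m[2];
    y_s = 100 * m[3] + 50 * m[4] + m[5];
    (void)x_s;
    (void)y_s;
}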