[FFmpeg-devel] [PATCH 2/2] lavf: Add coreimage filter for GPU based image filtering on OSX.

Thilo Borgmann thilo.borgmann at mail.de
Sat Mar 12 23:09:00 CET 2016


Am 12.03.16 um 21:45 schrieb Michael Niedermayer:
> On Sat, Mar 12, 2016 at 03:19:11PM +0100, Thilo Borgmann wrote:
>> Am 12.03.16 um 15:14 schrieb Thilo Borgmann:
>>> Add coreimage filter for OSX.
>>
>> Corrected patch attached.
>>
>> -Thilo
>>
> 
>>  Changelog                  |    1 
>>  MAINTAINERS                |    1 
>>  configure                  |    3 
>>  doc/filters.texi           |   67 +++++
>>  libavfilter/Makefile       |    1 
>>  libavfilter/allfilters.c   |    1 
>>  libavfilter/vf_coreimage.m |  553 +++++++++++++++++++++++++++++++++++++++++++++
>>  7 files changed, 627 insertions(+)
>> 2bc3194273f31211d7ee3effc2af6c6d1b393db2  0002-lavf-Add-coreimage-filter-for-GPU-based-image-filter.patch
>> From 124fb0ce2a30c965ec28c8fbff1c04a872f33bc6 Mon Sep 17 00:00:00 2001
>> From: Thilo Borgmann <thilo.borgmann at mail.de>
>> Date: Sat, 12 Mar 2016 15:18:04 +0100
>> Subject: [PATCH 2/2] lavf: Add coreimage filter for GPU based image filtering
>>  on OSX.
>>
>> ---
>>  Changelog                  |   1 +
>>  MAINTAINERS                |   1 +
>>  configure                  |   3 +
>>  doc/filters.texi           |  67 ++++++
>>  libavfilter/Makefile       |   1 +
>>  libavfilter/allfilters.c   |   1 +
>>  libavfilter/vf_coreimage.m | 553 +++++++++++++++++++++++++++++++++++++++++++++
>>  7 files changed, 627 insertions(+)
>>  create mode 100644 libavfilter/vf_coreimage.m
>>
>> diff --git a/Changelog b/Changelog
>> index 1f57f5e..5053a86 100644
>> --- a/Changelog
>> +++ b/Changelog
>> @@ -12,6 +12,7 @@ version <next>:
>>  - ciescope filter
>>  - protocol blacklisting API
>>  - MediaCodec H264 decoding
>> +- coreimage filter (GPU based image filtering on OSX)
>>  
>>  
>>  version 3.0:
>> diff --git a/MAINTAINERS b/MAINTAINERS
>> index 531c21d..a993a67 100644
>> --- a/MAINTAINERS
>> +++ b/MAINTAINERS
>> @@ -370,6 +370,7 @@ Filters:
>>    vf_colorbalance.c                     Paul B Mahol
>>    vf_colorkey.c                         Timo Rothenpieler
>>    vf_colorlevels.c                      Paul B Mahol
>> +  vf_coreimage.m                        Thilo Borgmann
>>    vf_deband.c                           Paul B Mahol
>>    vf_dejudder.c                         Nicholas Robbins
>>    vf_delogo.c                           Jean Delvare (CC <jdelvare at suse.com>)
>> diff --git a/configure b/configure
>> index 1b189328..fe96215 100755
>> --- a/configure
>> +++ b/configure
>> @@ -5255,6 +5255,7 @@ frei0r_filter_extralibs='$ldl'
>>  frei0r_src_filter_extralibs='$ldl'
>>  ladspa_filter_extralibs='$ldl'
>>  nvenc_encoder_extralibs='$ldl'
>> +coreimage_filter_extralibs="-framework QuartzCore -framework AppKit -framework OpenGL"
>>  
>>  if ! disabled network; then
>>      check_func getaddrinfo $network_extralibs
> 
>> @@ -5483,6 +5484,8 @@ enabled avisynth          && { { check_lib2 "windows.h" LoadLibrary; } ||
>>                                 die "ERROR: LoadLibrary/dlopen not found for avisynth"; }
>>  enabled cuda              && check_lib cuda.h cuInit -lcuda
>>  enabled chromaprint       && require chromaprint chromaprint.h chromaprint_get_version -lchromaprint
>> +enabled coreimage         && { check_header_oc CoreImage/CoreImage.h || die "ERROR: CoreImage.h header not found"; }
>> +#enabled coreimage         && { check_lib2 CoreImage/CoreImage.h CGGetActiveDisplayList -framework CoreImage; }
> 
> this looks suspect

Removed.

> also breaks build on linux
> ./configure && make -j12
> ...
> libavfilter/vaf_spectrumsynth.c:434:9: warning: ‘ret’ may be used uninitialized in this function [-Wuninitialized]
> CC      libavfilter/vf_copy.o
> OBJCC   libavfilter/vf_coreimage.o
> gcc: error trying to exec 'cc1obj': execvp: No such file or directory
> make: *** [libavfilter/vf_coreimage.o] Error 1
> make: *** Waiting for unfinished jobs....

This should be fixed in the attached patch.
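
In case someone wants to re-check the Linux case, the failing sequence from
above should now go through, and the generated config.h should show the filter
as disabled wherever the CoreImage header check fails (just a sketch; the
CONFIG_COREIMAGE_FILTER name is the one used by the Makefile rule):

    ./configure && make -j12
    grep COREIMAGE_FILTER config.h
    # expected on Linux: #define CONFIG_COREIMAGE_FILTER 0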

-Thilo

-------------- next part --------------
From 3238a251ddd10dce64ed27abf30cf5d4b78f1b87 Mon Sep 17 00:00:00 2001
From: Thilo Borgmann <thilo.borgmann at mail.de>
Date: Sat, 12 Mar 2016 22:53:33 +0100
Subject: [PATCH 2/2] lavf: Add coreimage filter for GPU based image filtering
 on OSX.

---
 Changelog                  |   1 +
 MAINTAINERS                |   1 +
 configure                  |   2 +
 doc/filters.texi           |  67 ++++++
 libavfilter/Makefile       |   1 +
 libavfilter/allfilters.c   |   1 +
 libavfilter/vf_coreimage.m | 553 +++++++++++++++++++++++++++++++++++++++++++++
 7 files changed, 626 insertions(+)
 create mode 100644 libavfilter/vf_coreimage.m

diff --git a/Changelog b/Changelog
index 1f57f5e..5053a86 100644
--- a/Changelog
+++ b/Changelog
@@ -12,6 +12,7 @@ version <next>:
 - ciescope filter
 - protocol blacklisting API
 - MediaCodec H264 decoding
+- coreimage filter (GPU based image filtering on OSX)
 
 
 version 3.0:
diff --git a/MAINTAINERS b/MAINTAINERS
index 531c21d..a993a67 100644
--- a/MAINTAINERS
+++ b/MAINTAINERS
@@ -370,6 +370,7 @@ Filters:
   vf_colorbalance.c                     Paul B Mahol
   vf_colorkey.c                         Timo Rothenpieler
   vf_colorlevels.c                      Paul B Mahol
+  vf_coreimage.m                        Thilo Borgmann
   vf_deband.c                           Paul B Mahol
   vf_dejudder.c                         Nicholas Robbins
   vf_delogo.c                           Jean Delvare (CC <jdelvare at suse.com>)
diff --git a/configure b/configure
index 1b189328..1693f3b 100755
--- a/configure
+++ b/configure
@@ -5255,6 +5255,7 @@ frei0r_filter_extralibs='$ldl'
 frei0r_src_filter_extralibs='$ldl'
 ladspa_filter_extralibs='$ldl'
 nvenc_encoder_extralibs='$ldl'
+coreimage_filter_extralibs="-framework QuartzCore -framework AppKit -framework OpenGL"
 
 if ! disabled network; then
     check_func getaddrinfo $network_extralibs
@@ -5483,6 +5484,7 @@ enabled avisynth          && { { check_lib2 "windows.h" LoadLibrary; } ||
                                die "ERROR: LoadLibrary/dlopen not found for avisynth"; }
 enabled cuda              && check_lib cuda.h cuInit -lcuda
 enabled chromaprint       && require chromaprint chromaprint.h chromaprint_get_version -lchromaprint
+enabled coreimage         && { check_header_objcc CoreImage/CoreImage.h || die "ERROR: CoreImage.h header not found"; }
 enabled decklink          && { check_header DeckLinkAPI.h || die "ERROR: DeckLinkAPI.h header not found"; }
 enabled frei0r            && { check_header frei0r.h || die "ERROR: frei0r.h header not found"; }
 enabled gmp               && require2 gmp gmp.h mpz_export -lgmp
diff --git a/doc/filters.texi b/doc/filters.texi
index d5d619e..7fdbe61 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -4955,6 +4955,73 @@ convolution="-2 -1 0 -1 1 1 0 1 2:-2 -1 0 -1 1 1 0 1 2:-2 -1 0 -1 1 1 0 1 2:-2 -
 Copy the input source unchanged to the output. This is mainly useful for
 testing purposes.
 
+@anchor{coreimage}
+@section coreimage
+
+Video filtering on GPU using Apple's CoreImage API on OSX.
+
+Hardware acceleration is based on an OpenGL context. Usually, this means it is processed by video hardware. However, software-based OpenGL implementations exist which means there is no guarantee for hardware processing. It depends on the respective OSX installation.
+
+There are many filters and image generators provided by Apple that come with a large variety of options. The filter has to be referenced by its name along with its options.
+
+The coreimage filter accepts the following options:
+@table @option
+@item list_filters
+List all available filters along with their respective options, including the possible minimum and maximum values as well as the default values.
+@example
+    coreimage=list_filters=true
+@end example
+
+@item filter
+Specify all filters by their respective name and options.
+Use @option{list_filters} to determine all valid filter names and options.
+Numerical options are specified by a float value and are automatically clamped to their respective value range.
+Vector and color options have to be specified by a list of space-separated float values. The spaces have to be escaped accordingly (see the examples below).
+A special option name @code{default} is available to use default options for a filter.
+It is required to specify either @code{default} or at least one of the filter options.
+All omitted options are used with their default values.
+The syntax of the filter string is as follows:
+@example
+filter=<NAME>@<OPTION>=<VALUE>[@<OPTION>=<VALUE>][@...][#<NAME>@<OPTION>=<VALUE>[@<OPTION>=<VALUE>][@...]]
+@end example
+@end table
+
+Several filters can be chained for successive processing without GPU-HOST transfers, allowing for fast processing of complex filter chains.
+Currently, only filters with zero (generators) or exactly one (filters) input image and one output image are supported.
+Also, transition filters are not yet usable as intended.
+
+Some filters generate output images with additional padding depending on the respective filter kernel. The padding is automatically removed to ensure the filter output has the same size as the input image.
+For image generators, the size of the output image is determined by the given input image. The generators do not use the pixel information of the input image to generate their output. However, the generated output is blended onto the input image, resulting in partial or complete coverage of the output image.
+
+@subsection Examples
+
+@itemize
+
+@item
+List all filters available:
+@example
+coreimage=list_filters=true
+@end example
+
+@item
+Use the CIBoxBlur filter with default options to blur an image:
+@example
+coreimage=filter=CIBoxBlur@default
+@end example
+
+@item
+Use a filter chain with CISepiaTone at default values and CIVignetteEffect with its center at 100x100 and a radius of 50 pixels:
+@example
+coreimage=filter=CISepiaTone@default#CIVignetteEffect@inputCenter=100\ 100@inputRadius=50
+@end example
+
+@item
+Use nullsrc and CIQRCodeGenerator to create a QR code for the FFmpeg homepage, given as a complete and escaped command line for Apple's standard bash shell:
+@example
+./ffmpeg -f lavfi -i nullsrc=s=100x100,coreimage=filter=CIQRCodeGenerator@inputMessage=https\\\\\://FFmpeg.org/@inputCorrectionLevel=H -frames:v 1 QRCode.png
+@end example
+@end itemize
+
 @section crop
 
 Crop the input video to given dimensions.
diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index 956a077..9ce6559 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -133,6 +133,7 @@ OBJS-$(CONFIG_COLORLEVELS_FILTER)            += vf_colorlevels.o
 OBJS-$(CONFIG_COLORMATRIX_FILTER)            += vf_colormatrix.o
 OBJS-$(CONFIG_CONVOLUTION_FILTER)            += vf_convolution.o
 OBJS-$(CONFIG_COPY_FILTER)                   += vf_copy.o
+OBJS-$(CONFIG_COREIMAGE_FILTER)              += vf_coreimage.o
 OBJS-$(CONFIG_COVER_RECT_FILTER)             += vf_cover_rect.o lavfutils.o
 OBJS-$(CONFIG_CROP_FILTER)                   += vf_crop.o
 OBJS-$(CONFIG_CROPDETECT_FILTER)             += vf_cropdetect.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index e5080b5..91b0dde 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -154,6 +154,7 @@ void avfilter_register_all(void)
     REGISTER_FILTER(COLORMATRIX,    colormatrix,    vf);
     REGISTER_FILTER(CONVOLUTION,    convolution,    vf);
     REGISTER_FILTER(COPY,           copy,           vf);
+    REGISTER_FILTER(COREIMAGE,      coreimage,      vf);
     REGISTER_FILTER(COVER_RECT,     cover_rect,     vf);
     REGISTER_FILTER(CROP,           crop,           vf);
     REGISTER_FILTER(CROPDETECT,     cropdetect,     vf);
diff --git a/libavfilter/vf_coreimage.m b/libavfilter/vf_coreimage.m
new file mode 100644
index 0000000..94539db
--- /dev/null
+++ b/libavfilter/vf_coreimage.m
@@ -0,0 +1,553 @@
+/*
+ * Copyright (c) 2016 Thilo Borgmann
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+/**
+ * @file
+ * Video processing based on Apple's CoreImage API
+ */
+
+#import <QuartzCore/CoreImage.h>
+#import <AppKit/AppKit.h>
+
+#include "avfilter.h"
+#include "formats.h"
+#include "internal.h"
+#include "video.h"
+#include "libavutil/internal.h"
+#include "libavutil/opt.h"
+#include "libavutil/pixdesc.h"
+
+typedef struct CoreImageContext {
+    const AVClass *class;
+
+    CFTypeRef       glctx;              ///< OpenGL context
+    CGContextRef    cgctx;              ///< Bitmap context for image copy
+    CFTypeRef       input_image;        ///< Input image container for passing into Core Image API
+    CGColorSpaceRef color_space;        ///< Common color space for input image and cgcontext
+    int             bits_per_component; ///< Shared bpc for input-output operation
+
+    char            *filter_string;     ///< The complete user provided filter definition
+    CFTypeRef       *filters;           ///< CIFilter object for all requested filters
+    int             num_filters;        ///< Number of filters in *filters
+
+    int             list_filters;       ///< Option used to list all available filters
+} CoreImageContext;
+
+/** Determine image properties from input link of filter chain.
+ */
+static int config_input(AVFilterLink *link)
+{
+    CoreImageContext *ctx          = link->dst->priv;
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
+    ctx->bits_per_component        = av_get_bits_per_pixel(desc) / desc->nb_components;
+
+    return 0;
+}
+
+/** Print a list of all available filters including options and respective value ranges and defaults.
+ */
+static void list_filters(CoreImageContext *ctx)
+{
+    // querying filters and attributes
+    NSArray *filter_names = [CIFilter filterNamesInCategories:nil];
+    NSEnumerator *filters = [filter_names objectEnumerator];
+
+    NSString *filter_name;
+    while ((filter_name = [filters nextObject])) {
+        av_log(ctx, AV_LOG_INFO, "Filter: %s\n", [filter_name UTF8String]);
+        NSString *input;
+
+        CIFilter *filter             = [CIFilter filterWithName:filter_name];
+        NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
+        NSArray      *filter_inputs  = [filter inputKeys];  // <nsstring>
+
+        for (input in filter_inputs) {
+            NSDictionary *input_attribs = [filter_attribs valueForKey:input];
+            NSString *input_class       = [input_attribs valueForKey:kCIAttributeClass];
+            if ([input_class isEqualToString:@"NSNumber"]) {
+                NSNumber *value_default = [input_attribs valueForKey:kCIAttributeDefault];
+                NSNumber *value_min     = [input_attribs valueForKey:kCIAttributeSliderMin];
+                NSNumber *value_max     = [input_attribs valueForKey:kCIAttributeSliderMax];
+
+                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\t[%s %s][%s]\n",
+                    [input UTF8String],
+                    [input_class UTF8String],
+                    [[value_min stringValue] UTF8String],
+                    [[value_max stringValue] UTF8String],
+                    [[value_default stringValue] UTF8String]);
+            } else {
+                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\n",
+                    [input UTF8String],
+                    [input_class UTF8String]);
+            }
+        }
+    }
+}
+
+/** Get an appropriate video buffer for filter processing.
+ */
+static AVFrame *get_video_buffer(AVFilterLink *link, int w, int h)
+{
+    CoreImageContext *ctx = link->dst->priv;
+    AVFrame *frame;
+
+    frame = ff_get_video_buffer(link->dst->outputs[0], w, h);
+
+    if (!frame) {
+        av_log(ctx, AV_LOG_ERROR, "Getting video buffer failed.\n");
+    }
+
+    return frame;
+}
+
+/** Define input and output formats for this filter.
+ */
+static int query_formats(AVFilterContext *fctx)
+{
+    static const enum AVPixelFormat inout_fmts_rgb[] = {
+        AV_PIX_FMT_ARGB,
+        AV_PIX_FMT_NONE
+    };
+
+    AVFilterFormats *inout_formats;
+    int ret;
+
+    if (!(inout_formats = ff_make_format_list(inout_fmts_rgb))) {
+        return AVERROR(ENOMEM);
+    }
+
+    if ((ret = ff_formats_ref(inout_formats, &fctx->inputs[0]->out_formats)) < 0 || // out
+        (ret = ff_formats_ref(inout_formats, &fctx->outputs[0]->in_formats)) < 0) { // in
+        return ret;
+    }
+
+    return 0;
+}
+
+/** Apply all valid filters successively to the input image.
+ *  The final output image is copied from the GPU by "drawing" using a bitmap context.
+ */
+static int filter_frame(AVFilterLink *link, AVFrame *frame)
+{
+    CoreImageContext *ctx = link->dst->priv;
+    int i;
+
+    // assume one input image and one output image for now
+    if (!frame->data[0]) {
+        av_log(ctx, AV_LOG_ERROR, "No input image given.\n");
+        return AVERROR(EINVAL);
+    }
+
+    // (re-)initialize input image
+    const CGSize frame_size = {
+        frame->width,
+        frame->height
+    };
+
+    NSData *data = [NSData dataWithBytesNoCopy:frame->data[0]
+                           length:frame->height*frame->linesize[0]
+                           freeWhenDone:NO];
+
+    CIImage *ret = [(__bridge CIImage*)ctx->input_image initWithBitmapData:data
+                                                        bytesPerRow:frame->linesize[0]
+                                                        size:frame_size
+                                                        format:kCIFormatARGB8
+                                                        colorSpace:ctx->color_space]; //kCGColorSpaceGenericRGB
+    if (!ret) {
+        av_log(ctx, AV_LOG_ERROR, "Input image could not be initialized.\n");
+        return AVERROR_EXTERNAL;
+    }
+
+    CIFilter *filter       = NULL;
+    CIImage *filter_input  = (__bridge CIImage*)ctx->input_image;
+    CIImage *filter_output = NULL;
+
+    // successively apply all filters
+    for (i = 0; i < ctx->num_filters; i++) {
+        if (i) {
+            // set filter input to previous filter output
+            filter_input    = [(__bridge CIImage*)ctx->filters[i-1] valueForKey:kCIOutputImageKey];
+            CGRect out_rect = [filter_input extent];
+            if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
+                // do not keep padded image regions after filtering
+                out_rect.origin.x    = 0.0f;
+                out_rect.origin.y    = 0.0f;
+                out_rect.size.width  = frame->width;
+                out_rect.size.height = frame->height;
+            }
+            filter_input = [filter_input imageByCroppingToRect:out_rect];
+        }
+
+        filter = (__bridge CIFilter*)ctx->filters[i];
+
+        @try {
+            [filter setValue:filter_input forKey:kCIInputImageKey];
+        } @catch (NSException *exception) {
+            if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
+                av_log(ctx, AV_LOG_ERROR, "An error occurred: %s.\n", [exception.reason UTF8String]);
+                return AVERROR_EXTERNAL;
+            } else {
+                av_log(ctx, AV_LOG_WARNING, "Selected filter does not accept an input image.\n");
+            }
+        }
+    }
+
+    // get output of last filter
+    filter_output = [filter valueForKey:kCIOutputImageKey];
+
+    if (!filter_output) {
+        av_log(ctx, AV_LOG_ERROR, "Filter output not available.\n");
+        return AVERROR_EXTERNAL;
+    }
+
+    // do not keep padded image regions after filtering
+    CGRect out_rect = [filter_output extent];
+    if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
+        av_log(ctx, AV_LOG_DEBUG, "Cropping output image.\n");
+        out_rect.origin.x    = 0.0f;
+        out_rect.origin.y    = 0.0f;
+        out_rect.size.width  = frame->width;
+        out_rect.size.height = frame->height;
+    }
+
+    CGImageRef out = [(__bridge CIContext*)ctx->glctx createCGImage:filter_output
+                                                      fromRect:out_rect];
+
+    if (!out) {
+        av_log(ctx, AV_LOG_ERROR, "Cannot create valid output image.\n");
+    }
+
+    // create bitmap context on the fly for rendering into current frame->data[]
+    if (ctx->cgctx) {
+        CGContextRelease(ctx->cgctx);
+        ctx->cgctx = NULL;
+    }
+    size_t out_width    = CGImageGetWidth(out);
+    size_t out_height   = CGImageGetHeight(out);
+
+    if (out_width > frame->width || out_height > frame->height) { // this might result in segfault
+        av_log(ctx, AV_LOG_WARNING, "Output image has unexpected size: %lux%lu (expected: %ix%i). This may crash...\n",
+               out_width, out_height, frame->width, frame->height);
+    }
+    ctx->cgctx = CGBitmapContextCreate(frame->data[0],
+                                       frame->width,
+                                       frame->height,
+                                       ctx->bits_per_component,
+                                       frame->linesize[0],
+                                       ctx->color_space,
+                                       (uint32_t)kCGImageAlphaPremultipliedFirst); // ARGB
+    if (!ctx->cgctx) {
+        av_log(ctx, AV_LOG_ERROR, "CGBitmap context cannot be created.\n");
+        return AVERROR_EXTERNAL;
+    }
+
+    // copy ("draw") the output image into the frame data
+    const CGRect rect = {{0,0},{frame->width, frame->height}};
+    CGContextDrawImage(ctx->cgctx, rect, out);
+
+    return ff_filter_frame(link->dst->outputs[0], frame);
+}
+
+/** Set an option of the given filter to the provided key-value pair.
+ */
+static void set_option(CoreImageContext *ctx, CIFilter *filter, const char *key, const char *value)
+{
+    NSString *input_key = [NSString stringWithUTF8String:key];
+    NSString *input_val = [NSString stringWithUTF8String:value];
+
+    NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
+    NSDictionary *input_attribs  = [filter_attribs valueForKey:input_key];
+
+    NSString *input_class = [input_attribs valueForKey:kCIAttributeClass];
+    NSString *input_type  = [input_attribs valueForKey:kCIAttributeType];
+
+    if (!input_attribs) {
+        av_log(ctx, AV_LOG_WARNING, "Skipping unknown option: \"%s\".\n",
+               [input_key UTF8String]); // [[filter name] UTF8String]) not currently defined...
+        return;
+    }
+
+    av_log(ctx, AV_LOG_DEBUG, "key: %s, val: %s, #attribs: %lu, class: %s, type: %s\n",
+           [input_key UTF8String],
+           [input_val UTF8String],
+           input_attribs ? (unsigned long)[input_attribs count] : -1,
+           [input_class UTF8String],
+           [input_type UTF8String]);
+
+    if ([input_class isEqualToString:@"NSNumber"]) {
+        float input          = input_val.floatValue;
+        NSNumber *max_value  = [input_attribs valueForKey:kCIAttributeSliderMax];
+        NSNumber *min_value  = [input_attribs valueForKey:kCIAttributeSliderMin];
+        NSNumber *used_value = nil;
+
+#define CLAMP_WARNING {        \
+av_log(ctx, AV_LOG_WARNING, "Value of \"%f\" for option \"%s\" is out of range [%f %f], clamping to \"%f\".\n", \
+       input,                  \
+       [input_key UTF8String], \
+       min_value.floatValue,   \
+       max_value.floatValue,   \
+       used_value.floatValue); \
+}
+        if (input > max_value.floatValue) {
+            used_value = max_value;
+            CLAMP_WARNING;
+        } else if (input < min_value.floatValue) {
+            used_value = min_value;
+            CLAMP_WARNING;
+        } else {
+            used_value = [NSNumber numberWithFloat:input];
+        }
+
+        [filter setValue:used_value forKey:input_key];
+    } else if ([input_class isEqualToString:@"CIVector"]) {
+        CIVector *input = [CIVector vectorWithString:input_val];
+
+        if (!input) {
+            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIVector description: \"%s\".\n",
+                   [input_val UTF8String]);
+            return;
+        }
+
+        [filter setValue:input forKey:input_key];
+    } else if ([input_class isEqualToString:@"CIColor"]) {
+        CIColor *input = [CIColor colorWithString:input_val];
+
+        if (!input) {
+            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIColor description: \"%s\".\n",
+                   [input_val UTF8String]);
+            return;
+        }
+
+        [filter setValue:input forKey:input_key];
+    } else if ([input_class isEqualToString:@"NSString"]) { // set value directly as NSString
+        [filter setValue:input_val forKey:input_key];
+    } else if ([input_class isEqualToString:@"NSData"]) { // set value as NSData using latin1 encoding
+        NSData *input = [NSData dataWithBytes:(const void*)[input_val cStringUsingEncoding:NSISOLatin1StringEncoding]
+                                length:[input_val lengthOfBytesUsingEncoding:NSISOLatin1StringEncoding]];
+
+        if (!input) {
+            av_log(ctx, AV_LOG_WARNING, "Skipping invalid NSData description: \"%s\".\n",
+                   [input_val UTF8String]);
+            return;
+        }
+
+        [filter setValue:input forKey:input_key];
+    } else {
+        av_log(ctx, AV_LOG_WARNING, "Skipping unsupported option class: \"%s\".\n",
+               [input_class UTF8String]);
+        avpriv_report_missing_feature(ctx, "Handling of some option classes");
+        return;
+    }
+}
+
+/** Create a filter object by a given name and set all options to defaults.
+ *  Overwrite any option given by the user to the provided value in filter_options.
+ */
+static CIFilter* create_filter(CoreImageContext *ctx, const char *filter_name, AVDictionary *filter_options)
+{
+    // create filter object
+    CIFilter *filter = [CIFilter filterWithName:[NSString stringWithUTF8String:filter_name]];
+
+    // set default options
+    [filter setDefaults];
+
+    // set user options
+    if (filter_options) {
+        AVDictionaryEntry *o = NULL;
+        while ((o = av_dict_get(filter_options, "", o, AV_DICT_IGNORE_SUFFIX))) {
+            set_option(ctx, filter, o->key, o->value);
+        }
+    }
+
+    return filter;
+}
+
+/** Initialize all filters, parse all provided options or just list all available filters if requested.
+ */
+static av_cold int init(AVFilterContext *fctx)
+{
+    CoreImageContext *ctx     = fctx->priv;
+    AVDictionary *filter_dict = NULL;
+    AVDictionaryEntry *f      = NULL;
+    AVDictionaryEntry *o      = NULL;
+    int ret;
+    int i;
+
+    if (ctx->list_filters) {
+        list_filters(ctx);
+        return AVERROR_EXIT;
+    }
+
+    if (ctx->filter_string) {
+        // parse filter string (filter=name@opt=val@opt2=val2#name2@opt3=val3) for filters separated by #
+        av_log(ctx, AV_LOG_DEBUG, "Filter_string: %s\n", ctx->filter_string);
+        ret = av_dict_parse_string(&filter_dict, ctx->filter_string, "@", "#", AV_DICT_MULTIKEY); // parse filter_name:all_filter_options
+        if (ret) {
+            av_log(ctx, AV_LOG_ERROR, "Parsing of filters failed.\n");
+            return AVERROR(EIO);
+        }
+        ctx->num_filters = av_dict_count(filter_dict);
+        av_log(ctx, AV_LOG_DEBUG, "Filter count: %i\n", ctx->num_filters);
+
+        // allocate CIFilter array
+        ctx->filters = av_mallocz(ctx->num_filters * sizeof(CIFilter*));
+        if (!ctx->filters) {
+            av_log(ctx, AV_LOG_ERROR, "Could not allocate filter array.\n");
+            return AVERROR(ENOMEM);
+        }
+
+        // parse filters for option key-value pairs (opt=val@opt2=val2) separated by @
+        i = 0;
+        while ((f = av_dict_get(filter_dict, "", f, AV_DICT_IGNORE_SUFFIX))) {
+            AVDictionary *filter_options = NULL;
+
+            if (strncmp(f->value, "default", 7)) { // not default
+                ret = av_dict_parse_string(&filter_options, f->value, "=", "@", 0); // parse option_name:option_value
+                if (ret) {
+                    av_log(ctx, AV_LOG_ERROR, "Parsing of filter options for \"%s\" failed.\n", f->key);
+                    return AVERROR(EIO);
+                }
+            }
+
+            if (av_log_get_level() >= AV_LOG_DEBUG) {
+                av_log(ctx, AV_LOG_DEBUG, "Creating filter %i: \"%s\":\n", i, f->key);
+                if (!filter_options) {
+                    av_log(ctx, AV_LOG_DEBUG, "\tusing default options\n");
+                } else {
+                    while ((o = av_dict_get(filter_options, "", o, AV_DICT_IGNORE_SUFFIX))) {
+                        av_log(ctx, AV_LOG_DEBUG, "\t%s: %s\n", o->key, o->value);
+                    }
+                }
+            }
+
+            ctx->filters[i] = CFBridgingRetain(create_filter(ctx, f->key, filter_options));
+            if (!ctx->filters[i]) {
+                av_log(ctx, AV_LOG_ERROR, "Could not create filter \"%s\".\n", f->key);
+                return AVERROR(EINVAL);
+            }
+
+            i++;
+        }
+    } else {
+        av_log(ctx, AV_LOG_ERROR, "No filters specified.\n");
+        return AVERROR(EINVAL);
+    }
+
+    // create GPU context on OSX
+    const NSOpenGLPixelFormatAttribute attr[] = {
+        NSOpenGLPFAAccelerated,
+        NSOpenGLPFANoRecovery,
+        NSOpenGLPFAColorSize, 32,
+        0
+    };
+
+    NSOpenGLPixelFormat *pixel_format = [[NSOpenGLPixelFormat alloc] initWithAttributes:(void *)&attr];
+    ctx->color_space                  = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
+    ctx->glctx                        = CFBridgingRetain([CIContext contextWithCGLContext:CGLGetCurrentContext()
+                                                         pixelFormat:[pixel_format CGLPixelFormatObj]
+                                                         colorSpace:ctx->color_space
+                                                         options:nil]);
+
+    if (!ctx->glctx) {
+        av_log(ctx, AV_LOG_ERROR, "CIContext not created.\n");
+        return AVERROR_EXTERNAL;
+    }
+
+    // Creating an empty input image as input container for the context
+    ctx->input_image = CFBridgingRetain([CIImage emptyImage]);
+
+    return 0;
+}
+
+/** Uninitialize all filters, contexts and free all allocated memory.
+ */
+static av_cold void uninit(AVFilterContext *fctx)
+{
+#define SafeCFRelease(ptr) { \
+    if (ptr) {               \
+        CFRelease(ptr);      \
+        ptr = NULL;          \
+    }                        \
+}
+
+    CoreImageContext *ctx = fctx->priv;
+
+    SafeCFRelease(ctx->glctx);
+    SafeCFRelease(ctx->cgctx);
+    SafeCFRelease(ctx->color_space);
+    SafeCFRelease(ctx->input_image);
+
+    if (ctx->filters) {
+        for (int i = 0; i < ctx->num_filters; i++) {
+            SafeCFRelease(ctx->filters[i]);
+        }
+        av_free(ctx->filters);
+    }
+
+}
+
+static const AVFilterPad avfilter_vf_coreimage_inputs[] = {
+    {
+        .name             = "default",
+        .type             = AVMEDIA_TYPE_VIDEO,
+        .get_video_buffer = get_video_buffer,
+        .filter_frame     = filter_frame,
+        .config_props     = config_input,
+    },
+    { NULL }
+};
+
+static const AVFilterPad avfilter_vf_coreimage_outputs[] = {
+    {
+        .name = "default",
+        .type = AVMEDIA_TYPE_VIDEO,
+    },
+    { NULL }
+};
+
+#define OFFSET(x) offsetof(CoreImageContext, x)
+#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
+static const AVOption coreimage_options[] = {
+    { "list_filters", "list available filters",  OFFSET(list_filters), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, .flags = FLAGS, "list_filters" },
+    { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, FLAGS, "list_filters" },
+    { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, FLAGS, "list_filters" },
+    { "filter", "names and options of filters to apply",  OFFSET(filter_string), AV_OPT_TYPE_STRING, { .str = NULL }, .flags = FLAGS },
+    { NULL }
+};
+
+static const AVClass coreimage_class = {
+    .class_name = "coreimage",
+    .item_name  = av_default_item_name,
+    .option     = coreimage_options,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_FILTER,
+};
+
+AVFilter ff_vf_coreimage = {
+    .name          = "coreimage",
+    .description   = NULL_IF_CONFIG_SMALL("Video filtering using CoreImage API."),
+    .init          = init,
+    .uninit        = uninit,
+    .priv_size     = sizeof(CoreImageContext),
+    .priv_class    = &coreimage_class,
+    .inputs        = avfilter_vf_coreimage_inputs,
+    .outputs       = avfilter_vf_coreimage_outputs,
+    .query_formats = query_formats,
+};
-- 
2.4.9 (Apple Git-60)


