[FFmpeg-devel] [PATCH 2/2] lavf/concat: add max_opened_files option.
Nicolas George
nicolas.george at normalesup.org
Sun Jul 21 16:14:24 CEST 2013
Fix part of trac ticket #2801.
Signed-off-by: Nicolas George <nicolas.george at normalesup.org>
---
doc/protocols.texi | 9 ++++
libavformat/concat.c | 144 +++++++++++++++++++++++++++++++++++++++++---------
2 files changed, 129 insertions(+), 24 deletions(-)
Another option, which I slightly prefer, would be to have a generic linked
list API in lavu. A patch for that may follow soon.
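For the record, a generic lavu linked list API of that kind could look roughly
like the following. This is only an illustrative sketch: none of these names
(AVListEntry, av_list_init, av_list_insert, av_list_remove) exist anywhere, and
nothing here is part of an actual patch.

/* Illustrative sketch only: a circular doubly-linked list with a sentinel
 * head, in the spirit of the kernel list_head idiom. */
typedef struct AVListEntry {
    struct AVListEntry *prev;
    struct AVListEntry *next;
} AVListEntry;

/* An empty list is a head that points to itself in both directions. */
static inline void av_list_init(AVListEntry *head)
{
    head->prev = head->next = head;
}

/* Link "entry" just before "at"; with "at" being the list head, this
 * appends at the tail. */
static inline void av_list_insert(AVListEntry *at, AVListEntry *entry)
{
    entry->prev = at->prev;
    entry->next = at;
    at->prev->next = entry;
    at->prev = entry;
}

/* Unlink "entry" from its list and leave it self-linked. */
static inline void av_list_remove(AVListEntry *entry)
{
    entry->prev->next = entry->next;
    entry->next->prev = entry->prev;
    entry->prev = entry->next = entry;
}

Users would embed an AVListEntry in their own structures and recover the
containing structure with offsetof(). A standalone illustration of the LRU
discipline that the patch below implements by hand follows after the diff.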
diff --git a/doc/protocols.texi b/doc/protocols.texi
index e8427aa..c2cd4c2 100644
--- a/doc/protocols.texi
+++ b/doc/protocols.texi
@@ -75,6 +75,15 @@ where @var{URL1}, @var{URL2}, ..., @var{URLN} are the urls of the
 resource to be concatenated, each one possibly specifying a distinct
 protocol.
 
+The accepted options are:
+@table @option
+
+@item max_opened_files
+Set the maximum number of simultaneously opened files; defaults to 0 for
+unlimited.
+
+@end table
+
 For example to read a sequence of files @file{split1.mpeg},
 @file{split2.mpeg}, @file{split3.mpeg} with @command{ffplay} use the
 command:
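As a usage aside (not part of the patch): from the API, the new option should
be reachable like any other protocol private option, for instance through the
options dictionary of avio_open2(). A rough sketch, assuming that dictionary is
forwarded to the protocol's private context and picking an arbitrary limit of 4:

#include <libavformat/avio.h>
#include <libavutil/dict.h>

static int open_concat(AVIOContext **pb)
{
    AVDictionary *opts = NULL;
    int ret;

    /* Assumption: entries of this dictionary are applied to the concat
     * protocol's private options, so "max_opened_files" ends up limiting
     * the number of simultaneously opened nodes. */
    av_dict_set(&opts, "max_opened_files", "4", 0);
    ret = avio_open2(pb, "concat:split1.mpeg|split2.mpeg|split3.mpeg",
                     AVIO_FLAG_READ, NULL, &opts);
    av_dict_free(&opts);
    return ret;
}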
diff --git a/libavformat/concat.c b/libavformat/concat.c
index f97354c..bc89d4f 100644
--- a/libavformat/concat.c
+++ b/libavformat/concat.c
@@ -24,6 +24,7 @@
#include "avformat.h"
#include "libavutil/avstring.h"
#include "libavutil/mem.h"
+#include "libavutil/opt.h"
#include "url.h"
#define AV_CAT_SEPARATOR "|"
@@ -31,12 +32,33 @@
 struct concat_nodes {
     URLContext *uc; ///< node's URLContext
     int64_t size; ///< url filesize
+    struct concat_nodes *lru_next; ///< next (more recent) node in LRU list
+    struct concat_nodes *lru_prev; ///< previous (older) node in LRU list
 };
 
 struct concat_data {
+    const AVClass *class;
     struct concat_nodes *nodes; ///< list of nodes to concat
     size_t length; ///< number of cat'ed nodes
     size_t current; ///< index of currently read node
+
+    int max_opened_files; ///< maximum number of opened nodes
+    size_t lru_count; ///< number of opened nodes
+    struct concat_nodes *lru_next; ///< least recently used node
+};
+
+static const AVOption concat_options[] = {
+    { "max_opened_files", "set maximum number of simultaneously opened files",
+      offsetof(struct concat_data, max_opened_files), AV_OPT_TYPE_INT,
+      { .i64 = 0 }, 0, INT_MAX, AV_OPT_FLAG_ENCODING_PARAM },
+    { NULL }
+};
+
+static const AVClass concat_class = {
+    .class_name = "concat",
+    .item_name = av_default_item_name,
+    .option = concat_options,
+    .version = LIBAVUTIL_VERSION_INT,
 };
 
 static av_cold int concat_close(URLContext *h)
@@ -54,11 +76,80 @@ static av_cold int concat_close(URLContext *h)
     return err < 0 ? -1 : 0;
 }
 
+#include "libavutil/avassert.h"
+
+static void lru_insert_mru(struct concat_data *data, struct concat_nodes *node)
+{
+    struct concat_nodes *lru = data->lru_next;
+
+    /* Note: the LRU list is circular: lru->prev = mru */
+    node->lru_next = lru;
+    node->lru_prev = lru->lru_prev;
+    lru ->lru_prev = lru->lru_prev->lru_next = node;
+}
+
+/**
+ * Connect to a node, disconnecting from the least recently used (LRU) if
+ * necessary.
+ */
+static int lru_connect(URLContext *h, size_t idx)
+{
+    struct concat_data *data = h->priv_data;
+    struct concat_nodes *node = &data->nodes[idx];
+    int ret;
+
+    if (node->uc->is_connected) {
+        if (data->lru_next == node) {
+            /* Single element list or LRU becoming MRU */
+            data->lru_next = node->lru_next;
+        } else {
+            /* Remove from old place */
+            node->lru_prev->lru_next = node->lru_next;
+            node->lru_next->lru_prev = node->lru_prev;
+            lru_insert_mru(data, node);
+        }
+        return 0;
+    }
+
+    if (data->max_opened_files &&
+        data->lru_count == data->max_opened_files) {
+        /* Close least recently used */
+        struct concat_nodes *lru = data->lru_next;
+        av_log(h, AV_LOG_DEBUG, "Closing node #%td\n", lru - data->nodes);
+        if ((ret = ffurl_disconnect(lru->uc)) < 0)
+            return ret;
+        lru->lru_prev->lru_next = data->lru_next = lru->lru_next;
+        lru->lru_next->lru_prev = lru->lru_prev;
+        data->lru_count--;
+    }
+
+    av_log(h, AV_LOG_DEBUG, "Opening node #%td\n", idx);
+    if ((ret = ffurl_connect(node->uc, NULL)) < 0)
+        return ret;
+
+    /* Move to most recently used */
+    if (++data->lru_count == 1)
+        node->lru_next = node->lru_prev = data->lru_next = node;
+    else
+        lru_insert_mru(data, node);
+
+    return 0;
+}
+
+static int64_t lru_seek(URLContext *h, size_t idx, int64_t pos, int whence)
+{
+    struct concat_data *data = h->priv_data;
+    int ret;
+
+    if ((ret = lru_connect(h, idx)) < 0)
+        return ret;
+    return ffurl_seek(data->nodes[idx].uc, pos, whence);
+}
+
 static av_cold int concat_open(URLContext *h, const char *uri, int flags)
 {
     char *node_uri = NULL, *tmp_uri;
     int err = 0;
-    int64_t size;
     size_t len, i;
     URLContext *uc;
     struct concat_data *data = h->priv_data;
@@ -87,38 +178,42 @@ static av_cold int concat_open(URLContext *h, const char *uri, int flags)
         len = strcspn(uri, AV_CAT_SEPARATOR);
         if (!(tmp_uri = av_realloc(node_uri, len+1))) {
             err = AVERROR(ENOMEM);
-            break;
+            goto fail;
         } else
             node_uri = tmp_uri;
         av_strlcpy(node_uri, uri, len+1);
         uri += len + strspn(uri+len, AV_CAT_SEPARATOR);
 
         /* creating URLContext */
-        if ((err = ffurl_open(&uc, node_uri, flags,
-                              &h->interrupt_callback, NULL)) < 0)
-            break;
-
-        /* creating size */
-        if ((size = ffurl_size(uc)) < 0) {
-            ffurl_close(uc);
-            err = AVERROR(ENOSYS);
-            break;
-        }
-
-        /* assembling */
+        if ((err = ffurl_alloc(&uc, node_uri, flags,
+                               &h->interrupt_callback)) < 0)
+            goto fail;
         nodes[i].uc = uc;
-        nodes[i].size = size;
     }
     av_free(node_uri);
     data->length = i;
 
-    if (err < 0)
-        concat_close(h);
-    else if (!(nodes = av_realloc(nodes, data->length * sizeof(*nodes)))) {
-        concat_close(h);
+    if (!(nodes = av_realloc(nodes, data->length * sizeof(*nodes)))) {
         err = AVERROR(ENOMEM);
-    } else
-        data->nodes = nodes;
+        goto fail;
+    }
+    data->nodes = nodes;
+
+    for (i = 0; i < data->length; i++) {
+        if ((err = lru_connect(h, i)) < 0)
+            goto fail;
+        if ((nodes[i].size = ffurl_size(nodes[i].uc)) < 0) {
+            err = AVERROR(ENOSYS);
+            goto fail;
+        }
+    }
+
+    if ((err = lru_connect(h, data->current)) < 0)
+        goto fail;
+    return 0;
+
+fail:
+    concat_close(h);
     return err;
 }
@@ -135,7 +230,7 @@ static int concat_read(URLContext *h, unsigned char *buf, int size)
             return total ? total : result;
         if (!result)
             if (i + 1 == data->length ||
-                ffurl_seek(nodes[++i].uc, 0, SEEK_SET) < 0)
+                lru_seek(h, ++i, 0, SEEK_SET) < 0)
                 break;
         total += result;
         buf += result;
@@ -163,7 +258,7 @@ static int64_t concat_seek(URLContext *h, int64_t pos, int whence)
         /* get the absolute position */
         for (i = 0; i != data->current; i++)
             pos += nodes[i].size;
-        pos += ffurl_seek(nodes[i].uc, 0, SEEK_CUR);
+        pos += lru_seek(h, i, 0, SEEK_CUR);
         whence = SEEK_SET;
         /* fall through with the absolute position */
     case SEEK_SET:
@@ -174,7 +269,7 @@ static int64_t concat_seek(URLContext *h, int64_t pos, int whence)
         return AVERROR(EINVAL);
     }
 
-    result = ffurl_seek(nodes[i].uc, pos, whence);
+    result = lru_seek(h, i, pos, whence);
     if (result >= 0) {
         data->current = i;
         while (i)
@@ -185,6 +280,7 @@ static int64_t concat_seek(URLContext *h, int64_t pos, int whence)
 URLProtocol ff_concat_protocol = {
     .name = "concat",
+    .priv_data_class = &concat_class,
     .url_open = concat_open,
     .url_read = concat_read,
     .url_seek = concat_seek,
--
1.7.10.4
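To make the list discipline above easier to follow (data->lru_next points to
the least recently used node and its lru_prev is the most recently used one,
the list being circular), here is a small self-contained toy model of the same
open/evict logic. All names are invented for the demonstration; this is not
code from the patch.

#include <stdio.h>

/* Toy model of the circular LRU scheme: lru->prev of the list head is the
 * MRU element; max == 0 means no limit, matching max_opened_files. */
struct demo_node {
    int id;
    int opened;
    struct demo_node *prev, *next;
};

struct demo_lru {
    struct demo_node *lru;   /* least recently used; lru->prev is the MRU */
    unsigned count, max;
};

/* Insert node as most recently used, i.e. just before the LRU head. */
static void demo_insert_mru(struct demo_lru *l, struct demo_node *n)
{
    struct demo_node *lru = l->lru;

    n->next = lru;
    n->prev = lru->prev;
    lru->prev->next = n;
    lru->prev = n;
}

static void demo_unlink(struct demo_lru *l, struct demo_node *n)
{
    if (l->lru == n)
        l->lru = n->next;
    n->prev->next = n->next;
    n->next->prev = n->prev;
}

/* "Open" a node, evicting the least recently used one if the limit is hit. */
static void demo_use(struct demo_lru *l, struct demo_node *n)
{
    if (n->opened) {                      /* already open: just make it MRU */
        if (l->count > 1) {
            demo_unlink(l, n);
            demo_insert_mru(l, n);
        }
        return;
    }
    if (l->max && l->count == l->max) {   /* limit reached: evict the LRU */
        struct demo_node *victim = l->lru;
        printf("closing %d\n", victim->id);
        victim->opened = 0;
        demo_unlink(l, victim);
        l->count--;
    }
    printf("opening %d\n", n->id);
    n->opened = 1;
    if (++l->count == 1)                  /* first element: self-circular */
        n->prev = n->next = l->lru = n;
    else
        demo_insert_mru(l, n);
}

int main(void)
{
    struct demo_node nodes[4] = { { .id = 0 }, { .id = 1 },
                                  { .id = 2 }, { .id = 3 } };
    struct demo_lru l = { .max = 2 };

    demo_use(&l, &nodes[0]);   /* opening 0 */
    demo_use(&l, &nodes[1]);   /* opening 1 */
    demo_use(&l, &nodes[0]);   /* already open, becomes MRU */
    demo_use(&l, &nodes[2]);   /* closing 1, opening 2 */
    demo_use(&l, &nodes[3]);   /* closing 0, opening 3 */
    return 0;
}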