Apply the default value for timeout in code instead of via the
avoption, to allow distinguishing the default value from the user
not setting anything at all.
---
Previously approved.
---
 libavformat/tcp.c | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/libavformat/tcp.c b/libavformat/tcp.c
index f211142..1498c26 100644
--- a/libavformat/tcp.c
+++ b/libavformat/tcp.c
@@ -43,7 +43,7 @@ typedef struct TCPContext {
 #define E AV_OPT_FLAG_ENCODING_PARAM
 static const AVOption options[] = {
     { "listen",          "Listen for incoming connections",  OFFSET(listen),         AV_OPT_TYPE_INT, { .i64 = 0 },     0,       1,       .flags = D|E },
-    { "timeout",         "Connection timeout (in milliseconds)", OFFSET(timeout),    AV_OPT_TYPE_INT, { .i64 = 10000 }, INT_MIN, INT_MAX, .flags = D|E },
+    { "timeout",         "Connection timeout (in milliseconds)", OFFSET(timeout),    AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, .flags = D|E },
     { "listen_timeout",  "Bind timeout (in milliseconds)",   OFFSET(listen_timeout), AV_OPT_TYPE_INT, { .i64 = -1 },    INT_MIN, INT_MAX, .flags = D|E },
     { NULL }
 };
@@ -86,6 +86,10 @@ static int tcp_open(URLContext *h, const char *uri, int flags)
             s->listen_timeout = strtol(buf, NULL, 10);
         }
     }
+    if (!s->timeout)
+        s->timeout = h->rw_timeout ? h->rw_timeout / 1000 : 10000;
+    if (h->rw_timeout && s->listen_timeout < 0)
+        s->listen_timeout = h->rw_timeout / 1000;
     hints.ai_family = AF_UNSPEC;
     hints.ai_socktype = SOCK_STREAM;
     snprintf(portstr, sizeof(portstr), "%d", port);
-- 
2.5.4 (Apple Git-61)

_______________________________________________
libav-devel mailing list
libav-devel@libav.org
https://lists.libav.org/mailman/listinfo/libav-devel

Reply via email to