Re: [FFmpeg-devel] [PATCH V5 2/2] dnn/native: add log error message

2020-08-24 Thread Guo, Yejun


> -Original Message-
> From: ffmpeg-devel  On Behalf Of Ting Fu
> Sent: 2020年8月25日 11:48
> To: ffmpeg-devel@ffmpeg.org
> Subject: [FFmpeg-devel] [PATCH V5 2/2] dnn/native: add log error message
> 
> Signed-off-by: Ting Fu 
> ---
>  libavfilter/dnn/dnn_backend_native.c  | 55 +++
>  libavfilter/dnn/dnn_backend_native.h  |  5 ++
>  .../dnn/dnn_backend_native_layer_avgpool.c| 10 +++-
>  .../dnn/dnn_backend_native_layer_avgpool.h|  2 +-
>  .../dnn/dnn_backend_native_layer_conv2d.c | 10 +++-
>  .../dnn/dnn_backend_native_layer_conv2d.h |  2 +-
>  .../dnn_backend_native_layer_depth2space.c| 10 +++-
>  .../dnn_backend_native_layer_depth2space.h|  2 +-
>  .../dnn/dnn_backend_native_layer_mathbinary.c | 11 +++-
>  .../dnn/dnn_backend_native_layer_mathbinary.h |  2 +-
>  .../dnn/dnn_backend_native_layer_mathunary.c  | 11 +++-
>  .../dnn/dnn_backend_native_layer_mathunary.h  |  2 +-
>  .../dnn/dnn_backend_native_layer_maximum.c| 10 +++-
>  .../dnn/dnn_backend_native_layer_maximum.h|  2 +-
>  .../dnn/dnn_backend_native_layer_pad.c| 10 +++-
>  .../dnn/dnn_backend_native_layer_pad.h|  2 +-
>  libavfilter/dnn/dnn_backend_native_layers.h   |  2 +-
>  tests/dnn/dnn-layer-avgpool-test.c|  4 +-
>  tests/dnn/dnn-layer-conv2d-test.c |  4 +-
>  tests/dnn/dnn-layer-depth2space-test.c|  2 +-
>  tests/dnn/dnn-layer-mathbinary-test.c |  6 +-
>  tests/dnn/dnn-layer-mathunary-test.c  |  2 +-
>  tests/dnn/dnn-layer-maximum-test.c|  2 +-
>  tests/dnn/dnn-layer-pad-test.c|  6 +-
>  24 files changed, 122 insertions(+), 52 deletions(-)

this patch set looks good to me, will push soon, thanks.
___
ffmpeg-devel mailing list
ffmpeg-devel@ffmpeg.org
https://ffmpeg.org/mailman/listinfo/ffmpeg-devel

To unsubscribe, visit link above, or email
ffmpeg-devel-requ...@ffmpeg.org with subject "unsubscribe".

[FFmpeg-devel] [PATCH V5 2/2] dnn/native: add log error message

2020-08-24 Thread Ting Fu
Signed-off-by: Ting Fu 
---
 libavfilter/dnn/dnn_backend_native.c  | 55 +++
 libavfilter/dnn/dnn_backend_native.h  |  5 ++
 .../dnn/dnn_backend_native_layer_avgpool.c| 10 +++-
 .../dnn/dnn_backend_native_layer_avgpool.h|  2 +-
 .../dnn/dnn_backend_native_layer_conv2d.c | 10 +++-
 .../dnn/dnn_backend_native_layer_conv2d.h |  2 +-
 .../dnn_backend_native_layer_depth2space.c| 10 +++-
 .../dnn_backend_native_layer_depth2space.h|  2 +-
 .../dnn/dnn_backend_native_layer_mathbinary.c | 11 +++-
 .../dnn/dnn_backend_native_layer_mathbinary.h |  2 +-
 .../dnn/dnn_backend_native_layer_mathunary.c  | 11 +++-
 .../dnn/dnn_backend_native_layer_mathunary.h  |  2 +-
 .../dnn/dnn_backend_native_layer_maximum.c| 10 +++-
 .../dnn/dnn_backend_native_layer_maximum.h|  2 +-
 .../dnn/dnn_backend_native_layer_pad.c| 10 +++-
 .../dnn/dnn_backend_native_layer_pad.h|  2 +-
 libavfilter/dnn/dnn_backend_native_layers.h   |  2 +-
 tests/dnn/dnn-layer-avgpool-test.c|  4 +-
 tests/dnn/dnn-layer-conv2d-test.c |  4 +-
 tests/dnn/dnn-layer-depth2space-test.c|  2 +-
 tests/dnn/dnn-layer-mathbinary-test.c |  6 +-
 tests/dnn/dnn-layer-mathunary-test.c  |  2 +-
 tests/dnn/dnn-layer-maximum-test.c|  2 +-
 tests/dnn/dnn-layer-pad-test.c|  6 +-
 24 files changed, 122 insertions(+), 52 deletions(-)

diff --git a/libavfilter/dnn/dnn_backend_native.c b/libavfilter/dnn/dnn_backend_native.c
index 436ce938da..a8fe6b94eb 100644
--- a/libavfilter/dnn/dnn_backend_native.c
+++ b/libavfilter/dnn/dnn_backend_native.c
@@ -28,15 +28,26 @@
 #include "dnn_backend_native_layer_conv2d.h"
 #include "dnn_backend_native_layers.h"
 
+static const AVClass dnn_native_class = {
+.class_name = "dnn_native",
+.item_name  = av_default_item_name,
+.option = NULL,
+.version= LIBAVUTIL_VERSION_INT,
+.category   = AV_CLASS_CATEGORY_FILTER,
+};
+
static DNNReturnType get_input_native(void *model, DNNData *input, const char *input_name)
 {
 NativeModel *native_model = (NativeModel *)model;
+NativeContext *ctx = &native_model->ctx;
 
 for (int i = 0; i < native_model->operands_num; ++i) {
 DnnOperand *oprd = &native_model->operands[i];
 if (strcmp(oprd->name, input_name) == 0) {
-if (oprd->type != DOT_INPUT)
+if (oprd->type != DOT_INPUT) {
+av_log(ctx, AV_LOG_ERROR, "Found \"%s\" in model, but it is not input node\n", input_name);
 return DNN_ERROR;
+}
 input->dt = oprd->data_type;
 av_assert0(oprd->dims[0] == 1);
 input->height = oprd->dims[1];
@@ -47,30 +58,37 @@ static DNNReturnType get_input_native(void *model, DNNData *input, const char *i
 }
 
 // do not find the input operand
+av_log(ctx, AV_LOG_ERROR, "Could not find \"%s\" in model\n", input_name);
 return DNN_ERROR;
 }
 
static DNNReturnType set_input_native(void *model, DNNData *input, const char *input_name)
 {
 NativeModel *native_model = (NativeModel *)model;
+NativeContext *ctx = &native_model->ctx;
 DnnOperand *oprd = NULL;
 
-if (native_model->layers_num <= 0 || native_model->operands_num <= 0)
+if (native_model->layers_num <= 0 || native_model->operands_num <= 0) {
+av_log(ctx, AV_LOG_ERROR, "No operands or layers in model\n");
 return DNN_ERROR;
+}
 
 /* inputs */
 for (int i = 0; i < native_model->operands_num; ++i) {
 oprd = &native_model->operands[i];
 if (strcmp(oprd->name, input_name) == 0) {
-if (oprd->type != DOT_INPUT)
+if (oprd->type != DOT_INPUT) {
+av_log(ctx, AV_LOG_ERROR, "Found \"%s\" in model, but it is not input node\n", input_name);
 return DNN_ERROR;
+}
 break;
 }
 oprd = NULL;
 }
-
-if (!oprd)
+if (!oprd) {
+av_log(ctx, AV_LOG_ERROR, "Could not find \"%s\" in model\n", input_name);
 return DNN_ERROR;
+}
 
 oprd->dims[0] = 1;
 oprd->dims[1] = input->height;
@@ -79,11 +97,15 @@ static DNNReturnType set_input_native(void *model, DNNData *input, const char *i
 
 av_freep(&oprd->data);
 oprd->length = calculate_operand_data_length(oprd);
-if (oprd->length <= 0)
+if (oprd->length <= 0) {
+av_log(ctx, AV_LOG_ERROR, "The input data length overflow\n");
 return DNN_ERROR;
+}
 oprd->data = av_malloc(oprd->length);
-if (!oprd->data)
+if (!oprd->data) {
+av_log(ctx, AV_LOG_ERROR, "Failed to malloc memory for input data\n");
 return DNN_ERROR;
+}
 
 input->data = oprd->data;
 
@@ -150,6 +172,8 @@ DNNModel *ff_dnn_load_model_native(const char *model_filename, const char *optio
 if (!native_model){
 goto fail;
 }
+
+native_model->ctx.class = &dnn_native_class;
 model->model = (void