From 168af40fe9a3cc81c6ee16b3e81f154780c36bdb Mon Sep 17 00:00:00 2001
From: Scheaven <xuepengqiang>
Date: Thu, 03 Jun 2021 15:03:27 +0800
Subject: [PATCH] conv_lstm_layer.h: add bottleneck flag and history layer declarations (v4)

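This revision extends the conv LSTM public API: make_conv_lstm_layer() gains a
bottleneck flag, and the new history layer (make_history_layer plus its
forward/backward and GPU entry points) is declared in the same header.

A minimal call sketch against the updated declarations; the argument values
below are illustrative assumptions only, not taken from any network config:

    /* hypothetical 13x13x256 feature map, 16 time steps, bottleneck disabled */
    layer lstm = make_conv_lstm_layer(1, 13, 13, 256, 256, 1, 16, 3, 1, 1, 1,
                                      LEAKY, 1, 0, 0, 0, 1);

    /* hypothetical history buffer keeping the last 4 frames of the same map */
    layer hist = make_history_layer(1, 13, 13, 256, 4, 16, 1);
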
---
 lib/detecter_tools/darknet/conv_lstm_layer.h |   73 ++++++++++++++++++++----------------
 1 file changed, 40 insertions(+), 33 deletions(-)

diff --git a/lib/detecter_tools/darknet/conv_lstm_layer.h b/lib/detecter_tools/darknet/conv_lstm_layer.h
index d296328..fae59f1 100644
--- a/lib/detecter_tools/darknet/conv_lstm_layer.h
+++ b/lib/detecter_tools/darknet/conv_lstm_layer.h
@@ -1,33 +1,40 @@
-#ifndef CONV_LSTM_LAYER_H
-#define CONV_LSTM_LAYER_H
-
-#include "activations.h"
-#include "layer.h"
-#include "network.h"
-#define USET
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-layer make_conv_lstm_layer(int batch, int h, int w, int c, int output_filters, int groups, int steps, int size, int stride, int dilation, int pad, ACTIVATION activation, int batch_normalize, int peephole, int xnor, int train);
-void resize_conv_lstm_layer(layer *l, int w, int h);
-void free_state_conv_lstm(layer l);
-void randomize_state_conv_lstm(layer l);
-void remember_state_conv_lstm(layer l);
-void restore_state_conv_lstm(layer l);
-
-void forward_conv_lstm_layer(layer l, network_state state);
-void backward_conv_lstm_layer(layer l, network_state state);
-void update_conv_lstm_layer(layer l, int batch, float learning_rate, float momentum, float decay);
-
-#ifdef GPU
-void forward_conv_lstm_layer_gpu(layer l, network_state state);
-void backward_conv_lstm_layer_gpu(layer l, network_state state);
-void update_conv_lstm_layer_gpu(layer l, int batch, float learning_rate, float momentum, float decay, float loss_scale);
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif  // CONV_LSTM_LAYER_H
+#ifndef CONV_LSTM_LAYER_H
+#define CONV_LSTM_LAYER_H
+
+#include "activations.h"
+#include "layer.h"
+#include "network.h"
+#define USET
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+layer make_conv_lstm_layer(int batch, int h, int w, int c, int output_filters, int groups, int steps, int size, int stride, int dilation, int pad, ACTIVATION activation, int batch_normalize, int peephole, int xnor, int bottleneck, int train);
+void resize_conv_lstm_layer(layer *l, int w, int h);
+void free_state_conv_lstm(layer l);
+void randomize_state_conv_lstm(layer l);
+void remember_state_conv_lstm(layer l);
+void restore_state_conv_lstm(layer l);
+
+void forward_conv_lstm_layer(layer l, network_state state);
+void backward_conv_lstm_layer(layer l, network_state state);
+void update_conv_lstm_layer(layer l, int batch, float learning_rate, float momentum, float decay);
+
+layer make_history_layer(int batch, int h, int w, int c, int history_size, int steps, int train);
+void forward_history_layer(layer l, network_state state);
+void backward_history_layer(layer l, network_state state);
+
+#ifdef GPU
+void forward_conv_lstm_layer_gpu(layer l, network_state state);
+void backward_conv_lstm_layer_gpu(layer l, network_state state);
+void update_conv_lstm_layer_gpu(layer l, int batch, float learning_rate, float momentum, float decay, float loss_scale);
+
+void forward_history_layer_gpu(const layer l, network_state state);
+void backward_history_layer_gpu(const layer l, network_state state);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // CONV_LSTM_LAYER_H

--
Gitblit v1.8.0