From 09a17dfa249b264f7c1d0b89c82d9f2a6cfbf745 Mon Sep 17 00:00:00 2001
From: sosokker
Date: Wed, 23 Aug 2023 23:49:44 +0700
Subject: [PATCH] Update Layers/ Use as parent class/ Add Dense Layers

---
 src/Activation/Activation.hpp | 16 +++++++++
 src/Layers/DenseLayer.cpp     | 26 +++++++++++++++
 src/Layers/DenseLayer.hpp     | 17 ++++++++++
 src/Layers/Layers.hpp         | 63 ++++++++++-------------------
 4 files changed, 76 insertions(+), 46 deletions(-)
 create mode 100644 src/Activation/Activation.hpp
 create mode 100644 src/Layers/DenseLayer.cpp
 create mode 100644 src/Layers/DenseLayer.hpp

diff --git a/src/Activation/Activation.hpp b/src/Activation/Activation.hpp
new file mode 100644
index 0000000..e51f6f7
--- /dev/null
+++ b/src/Activation/Activation.hpp
@@ -0,0 +1,16 @@
+#include <functional>
+#include <Eigen/Dense>
+#include "../Layers/Layers.hpp"
+
+class Activation : public Layer {
+public:
+    Activation(std::function<Eigen::VectorXd(const Eigen::VectorXd&)> activation,
+               std::function<Eigen::VectorXd(const Eigen::VectorXd&)> activation_prime);
+
+    void forward(const Eigen::VectorXd& input) override;
+    void backward(const Eigen::VectorXd& output_gradient, double learning_rate) override;
+
+private:
+    std::function<Eigen::VectorXd(const Eigen::VectorXd&)> activation;
+    std::function<Eigen::VectorXd(const Eigen::VectorXd&)> activation_prime;
+};
diff --git a/src/Layers/DenseLayer.cpp b/src/Layers/DenseLayer.cpp
new file mode 100644
index 0000000..14ec230
--- /dev/null
+++ b/src/Layers/DenseLayer.cpp
@@ -0,0 +1,26 @@
+#include "DenseLayer.hpp"
+
+DenseLayer::DenseLayer(int input_size, int output_size) {
+    weights = Eigen::MatrixXd::Random(output_size, input_size);
+    bias = Eigen::VectorXd::Random(output_size);
+}
+
+void DenseLayer::forward(const Eigen::VectorXd& input) {
+    this->input = input;
+    output = weights * input + bias;
+}
+
+void DenseLayer::backward(const Eigen::VectorXd& output_gradient, double learning_rate) {
+    Eigen::MatrixXd weights_gradient = output_gradient * input.transpose();
+    input_gradient = weights.transpose() * output_gradient;
+    weights -= learning_rate * weights_gradient;
+    bias -= learning_rate * output_gradient;
+}
+
+const Eigen::VectorXd& DenseLayer::getOutput() const {
+    return output;
+}
+
+const Eigen::VectorXd& DenseLayer::getInputGradient() const {
+    return input_gradient;
+}
diff --git a/src/Layers/DenseLayer.hpp b/src/Layers/DenseLayer.hpp
new file mode 100644
index 0000000..b2d30e4
--- /dev/null
+++ b/src/Layers/DenseLayer.hpp
@@ -0,0 +1,17 @@
+#include <Eigen/Dense>
+#include "Layers.hpp"
+
+class DenseLayer : public Layer {
+public:
+    DenseLayer(int input_size, int output_size);
+
+    void forward(const Eigen::VectorXd& input) override;
+    void backward(const Eigen::VectorXd& output_gradient, double learning_rate) override;
+
+    const Eigen::VectorXd& getOutput() const;
+    const Eigen::VectorXd& getInputGradient() const;
+
+private:
+    Eigen::MatrixXd weights;
+    Eigen::VectorXd bias;
+};
diff --git a/src/Layers/Layers.hpp b/src/Layers/Layers.hpp
index 9de4a44..a5f1239 100644
--- a/src/Layers/Layers.hpp
+++ b/src/Layers/Layers.hpp
@@ -1,51 +1,22 @@
-#pragma once
-#include <vector>
-#include <utility>
+#ifndef LAYERS_HPP
+#define LAYERS_HPP
+
+#include <Eigen/Dense>
 
 class Layer {
 public:
-    virtual void forward(const std::vector<double>& inputs, bool training) = 0;
-    virtual void backward(std::vector<double>& dvalues) = 0;
-    virtual std::pair<std::vector<std::vector<double>>, std::vector<std::vector<double>>> get_parameters() const = 0;
-    virtual void set_parameters(const std::vector<std::vector<double>>& weights, const std::vector<std::vector<double>>& biases) = 0;
+    Layer();
+
+    virtual void forward(const Eigen::VectorXd& input_data);
+    virtual void backward(const Eigen::VectorXd& output_gradient, double learning_rate);
+
+    const Eigen::VectorXd& getOutput() const;
+    const Eigen::VectorXd& getInputGradient() const;
+
+protected:
+    Eigen::VectorXd input;
+    Eigen::VectorXd output;
+    Eigen::VectorXd input_gradient;
 };
 
-class Layer_Dense : public Layer {
-private:
-    double weight_regularizer_l1, weight_regularizer_l2,
-           bias_regularizer_l1, bias_regularizer_l2;
-    std::vector<std::vector<double>> weights, biases;
-    std::vector<double> inputs;
-
-public:
-    Layer_Dense(int n_inputs, int n_neurons,
-                double weight_regularizer_l1,
-                double weight_regularizer_l2,
-                double bias_regularizer_l1,
-                double bias_regularizer_l2);
-
-    void forward(const std::vector<double>& inputs, bool training) override;
-    void backward(std::vector<double>& dvalues) override;
-    std::pair<std::vector<std::vector<double>>, std::vector<std::vector<double>>> get_parameters() const override;
-    void set_parameters(const std::vector<std::vector<double>>& weights, const std::vector<std::vector<double>>& biases) override;
-};
-
-class Layer_Dropout : public Layer {
-private:
-    double rate;
-
-public:
-    Layer_Dropout(double rate);
-
-    void forward(const std::vector<double>& inputs, bool training) override;
-    void backward(std::vector<double>& dvalues) override;
-};
-
-
-class Layer_Input {
-private:
-    std::vector<double> output;
-
-public:
-    void forward(const std::vector<double>& inputs, bool training);
-};
\ No newline at end of file
+#endif // LAYERS_HPP
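
For context, a minimal sketch of how the new DenseLayer API might be driven once this patch is applied. The driver file name (main.cpp), the training loop, and the MSE gradient are illustrative assumptions, not part of the patch; linking also assumes a Layers.cpp that defines the Layer base members, which is not included here. Paths assume compilation from the repository root with src/Layers and Eigen on the include path.

// main.cpp -- hypothetical driver, not part of this patch
#include <iostream>
#include <Eigen/Dense>
#include "DenseLayer.hpp"

int main() {
    DenseLayer layer(3, 2);              // 3 inputs -> 2 outputs
    Eigen::VectorXd x(3), target(2);
    x << 0.5, -1.0, 2.0;
    target << 1.0, 0.0;

    for (int step = 0; step < 100; ++step) {
        layer.forward(x);
        // Gradient of the (assumed) squared-error loss ||y - t||^2
        // with respect to the output y is 2 * (y - t).
        Eigen::VectorXd grad = 2.0 * (layer.getOutput() - target);
        layer.backward(grad, /*learning_rate=*/0.05);
    }

    layer.forward(x);                    // refresh output with updated weights
    std::cout << "output after training:\n" << layer.getOutput() << "\n";
    return 0;
}

Note that backward() applies the parameter update in place and exposes the propagated gradient via getInputGradient(), so a multi-layer network would chain layers by feeding one layer's getInputGradient() into the backward() of the layer before it.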