Update include path / Change function type in Loss

sosokker 2023-08-26 02:09:42 +07:00
parent 874ba254ba
commit ae1da7bfd9
3 changed files with 7 additions and 7 deletions

View File

@@ -1,4 +1,4 @@
-#include <Eigen/Dense>
+#include "../../include/Eigen/Dense"
 #include "Layers.hpp"
 class DenseLayer : public Layer {

View File

@@ -1,7 +1,7 @@
 #ifndef LAYERS_HPP
 #define LAYERS_HPP
-#include <Eigen/Dense>
+#include "../../include/Eigen/Dense"
 class Layer {
 public:
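A note on the include change (the same edit appears in all three files): a quoted relative include is, on the common compilers, resolved against the directory of the including file first, so "../../include/Eigen/Dense" presumes a vendored copy of Eigen in an include/ directory two levels above these sources. That layout is not shown in this diff; the sketch below spells out the assumption (paths are hypothetical) and the usual alternative of keeping the angle-bracket form.

// Assumed layout (hypothetical; the real paths are not shown in this diff):
//
//   project/
//     include/Eigen/Dense        <- vendored Eigen headers
//     src/nn/Layers.hpp          <- an including file two directories below project/
//
// The quoted form is typically searched relative to the including file's directory
// first, so from src/nn/ it climbs two levels and lands in include/Eigen/.
#include "../../include/Eigen/Dense"

// Equivalent alternative: keep the original include and point the compiler at the
// vendored headers instead, e.g.  g++ -I project/include ...
// #include <Eigen/Dense>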

View File

@@ -1,16 +1,16 @@
-#include <Eigen/Dense>
+#include "../../include/Eigen/Dense"
 #include <cmath>
-double mse(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
-    return (y_true - y_pred).squaredNorm() / y_true.size();
+Eigen::VectorXd mse(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
+    return (y_true - y_pred).array().square() / y_true.size();
 }
 Eigen::VectorXd mse_prime(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
     return 2.0 * (y_pred - y_true) / y_true.size();
 }
-double binary_cross_entropy(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
-    return -((y_true.array() * y_pred.array().log()) + ((1 - y_true.array()) * (1 - y_pred.array()).log())).mean();
+Eigen::VectorXd binary_cross_entropy(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
+    return -((y_true.array() * y_pred.array().log()) + ((1 - y_true.array()) * (1 - y_pred.array()).log())) / y_true.size();
 }
 Eigen::VectorXd binary_cross_entropy_prime(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
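The type change turns both losses into per-element vectors scaled by 1/n, so summing the returned vector recovers the scalar values the old double versions produced. Since the diff truncates the rest of the file, here is a minimal self-contained sketch (not the committed file) of element-wise losses with these signatures; it uses the plain <Eigen/Dense> include so it builds outside this repository layout, spells the array-to-vector conversion out with an explicit .matrix(), and uses a double cast of size() for clarity. The binary_cross_entropy_prime body is not shown in the diff; the version below is just the textbook gradient of the mean BCE, included to make the sketch complete.

#include <Eigen/Dense>

// Per-element squared error scaled by 1/n; summing the result gives the usual scalar MSE.
Eigen::VectorXd mse(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
    const double n = static_cast<double>(y_true.size());
    return ((y_true - y_pred).array().square() / n).matrix();
}

// Gradient of the scalar MSE with respect to y_pred.
Eigen::VectorXd mse_prime(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
    return 2.0 * (y_pred - y_true) / static_cast<double>(y_true.size());
}

// Per-element binary cross-entropy scaled by 1/n (assumes y_pred entries lie strictly in (0, 1)).
Eigen::VectorXd binary_cross_entropy(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
    const double n = static_cast<double>(y_true.size());
    const Eigen::ArrayXd t = y_true.array();
    const Eigen::ArrayXd p = y_pred.array();
    return (-(t * p.log() + (1.0 - t) * (1.0 - p).log()) / n).matrix();
}

// Textbook gradient of the mean BCE with respect to y_pred (hypothetical; body not in the diff).
Eigen::VectorXd binary_cross_entropy_prime(const Eigen::VectorXd& y_true, const Eigen::VectorXd& y_pred) {
    const double n = static_cast<double>(y_true.size());
    const Eigen::ArrayXd t = y_true.array();
    const Eigen::ArrayXd p = y_pred.array();
    return (((1.0 - t) / (1.0 - p) - t / p) / n).matrix();
}

If a scalar training loss is needed from these per-element versions, taking .sum() of the returned vector gives the same value the old double-returning functions computed.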