#ifndef RNN_H_INCLUDED
#define RNN_H_INCLUDED

#include <algorithm>
#include <array>
#include <random>
#include <stdexcept>
#include <string>
#include <vector>

#include <Eigen/Dense>

#include "keyboard.h"

// isize:   width of the input vector fed to the first layer
// hsize:   width of each layer's recurrent state
// intsize: width of the vectors passed between stacked layers
// osize:   width of the output vector produced by the last layer
// L:       number of stacked layers
template <int isize, int hsize, int intsize, int osize, int L>
class rnnStack {
public:
    rnnStack() : initialized(false) {}
    rnnStack(pcg32_k64& R) { randomize(R); }

    void randomize(pcg32_k64& r) {
        initialized = true;
        std::uniform_real_distribution<float> d(-0.1f, 0.1f);
        auto R = [&]() { return d(r); };
        for (int i = 0; i != L; ++i) {
            auto& l = layers[i];
            // The first layer reads the external input and the last layer writes
            // the external output; every other connection uses intsize.
            const int in  = (i == 0)     ? isize : intsize;
            const int out = (i == L - 1) ? osize : intsize;
            l.state     = Eigen::VectorXf::Zero(hsize);
            l.weight_is = Eigen::MatrixXf::NullaryExpr(hsize, in, R);
            l.weight_ss = Eigen::MatrixXf::NullaryExpr(hsize, hsize, R);
            l.weight_so = Eigen::MatrixXf::NullaryExpr(out, hsize, R);
        }
    }

    // in: [
    //   [26 = which key are we in?]
    //   [1  = distance from center of that key]
    //   [1  = current speed]
    //   [2  = acceleration relative to last tick (polar)]
    // ] = 26+1+1+2 = 30 values
    // out: [
    //   [2 = yes,no output]
    // ] = 2 values
    Eigen::VectorXf step(Eigen::VectorXf x) {
        if (!initialized) {
            throw std::logic_error("used before initialization");
        }
        for (auto& l : layers) {
            l.state = (
                l.weight_ss * l.state + l.weight_is * x
            // ).unaryExpr([](float d){return std::max(0.,d);});
            ).array().tanh();
            x = l.weight_so * l.state;
        }
        return x;
    }

    void reset() {
        for (auto& l : layers) {
            l.state = Eigen::VectorXf::Zero(hsize);
        }
    }

    // step()'s over each value in ins and then resets state to zero
    std::vector<Eigen::VectorXf> stepOver(std::vector<Eigen::VectorXf> ins) {
        for (auto& x : ins) {
            x = step(x);
        }
        reset();
        return ins;
    }

    rnnStack mutate(int divergence, pcg32_k64& _r) const {
        if (!initialized) throw std::logic_error("used before initialization");
        rnnStack tmp{*this};
        std::uniform_real_distribution<float> d(-0.01f * divergence, 0.01f * divergence);
        auto R = [&]() { return d(_r); };
        auto m_impl = [&R](float d) { return d + R(); };
        for (auto& l : tmp.layers) {
            // unaryExpr() does not modify in place, so assign the result back.
            l.weight_is = l.weight_is.unaryExpr(m_impl);
            l.weight_ss = l.weight_ss.unaryExpr(m_impl);
            l.weight_so = l.weight_so.unaryExpr(m_impl);
        }
        return tmp;
    }

    rnnStack mutate_toward(const rnnStack& other, float weight, pcg32_k64& _r) const {
        if (!initialized) return other;
        std::uniform_real_distribution<float> d(
            std::min(0.f, weight), std::max(0.f, weight)
        );
        // Move each weight a random fraction (at most `weight`) toward other's value.
        auto R = [&](float l, float r) { return l + d(_r) * (r - l); };
        // build<C>(first1, last1, first2, f) is a project helper that zips two
        // ranges through a binary op into a container of type C.
        auto breedMatrices = [&R](const Eigen::MatrixXf& l, const Eigen::MatrixXf& r)
            -> Eigen::MatrixXf {
            return Eigen::Map<const Eigen::MatrixXf>(
                build<std::vector<float>>(
                    l.data(), l.data() + l.size(), r.data(), R
                ).data(),
                l.rows(), l.cols()
            );
        };
        rnnStack tmp{*this};
        tmp.layers = build<std::array<layer, L>>(
            layers.begin(), layers.end(), other.layers.begin(),
            [&](const layer& l, const layer& r) {
                return layer{
                    Eigen::VectorXf::Zero(hsize),
                    breedMatrices(l.weight_is, r.weight_is),
                    breedMatrices(l.weight_ss, r.weight_ss),
                    breedMatrices(l.weight_so, r.weight_so),
                };
            }
        );
        return tmp;
    }

private:
    struct layer {
        Eigen::VectorXf state;      // hsize
        Eigen::MatrixXf weight_is,  // hsize x isize (hsize x intsize after the first layer)
                        weight_ss,  // hsize x hsize
                        weight_so;  // osize x hsize (intsize x hsize before the last layer)
    };
    std::array<layer, L> layers;
    bool initialized;

public:
    // Uniform crossover: each weight is taken from either p1 or p2 with equal probability.
    friend rnnStack breed(const rnnStack& p1, const rnnStack& p2, pcg32_k64& _r) {
        if (!(p1.initialized && p2.initialized))
            throw std::logic_error("used before initialization");
        auto R = [&_r](float l, float r) { return _r(2) ? l : r; };
        // Materialize into a MatrixXf before the temporary vector goes out of scope.
        auto breedMatrices = [&R](const Eigen::MatrixXf& l, const Eigen::MatrixXf& r)
            -> Eigen::MatrixXf {
            return Eigen::Map<const Eigen::MatrixXf>(
                build<std::vector<float>>(l.data(), l.data() + l.size(), r.data(), R).data(),
                l.rows(), l.cols()
            );
        };
        auto tmp = p1;
        tmp.layers = build<std::array<layer, L>>(
            p1.layers.begin(), p1.layers.end(), p2.layers.begin(),
            [&](const layer& l, const layer& r) {
                return layer{
                    Eigen::VectorXf::Zero(hsize),
                    breedMatrices(l.weight_is, r.weight_is),
                    breedMatrices(l.weight_ss, r.weight_ss),
                    breedMatrices(l.weight_so, r.weight_so),
                };
            }
        );
        return tmp;
    }

    const std::array<layer, L>& exportLayers() { return layers; }
};
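// Usage sketch. The template arguments below (30 inputs, 64 hidden units,
// 32 values between layers, 2 outputs, 2 layers) are illustrative choices,
// not sizes fixed by this header; only the 30-value input and 2-value output
// encodings documented at step() are given.
//
//   pcg32_k64 rng;
//   rnnStack<30, 64, 32, 2, 2> net(rng);              // randomly initialized
//   Eigen::VectorXf tick = Eigen::VectorXf::Zero(30); // one encoded input tick
//   Eigen::VectorXf yesNo = net.step(tick);           // 2 values: yes, no
//   net.reset();                                      // clear recurrent state
//
//   auto child   = net.mutate(5, rng);                // jittered copy
//   auto blended = net.mutate_toward(child, 0.5f, rng);
//   auto bred    = breed(net, child, rng);            // uniform crossover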
// Damerau-Levenshtein distance (edit distance that also counts a transposition
// of two adjacent characters as a single edit)
int editDistance(const std::string&, const std::string&);

#endif // RNN_H_INCLUDED