PHP Classes

File: TEST/TrainingTest/Optimizer_Adam_Test.php

Recommend this page to a friend!
  Classes of Cuthbert Martin Lwinga   PHP Neural Net Library   TEST/TrainingTest/Optimizer_Adam_Test.php   Download  
File: TEST/TrainingTest/Optimizer_Adam_Test.php
Role: Example script
Content type: text/plain
Description: Example script
Class: PHP Neural Net Library
Build, train, evaluate, and use neural networks
Author: Cuthbert Martin Lwinga
Last change:
Date: 6 months ago
Size: 1,322 bytes
 

Contents

Class file image Download
<?php
// Example script: build and train a small neural network on spiral data
// using the Adam optimizer (part of the PHP Neural Net Library test suite).
include_once("../../CLASSES/Headers.php");

use NameSpaceNumpyLight\NumpyLight;
use NameSpaceRandomGenerator\RandomGenerator;
use NameSpaceActivationRelu\Activation_Relu;

// Generate a synthetic 3-class spiral dataset with 100 samples per class.
// $X holds the 2-D feature points, $y the corresponding class labels.
[$X, $y] = NumpyLight::spiral_data(100, 3);

// Create layers and activations
// Network topology: 2 inputs -> 64 hidden units (ReLU) -> 3 output classes.
// NOTE: construction order matters if Layer_Dense draws initial weights from
// a shared random generator — do not reorder these statements.
$dense1 = new Layer_Dense(2, 64);
$activation1 = new Activation_ReLU();
$dense2 = new Layer_Dense(64, 3);
// Combined softmax activation + categorical cross-entropy loss
// (fused for a simpler/cheaper backward pass).
$loss_activation = new Activation_Softmax_Loss_CategoricalCrossentropy();
// Adam optimizer; args are presumably (learning_rate=0.05, decay=5e-7) —
// TODO(review): confirm against the Optimizer_Adam constructor signature.
$optimizer = new Optimizer_Adam(0.05, 5e-7);

// Train the network for 10,001 epochs (0..10000 inclusive).
for ($epoch = 0; $epoch <= 10000; $epoch++) {
    // Forward pass: dense -> ReLU -> dense -> fused softmax + cross-entropy.
    $dense1->forward($X);
    $activation1->forward($dense1->output);
    $dense2->forward($activation1->output);
    $loss = $loss_activation->forward($dense2->output, $y);

    // Classification accuracy of the current predictions vs. true labels.
    $predictions = NumpyLight::accuracy($loss_activation->output, $y);

    // Report progress every 100 epochs.
    if ($epoch % 100 === 0) {
        echo "epoch: $epoch ,\tacc: $predictions\t,loss: $loss,\tlr: "
            . $optimizer->current_learning_rate . " \n";
    }

    // Backward pass: propagate gradients through the layers in reverse order.
    $loss_activation->backward($loss_activation->output, $y);
    $dense2->backward($loss_activation->dinputs);
    $activation1->backward($dense2->dinputs);
    $dense1->backward($activation1->dinputs);

    // Parameter update: pre/post hooks handle the learning-rate decay step.
    $optimizer->pre_update_params();
    $optimizer->update_params($dense1);
    $optimizer->update_params($dense2);
    $optimizer->post_update_params();
}



?>