MCordingley\Regression\Algorithm\GradientDescent\Schedule\Adam::step PHP Метод

step() публичный Метод

public step ( int $featureIndex ) : float
$featureIndex int
Результат float
    /**
     * Computes the Adam step size for a single coefficient.
     *
     * @param int $featureIndex Index of the coefficient whose step is requested.
     * @return float Step size to apply; 0 when the current gradient entry is zero.
     */
    public function step(int $featureIndex) : float
    {
        $gradient = $this->gradient[$featureIndex];

        // A zero gradient produces no movement and would divide by zero below.
        if (!$gradient) {
            return 0;
        }

        // Bias-corrected first (mean) and second (variance) moment estimates.
        $meanHat = $this->means[$featureIndex] / (1.0 - pow($this->meanBeta, $this->iteration));
        $varianceHat = $this->variances[$featureIndex] / (1.0 - pow($this->varianceBeta, $this->iteration));

        /*
         * Need to put the gradient in the denominator here to counter the one in GradientDescent, since Adam takes
         * the unusual approach of not having it at all in the coefficient update step.
         */
        return $this->stepSize * $meanHat / ((sqrt($varianceHat) + $this->eta) * $gradient);
    }

Usage Example

Пример #1
0
 /**
  * Verifies the bias-corrected Adam step after two identical gradient updates.
  */
 public function testStep()
 {
     // Constructor arguments presumably map to (stepSize, eta, meanBeta,
     // varianceBeta) — TODO confirm against the Adam constructor signature.
     $schedule = new Adam(0.01, 1.0E-8, 0.9, 0.999);
     // Two updates with the same single-feature gradient of 5.0.
     $schedule->update([5.0]);
     $schedule->update([5.0]);
     // NOTE(review): with a constant gradient the corrected mean and the square
     // root of the corrected variance both approach the gradient magnitude, so
     // the step is close to stepSize / gradient = 0.01 / 5.0 = 0.002.
     static::assertEquals(0.0019999999959999857, $schedule->step(0));
 }