人工神经网络框架AForge学习(二):Sigmoid激活函数
2009-03-06 17:09
676 查看
Code
namespace AForge.Neuro
{
using System;
/// <summary>
/// Sigmoid activation function.
/// </summary>
///
/// <remarks>The class represents the sigmoid activation function defined by
/// the following expression:<br />
/// <code>
///                 1
/// f(x) = ------------------
///        1 + exp(-alpha * x)
///
///           alpha * exp(-alpha * x)
/// f'(x) = --------------------------- = alpha * f(x) * (1 - f(x))
///          (1 + exp(-alpha * x))^2
/// </code>
/// Output range of the function: <b>[0, 1]</b><br /><br />
/// Functions graph:<br />
/// <img src="sigmoid.bmp" width="242" height="172" />
/// </remarks>
public class SigmoidFunction : IActivationFunction
{
    // Steepness coefficient of the sigmoid curve (larger value => steeper curve).
    private double alpha = 2;

    /// <summary>
    /// Sigmoid's alpha value.
    /// </summary>
    ///
    /// <remarks>The value determines steepness of the function. Default value: <b>2</b>.
    /// </remarks>
    public double Alpha
    {
        get { return alpha; }
        set { alpha = value; }
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="SigmoidFunction"/> class
    /// using the default alpha value.
    /// </summary>
    public SigmoidFunction( ) { }

    /// <summary>
    /// Initializes a new instance of the <see cref="SigmoidFunction"/> class.
    /// </summary>
    ///
    /// <param name="alpha">Sigmoid's alpha value.</param>
    public SigmoidFunction( double alpha )
    {
        this.alpha = alpha;
    }

    /// <summary>
    /// Calculates function value.
    /// </summary>
    ///
    /// <param name="x">Function input value.</param>
    ///
    /// <returns>Function output value, <i>f(x)</i>.</returns>
    ///
    /// <remarks>The method calculates function value at point <b>x</b>.</remarks>
    ///
    public double Function( double x )
    {
        return 1.0 / ( 1.0 + Math.Exp( -alpha * x ) );
    }

    /// <summary>
    /// Calculates function derivative.
    /// </summary>
    ///
    /// <param name="x">Function input value.</param>
    ///
    /// <returns>Function derivative, <i>f'(x)</i>.</returns>
    ///
    /// <remarks>The method calculates function derivative at point <b>x</b>.</remarks>
    ///
    public double Derivative( double x )
    {
        // Reuse f(x): the sigmoid's derivative is alpha * f(x) * (1 - f(x)),
        // avoiding a second call to Math.Exp.
        double value = Function( x );
        return alpha * value * ( 1.0 - value );
    }

    /// <summary>
    /// Calculates function derivative.
    /// </summary>
    ///
    /// <param name="y">Function output value - the value, which was obtained
    /// with the help of <see cref="Function"/> method.</param>
    ///
    /// <returns>Function derivative, <i>f'(x)</i>.</returns>
    ///
    /// <remarks>The method calculates the same derivative value as the
    /// <see cref="Derivative"/> method, but it takes the function value
    /// <b>y = f(x)</b> (previously computed by <see cref="Function"/>) instead of
    /// the input <b>x</b>. <i>(Some applications need both the function value and
    /// its derivative, so they can save computation by using this method to
    /// calculate the derivative.)</i>
    /// </remarks>
    ///
    public double Derivative2( double y )
    {
        return alpha * y * ( 1.0 - y );
    }
}
}
namespace AForge.Neuro
{
using System;
/// <summary>
/// Sigmoid activation function.
/// </summary>
///
/// <remarks>The class represents the sigmoid activation function defined by
/// the following expression:<br />
/// <code>
///                 1
/// f(x) = ------------------
///        1 + exp(-alpha * x)
///
///           alpha * exp(-alpha * x)
/// f'(x) = --------------------------- = alpha * f(x) * (1 - f(x))
///          (1 + exp(-alpha * x))^2
/// </code>
/// Output range of the function: <b>[0, 1]</b><br /><br />
/// Functions graph:<br />
/// <img src="sigmoid.bmp" width="242" height="172" />
/// </remarks>
public class SigmoidFunction : IActivationFunction
{
    // Steepness coefficient of the sigmoid curve (larger value => steeper curve).
    private double alpha = 2;

    /// <summary>
    /// Sigmoid's alpha value.
    /// </summary>
    ///
    /// <remarks>The value determines steepness of the function. Default value: <b>2</b>.
    /// </remarks>
    public double Alpha
    {
        get { return alpha; }
        set { alpha = value; }
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="SigmoidFunction"/> class
    /// using the default alpha value.
    /// </summary>
    public SigmoidFunction( ) { }

    /// <summary>
    /// Initializes a new instance of the <see cref="SigmoidFunction"/> class.
    /// </summary>
    ///
    /// <param name="alpha">Sigmoid's alpha value.</param>
    public SigmoidFunction( double alpha )
    {
        this.alpha = alpha;
    }

    /// <summary>
    /// Calculates function value.
    /// </summary>
    ///
    /// <param name="x">Function input value.</param>
    ///
    /// <returns>Function output value, <i>f(x)</i>.</returns>
    ///
    /// <remarks>The method calculates function value at point <b>x</b>.</remarks>
    ///
    public double Function( double x )
    {
        return 1.0 / ( 1.0 + Math.Exp( -alpha * x ) );
    }

    /// <summary>
    /// Calculates function derivative.
    /// </summary>
    ///
    /// <param name="x">Function input value.</param>
    ///
    /// <returns>Function derivative, <i>f'(x)</i>.</returns>
    ///
    /// <remarks>The method calculates function derivative at point <b>x</b>.</remarks>
    ///
    public double Derivative( double x )
    {
        // Reuse f(x): the sigmoid's derivative is alpha * f(x) * (1 - f(x)),
        // avoiding a second call to Math.Exp.
        double value = Function( x );
        return alpha * value * ( 1.0 - value );
    }

    /// <summary>
    /// Calculates function derivative.
    /// </summary>
    ///
    /// <param name="y">Function output value - the value, which was obtained
    /// with the help of <see cref="Function"/> method.</param>
    ///
    /// <returns>Function derivative, <i>f'(x)</i>.</returns>
    ///
    /// <remarks>The method calculates the same derivative value as the
    /// <see cref="Derivative"/> method, but it takes the function value
    /// <b>y = f(x)</b> (previously computed by <see cref="Function"/>) instead of
    /// the input <b>x</b>. <i>(Some applications need both the function value and
    /// its derivative, so they can save computation by using this method to
    /// calculate the derivative.)</i>
    /// </remarks>
    ///
    public double Derivative2( double y )
    {
        return alpha * y * ( 1.0 - y );
    }
}
}
相关文章推荐
- 机器学习笔记-神经网络中激活函数(activation function)对比--Sigmoid、ReLu,tanh
- python绘制神经网络中的Sigmoid和Tanh激活函数图像(附代码)
- 神经网络与深度学习之激活函数
- 深度学习-基础概念:神经元(Neurons)、Sigmoid 函数与神经网络基本结构
- 神经网络中的激活函数(activation function)-Sigmoid, ReLu, TanHyperbolic(tanh), softmax, softplus
- 神经网络中的激活函数——加入一些非线性的激活函数,整个网络中就引入了非线性部分,sigmoid 和 tanh作为激活函数的话,一定要注意一定要对 input 进行归一话,但是 ReLU 并不需要输入归一化
- 机器学习-神经网络之激活函数(Activation Function)
- 深度学习小白——神经网络1(激活函数)
- 深度学习/神经神经网络常用激活函数总结
- 论文学习:二值神经网络BNN-用+1或-1限制的权值和激活函数来训练神经网络
- 神经网络常用激活函数对比:sigmoid VS softmax(附python源码)
- [DeeplearningAI笔记]神经网络与深度学习3.2_3.11(激活函数)浅层神经网络
- 神经网络学习(五)优化方法:激活函数
- 神经网络之激活函数(sigmoid、tanh、ReLU)
- AI神经网络激活函数sigmoid及matlab的sigmf
- 神经网络学习笔记 - 激活函数的作用、定义和微分证明
- 深度学习笔记之神经网络、激活函数、目标函数和深度的初步认识
- 深度学习神经网络激活函数的优点
- 机器学习笔记:形象的解释神经网络激活函数的作用是什么?
- 深度学习中的数学与技巧(13):神经网络之激活函数