2using System.Collections.Generic;
43 m_blobSigmoidInput =
new Blob<T>(cuda, log);
45 m_blobSigmoidOutput =
new Blob<T>(cuda, log);
72 base.LayerSetUp(colBottom, colTop);
74 m_colSigmoidBottom.
Clear();
75 m_colSigmoidBottom.
Add(m_blobSigmoidInput);
76 m_colSigmoidTop.
Clear();
77 m_colSigmoidTop.
Add(m_blobSigmoidOutput);
78 m_sigmoidLayer.LayerSetUp(m_colSigmoidBottom, m_colSigmoidTop);
88 base.Reshape(colBottom, colTop);
91 m_sigmoidLayer.Reshape(m_colSigmoidBottom, m_colSigmoidTop);
109 long hBottomData = colBottom[0].gpu_data;
111 long hTopData = colTop[0].mutable_gpu_data;
112 int nCount = colBottom[0].count();
115 m_cuda.copy(nCount, hBottomData, hSigmoidInputData);
116 m_cuda.scal(nCount, dfBeta, hSigmoidInputData);
117 m_sigmoidLayer.Forward(m_colSigmoidBottom, m_colSigmoidTop);
118 m_cuda.mul(nCount, hBottomData, m_blobSigmoidOutput.
gpu_data, hTopData);
142 if (rgbPropagateDown[0])
144 long hTopData = colTop[0].gpu_data;
145 long hTopDiff = colTop[0].gpu_diff;
146 long hSigmoidOutputData = m_blobSigmoidOutput.
gpu_data;
147 long hBottomDiff = colBottom[0].mutable_gpu_diff;
148 int nCount = colBottom[0].count();
151 m_cuda.swish_bwd(nCount, hTopDiff, hTopData, hSigmoidOutputData, hBottomDiff, dfBeta);
The Log class provides general output in text form.
The BlobCollection contains a list of Blobs.
void Add(Blob< T > b)
Add a new Blob to the collection.
void Clear(bool bDispose=false)
Remove all items from the collection.
The Blob is the main holder of data that moves through the Layers of the Net.
long mutable_gpu_data
Returns the data GPU handle used by the CudaDnn connection.
void ReshapeLike(Blob< T > b, bool? bUseHalfSize=null)
Reshape this Blob to have the same shape as another Blob.
string Name
Get/set the name of the Blob.
virtual void Dispose(bool bDisposing)
Releases all resources used by the Blob (including both GPU and Host).
long gpu_data
Returns the data GPU handle used by the CudaDnn connection.
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
LayerParameter m_param
Specifies the LayerParameter describing the Layer.
CudaDnn< T > m_cuda
Specifies the CudaDnn connection to Cuda.
LayerParameter.LayerType m_type
Specifies the Layer type.
The NeuronLayer is an interface for layers that take one blob as input (x) and produce only equally-s...
The SigmoidLayer is a neuron layer that calculates the sigmoid function, a classic choice for neural n...
The SwishLayer provides a novel activation function that tends to work better than ReLU....
override void forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Forward computation
override void backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Computes the error gradient w.r.t. the Swish value inputs.
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the layer.
override void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Setup the layer.
SwishLayer(CudaDnn< T > cuda, Log log, LayerParameter p)
The SwishLayer constructor.
override void dispose()
Release all resources used.
Engine engine
Specifies the Engine in use.
Specifies the base parameter for all layers.
string name
Specifies the name of this LayerParameter.
SigmoidParameter sigmoid_param
Returns the parameter set when initialized with LayerType.SIGMOID
LayerType
Specifies the layer type.
SwishParameter swish_param
Returns the parameter set when initialized with LayerType.SWISH
double beta
Specifies the beta value for the Swish activation function.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
The MyCaffe.common namespace contains common MyCaffe classes.
The MyCaffe.layers namespace contains all layers that have a solidified code base,...
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...