using System.Collections.Generic;
public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    base.Reshape(colBottom, colTop);

    // The label blob (bottom[1]) must hold exactly one label per item.
    m_log.CHECK_EQ(1, colBottom[1].channels, "The bottom[1] should have 1 channel.");
    m_log.CHECK_EQ(1, colBottom[1].height, "The bottom[1] should have height = 1.");
    m_log.CHECK_EQ(1, colBottom[1].width, "The bottom[1] should have width = 1.");
}
protected override void forward(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    int nNum = colBottom[0].num;
    int nDim = colBottom[0].count() / nNum;
    double dfLoss = 0;

    if (typeof(T) == typeof(double))
    {
        double[] rgBottomData = (double[])Convert.ChangeType(colBottom[0].update_cpu_data(), typeof(double[]));
        double[] rgBottomLabel = (double[])Convert.ChangeType(colBottom[1].update_cpu_data(), typeof(double[]));

        for (int i = 0; i < nNum; i++)
        {
            // Accumulate the negative log of the predicted probability of the
            // true label, clamped at kLOG_THRESHOLD to avoid log(0).
            int nLabel = (int)rgBottomLabel[i];
            double dfProb = Math.Max(rgBottomData[i * nDim + nLabel], kLOG_THRESHOLD);
            dfLoss -= Math.Log(dfProb);
        }
    }
    else
    {
        float[] rgBottomData = (float[])Convert.ChangeType(colBottom[0].update_cpu_data(), typeof(float[]));
        float[] rgBottomLabel = (float[])Convert.ChangeType(colBottom[1].update_cpu_data(), typeof(float[]));

        for (int i = 0; i < nNum; i++)
        {
            int nLabel = (int)rgBottomLabel[i];
            double dfProb = Math.Max(rgBottomData[i * nDim + nLabel], (float)kLOG_THRESHOLD);
            dfLoss -= Math.Log(dfProb);
        }
    }

    // Average the loss over the batch and store it in the single top element.
    colTop[0].SetData(dfLoss / nNum, 0);
}
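
For reference, the forward pass above is the standard multinomial logistic (negative log-likelihood) loss. With N = nNum items, p_{n,l_n} the predicted probability of the true label l_n for item n, and the kLOG_THRESHOLD clamp made explicit:

$$E = -\frac{1}{N} \sum_{n=1}^{N} \log\Big(\max\big(p_{n,\,l_n},\; k_{\mathrm{LOG\_THRESHOLD}}\big)\Big)$$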
protected override void backward(BlobCollection<T> colTop, List<bool> rgbPropagateDown, BlobCollection<T> colBottom)
{
    // The loss cannot be back-propagated to the label inputs.
    if (rgbPropagateDown[1])
        m_log.FAIL(type.ToString() + " Layer cannot backpropagate to label inputs.");

    if (rgbPropagateDown[0])
    {
        int nNum = colBottom[0].num;
        int nDim = colBottom[0].count() / nNum;
        double dfScale = -1 * convertD(colTop[0].GetDiff(0)) / nNum;

        if (typeof(T) == typeof(double))
        {
            double[] rgBottomData = (double[])Convert.ChangeType(colBottom[0].update_cpu_data(), typeof(double[]));
            double[] rgBottomLabel = (double[])Convert.ChangeType(colBottom[1].update_cpu_data(), typeof(double[]));
            double[] rgBottomDiff = (double[])Convert.ChangeType(colBottom[0].mutable_cpu_diff, typeof(double[]));

            for (int i = 0; i < nNum; i++)
            {
                // The gradient is nonzero only at the true-label entry of each item.
                int nLabel = (int)rgBottomLabel[i];
                double dfProb = Math.Max(rgBottomData[i * nDim + nLabel], kLOG_THRESHOLD);
                rgBottomDiff[i * nDim + nLabel] = dfScale / dfProb;
            }

            colBottom[0].mutable_cpu_diff = (T[])Convert.ChangeType(rgBottomDiff, typeof(T[]));
        }
        else
        {
            float[] rgBottomData = (float[])Convert.ChangeType(colBottom[0].update_cpu_data(), typeof(float[]));
            float[] rgBottomLabel = (float[])Convert.ChangeType(colBottom[1].update_cpu_data(), typeof(float[]));
            float[] rgBottomDiff = (float[])Convert.ChangeType(colBottom[0].mutable_cpu_diff, typeof(float[]));

            for (int i = 0; i < nNum; i++)
            {
                int nLabel = (int)rgBottomLabel[i];
                double dfProb = Math.Max(rgBottomData[i * nDim + nLabel], kLOG_THRESHOLD);
                rgBottomDiff[i * nDim + nLabel] = (float)(dfScale / dfProb);
            }

            colBottom[0].mutable_cpu_diff = (T[])Convert.ChangeType(rgBottomDiff, typeof(T[]));
        }
    }
}
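
With \lambda = colTop[0].GetDiff(0) (the loss weight folded into dfScale), the backward pass above implements:

$$\frac{\partial E}{\partial p_{n,\,l_n}} = -\frac{\lambda}{N \cdot \max\big(p_{n,\,l_n},\; k_{\mathrm{LOG\_THRESHOLD}}\big)}, \qquad \frac{\partial E}{\partial p_{n,\,j}} = 0 \quad (j \neq l_n)$$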
The Log class provides general output in text form.
void FAIL(string str)
Causes a failure which throws an exception with the descriptive text.
void CHECK_EQ(double df1, double df2, string str)
Tests whether one number is equal to another.
The BlobCollection contains a list of Blobs.
void SetData(double df)
Set all blob data to the value specified.
void SetDiff(double df)
Set all blob diff to the value specified.
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
Log m_log
Specifies the Log for output.
LayerParameter.LayerType type
Returns the LayerType of this Layer.
double convertD(T df)
Converts a generic to a double value.
LayerParameter.LayerType m_type
Specifies the Layer type.
The LossLayer provides an interface for Layers that take two blobs as input – usually (1) prediction...
const double kLOG_THRESHOLD
Specifies the minimum threshold for loss values.
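
The clamp matters because Math.Log(0) returns negative infinity, which would poison the loss sum and any gradient derived from it. A minimal standalone illustration; the 1e-20 value here is an assumption for demonstration only, not necessarily the constant MyCaffe defines:

using System;

class LogThresholdDemo
{
    static void Main()
    {
        const double kLOG_THRESHOLD = 1e-20; // assumed value, for illustration only
        double dfProb = 0.0;                 // a degenerate predicted probability

        Console.WriteLine(Math.Log(dfProb));                           // -Infinity: would poison the loss sum
        Console.WriteLine(Math.Log(Math.Max(dfProb, kLOG_THRESHOLD))); // ~ -46.05: loss stays finite
    }
}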
The MultinomialLogisticLossLayer computes the multinomial logistic loss for a one-of-many classification...
override void backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Computes the multinomial logistic loss error gradient w.r.t. the predictions.
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the bottom (input) and top (output) blobs.
override void forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
The forward computation.
MultinomialLogisticLossLayer(CudaDnn< T > cuda, Log log, LayerParameter p)
Constructor.
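
To make the computation concrete without the CudaDnn/Blob machinery, here is a self-contained sketch of the forward loss on plain arrays; the class and method names are illustrative only and are not part of the MyCaffe API:

using System;

static class MultinomialLogisticLossSketch
{
    const double kLogThreshold = 1e-20; // assumed clamp value, for illustration only

    // rgProb: row-major nNum x nDim probabilities; rgLabel: one true class index per item.
    public static double Forward(double[] rgProb, int[] rgLabel, int nNum, int nDim)
    {
        double dfLoss = 0;

        for (int i = 0; i < nNum; i++)
            dfLoss -= Math.Log(Math.Max(rgProb[i * nDim + rgLabel[i]], kLogThreshold));

        return dfLoss / nNum; // average over the batch, as in forward() above
    }

    static void Main()
    {
        double[] rgProb = { 0.7, 0.2, 0.1,   // item 0: true class 0 predicted at 0.7
                            0.1, 0.8, 0.1 }; // item 1: true class 1 predicted at 0.8
        int[] rgLabel = { 0, 1 };

        Console.WriteLine(Forward(rgProb, rgLabel, 2, 3)); // ~0.2899
    }
}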
Specifies the base parameter for all layers.
LayerType
Specifies the layer type.
override string ToString()
Returns a string representation of the LayerParameter.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
The MyCaffe.common namespace contains common MyCaffe classes.
The MyCaffe.layers namespace contains all layers that have a solidified code base,...
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...