2using System.Collections.Generic;
31 List<int> m_rgShape =
new List<int>(4);
49 dispose(ref m_categoricalLayer);
89 blobCategorical =
null;
95 m_log.
CHECK_GT(colBottom[0].count(), 0,
"The bottom(0) must have a count > 0!");
96 blobNumeric = colBottom[0];
100 m_log.
CHECK_GT(colBottom[1].count(), 0,
"The bottom(1) must have a count > 0!");
101 blobCategorical = colBottom[1];
104 if (blobNumeric ==
null && blobCategorical ==
null)
105 m_log.
FAIL(
"At least one of the numeric or categorical num_input must be > 0.");
116 Blob<T> blobCategorical =
null;
120 getBlobs(colBottom, out blobNumeric, out blobCategorical);
122 if (blobNumeric !=
null)
124 m_colNumericTop.
Clear();
128 m_colNumericTop.
Add(blobTop);
136 m_colBtm.
Add(blobNumeric);
137 m_numericLayer.LayerSetUp(m_colBtm, m_colNumericTop);
138 blobs.Add(m_numericLayer.blobs);
141 if (blobCategorical !=
null)
143 m_colCategoricalTop.
Clear();
147 m_colCategoricalTop.
Add(blobTop);
155 m_colBtm.
Add(blobCategorical);
156 m_categoricalLayer.
LayerSetUp(m_colBtm, m_colCategoricalTop);
169 Blob<T> blobCategorical =
null;
174 getBlobs(colBottom, out blobNumeric, out blobCategorical);
176 if (blobNumeric !=
null)
178 nN = colBottom[0].num;
180 m_colBtm.
Add(blobNumeric);
181 m_numericLayer.Reshape(m_colBtm, m_colNumericTop);
183 if (colBottom[0].num_axes > 2)
185 nC = colBottom[0].channels;
194 if (blobCategorical !=
null)
197 m_log.
CHECK_EQ(colBottom[1].num, nN,
"The bottom(0).num and bottom(1).num must be equal!");
199 nN = colBottom[1].num;
201 m_colBtm.
Add(blobCategorical);
202 m_categoricalLayer.
Reshape(m_colBtm, m_colCategoricalTop);
204 if (colBottom[0].num_axes > 2)
207 m_log.
CHECK_EQ(colBottom[1].channels, nC,
"The bottom(0).channels and bottom(1).channels must be equal!");
209 nC = colBottom[1].channels;
228 m_rgShape.Add(nC * nEmb);
233 m_rgShape.Add(nH.Value * nEmb);
255 Blob<T> blobCategorical =
null;
261 getBlobs(colBottom, out blobNumeric, out blobCategorical);
263 if (blobNumeric !=
null)
266 m_colBtm.
Add(blobNumeric);
267 m_numericLayer.Forward(m_colBtm, m_colNumericTop);
268 nCount = m_colNumericTop[0].count();
270 for (
int i = 0; i < m_colNumericTop.
Count; i++)
272 m_cuda.channel_copy(nCount, m_colNumericTop[0].num, 1, nBlocks, nEmb, nIdx, colTop[0].mutable_gpu_data, m_colNumericTop[i].gpu_data,
DIR.BWD);
277 if (blobCategorical !=
null)
280 m_colBtm.
Add(blobCategorical);
281 m_categoricalLayer.
Forward(m_colBtm, m_colCategoricalTop);
282 nCount = m_colCategoricalTop[0].count();
284 for (
int i = 0; i < m_colCategoricalTop.
Count; i++)
286 m_cuda.channel_copy(nCount, m_colCategoricalTop[0].num, 1, nBlocks, nEmb, nIdx, colTop[0].mutable_gpu_data, m_colCategoricalTop[i].gpu_data,
DIR.BWD);
313 Blob<T> blobCategorical =
null;
319 getBlobs(colBottom, out blobNumeric, out blobCategorical);
321 if (blobNumeric !=
null)
323 nCount = m_colNumericTop[0].count();
325 for (
int i = 0; i < m_colNumericTop.
Count; i++)
327 m_cuda.channel_copy(nCount, m_colNumericTop[0].num, 1, nBlocks, nEmb, nIdx, colTop[0].gpu_diff, m_colNumericTop[i].mutable_gpu_diff,
DIR.FWD);
332 m_colBtm.
Add(blobNumeric);
333 m_numericLayer.Backward(m_colNumericTop,
new List<bool>() {
true }, m_colBtm);
336 if (blobCategorical !=
null)
338 nCount = m_colCategoricalTop[0].count();
340 for (
int i = 0; i < m_colCategoricalTop.
Count; i++)
342 m_cuda.channel_copy(nCount, m_colCategoricalTop[0].num, 1, nBlocks, nEmb, nIdx, colTop[0].gpu_diff, m_colCategoricalTop[i].mutable_gpu_diff,
DIR.FWD);
347 m_colBtm.
Add(blobCategorical);
348 m_categoricalLayer.
Backward(m_colCategoricalTop,
new List<bool>() {
true }, m_colBtm);
The Log class provides general output in text form.
void FAIL(string str)
Causes a failure which throws an exception with the descriptive text.
void CHECK_EQ(double df1, double df2, string str)
Test whether one number is equal to another.
void CHECK_GT(double df1, double df2, string str)
Test whether one number is greater than another.
The BlobCollection contains a list of Blobs.
void Dispose()
Release all resource used by the collection and its Blobs.
void Add(Blob< T > b)
Add a new Blob to the collection.
int Count
Returns the number of items in the collection.
void Clear(bool bDispose=false)
Remove all items from the collection.
void Reshape(int[] rgShape)
Reshapes all blobs in the collection to the given shape.
The Blob is the main holder of data that moves through the Layers of the Net.
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
An interface for the units of computation which can be composed into a Net.
Log m_log
Specifies the Log for output.
LayerParameter m_param
Specifies the LayerParameter describing the Layer.
abstract void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Performs Layer specific setup. Derived layers should override this function as well as the Reshape fu...
void Backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Given the top Blob error gradients, compute the bottom Blob error gradients.
double Forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Given the bottom (input) Blobs, this function computes the top (output) Blobs and the loss.
abstract void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Adjust the shapes of top blobs and internal buffers to accommodate the shapes of the bottom blobs.
CudaDnn< T > m_cuda
Specifies the CudaDnn connection to Cuda.
static Layer< T > Create(CudaDnn< T > cuda, Log log, LayerParameter p, CancelEvent evtCancel, IXDatabaseBase db=null, TransferInput trxinput=null)
Create a new Layer based on the LayerParameter.
LayerParameter.LayerType m_type
Specifies the Layer type.
BlobCollection< T > blobs
Returns the collection of learnable parameter Blobs for the Layer.
LayerParameter convertLayerParam(LayerParameter pChild, LayerParameter pParent)
Called to convert a parent LayerParameterEx, used in blob sharing, with a child layer parameter.
The ChannelEmbeddingLayer implements the transforming/embeddings for both the numeric and categorical...
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the top (output) blobs.
override int MaxBottomBlobs
Returns the max number of required bottom (input) Blobs: numeric data, categorical data
override void dispose()
Releases all GPU and host resources used by the Layer.
override void setup_internal_blobs(BlobCollection< T > col)
Derived layers should add all internal blobs to the 'col' provided.
ChannelEmbeddingLayer(CudaDnn< T > cuda, Log log, LayerParameter p)
The constructor.
override void forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Forward computation
override int ExactNumTopBlobs
Returns the exact number of required top (output) Blobs: norm
override int MinBottomBlobs
Returns the min number of required bottom (input) Blobs: numeric data or categorical data (determined...
override void backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Computes the error gradient w.r.t. the stacked embedding numeric and categorical value inputs.
override void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Setup the layer.
Specifies the base parameter for all layers.
string name
Specifies the name of this LayerParameter.
CategoricalTransformationParameter categorical_trans_param
Returns the parameter set when initialized with LayerType.CATEGORICAL_TRANS
NumericTransformationParameter numeric_trans_param
Returns the parameter set when initialized with LayerType.NUMERIC_TRANS
LayerType
Specifies the layer type.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
The MyCaffe.common namespace contains common MyCaffe classes.
DIR
Defines the direction of data flow.
The MyCaffe.layers.tft namespace contains all TFT related layers.
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...