// ConvolutionOctaveLayer<T> excerpts (MyCaffe.layers.beta).
using System;
using System.Collections.Generic;
// Octave mixing ratios for the input and output channels.
double m_dfAlphaIn = 0.5;
double m_dfAlphaOut = 0.5;
// Shared GPU workspace handle and its size.
long m_hWorkspaceData = 0;
ulong m_lWorkspaceSizeInBytes = 0;
protected override void dispose()
{
    // Dispose each child layer, then each internal blob, and free the shared workspace.
    if (m_downsampleLayer1 != null)
    {
        m_downsampleLayer1.Dispose();
        m_downsampleLayer1 = null;
    }
    if (m_downsampleLayer2 != null)
    {
        m_downsampleLayer2.Dispose();
        m_downsampleLayer2 = null;
    }
    if (m_downsampleLayer3 != null)
    {
        m_downsampleLayer3.Dispose();
        m_downsampleLayer3 = null;
    }
    if (m_upsampleLayer != null)
    {
        m_upsampleLayer.Dispose();
        m_upsampleLayer = null;
    }
    if (m_conv_l2l != null)
    {
        m_conv_l2l.Dispose();
        m_conv_l2l = null;
    }
    if (m_conv_l2h != null)
    {
        m_conv_l2h.Dispose();
        m_conv_l2h = null;
    }
    if (m_conv_h2l != null)
    {
        m_conv_h2l.Dispose();
        m_conv_h2l = null;
    }
    if (m_conv_h2h != null)
    {
        m_conv_h2h.Dispose();
        m_conv_h2h = null;
    }
    if (m_blob_x_h != null)
    {
        m_blob_x_h.Dispose();
        m_blob_x_h = null;
    }
    if (m_blob_x_l != null)
    {
        m_blob_x_l.Dispose();
        m_blob_x_l = null;
    }
    if (m_blob_x_h_ds != null)
    {
        m_blob_x_h_ds.Dispose();
        m_blob_x_h_ds = null;
    }
    if (m_blob_x_l_ds != null)
    {
        m_blob_x_l_ds.Dispose();
        m_blob_x_l_ds = null;
    }
    if (m_blob_x_h2h != null)
    {
        m_blob_x_h2h.Dispose();
        m_blob_x_h2h = null;
    }
    if (m_blob_x_h2l != null)
    {
        m_blob_x_h2l.Dispose();
        m_blob_x_h2l = null;
    }
    if (m_blob_x_l2l != null)
    {
        m_blob_x_l2l.Dispose();
        m_blob_x_l2l = null;
    }
    if (m_blob_x_l2h != null)
    {
        m_blob_x_l2h.Dispose();
        m_blob_x_l2h = null;
    }
    if (m_blob_x_l2h_us != null)
    {
        m_blob_x_l2h_us.Dispose();
        m_blob_x_l2h_us = null;
    }

    if (m_hWorkspaceData != 0)
    {
        m_cuda.FreeMemory(m_hWorkspaceData);
        m_hWorkspaceData = 0;
    }
}

public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    // Validate the octave mixing ratios.
    m_log.CHECK_GE(m_dfAlphaIn, 0, "The alpha in must be >= 0.");
    m_log.CHECK_LE(m_dfAlphaIn, 1, "The alpha in must be <= 1.");
    m_log.CHECK_GE(m_dfAlphaOut, 0, "The alpha out must be >= 0.");
    m_log.CHECK_LT(m_dfAlphaOut, 1, "The alpha out must be < 1.");
    // Name the internal blobs (creation lines omitted in this excerpt).
    m_blob_x_h.Name = "x_h";
    m_blob_x_h2h.Name = "x_h2h";

    if (m_dfAlphaOut > 0)
    {
        m_blob_x_h_ds.Name = "x_h_ds";
        m_blob_x_h2l.Name = "x_h2l";
    }

    if (colBottom.Count > 1)
    {
        m_blob_x_l.Name = "x_l";
        m_blob_x_l_ds.Name = "x_l_ds";
        m_blob_x_l2h.Name = "x_l2h";
        m_blob_x_l2h_us.Name = "x_l2h_us";
        m_blob_x_l2l.Name = "x_l2l";
    }
    // High-frequency path: downsample the input, then run the h2h convolution.
    setupBtmTop(colBottom[0], m_blob_x_h);
    m_downsampleLayer1.LayerSetUp(m_rgBtm, m_rgTop);
    m_downsampleLayer1.Reshape(m_rgBtm, m_rgTop);

    int nInChannels = colBottom[0].channels;  // used to size the child convolutions (usage elided).

    // Child convolution parameter and group locals (declarations inferred from the excerpt).
    LayerParameter convParam = new LayerParameter(LayerParameter.LayerType.CONVOLUTION);
    uint nGroup = m_param.convolution_param.group;
    uint nGroupTmp;

    convParam.name = "h2h conv";
    nGroupTmp = (uint)Math.Ceiling(nGroup - m_dfAlphaIn * nGroup);
    convParam.convolution_param.group = nGroupTmp;  // inferred: apply the adjusted group.
    setupBtmTop(m_blob_x_h, m_blob_x_h2h);
    m_conv_h2h.LayerSetUp(m_rgBtm, m_rgTop);
    m_conv_h2h.Reshape(m_rgBtm, m_rgTop);
    // Optional high-to-low path.
    if (m_dfAlphaOut > 0)
    {
        setupBtmTop(m_blob_x_h, m_blob_x_h_ds);
        m_downsampleLayer2.LayerSetUp(m_rgBtm, m_rgTop);
        m_downsampleLayer2.Reshape(m_rgBtm, m_rgTop);

        convParam.name = "h2l conv";
        setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l);
        m_conv_h2l.LayerSetUp(m_rgBtm, m_rgTop);
        m_conv_h2l.Reshape(m_rgBtm, m_rgTop);
    }
    // Low-frequency path (only when a second bottom is supplied).
    if (colBottom.Count > 1)
    {
        setupBtmTop(colBottom[1], m_blob_x_l_ds);
        m_downsampleLayer3.LayerSetUp(m_rgBtm, m_rgTop);
        m_downsampleLayer3.Reshape(m_rgBtm, m_rgTop);

        if (m_dfAlphaOut > 0)
        {
            convParam.name = "l2l conv";
            nGroupTmp = (uint)Math.Ceiling(m_dfAlphaIn * nGroup);
            convParam.convolution_param.group = nGroupTmp;  // inferred: apply the adjusted group.
            setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
            m_conv_l2l.LayerSetUp(m_rgBtm, m_rgTop);
            m_conv_l2l.Reshape(m_rgBtm, m_rgTop);
        }

        convParam.name = "l2h conv";
        setupBtmTop(m_blob_x_l, m_blob_x_l2h);
        m_conv_l2h.LayerSetUp(m_rgBtm, m_rgTop);
        m_conv_l2h.Reshape(m_rgBtm, m_rgTop);

        // Upsample the low-to-high result back to the high-frequency size.
        setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
        m_upsampleLayer.LayerSetUp(m_rgBtm, m_rgTop);
        m_upsampleLayer.Reshape(m_rgBtm, m_rgTop);
    }
    // Element-wise add that merges the two frequency paths into the tops.
    LayerParameter eltAdd = new LayerParameter(LayerParameter.LayerType.ELTWISE);  // inferred declaration.
    eltAdd.name = "eltadd";

    setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
    m_add.LayerSetUp(m_rgBtm, m_rgTop);
    m_add.Reshape(m_rgBtm, m_rgTop);

    if (m_dfAlphaOut > 0)
    {
        setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
        m_add.Reshape(m_rgBtm, m_rgTop);
    }
}

private void layer_OnSetWorkspace(object sender, WorkspaceArgs e)
{
    // Grow the shared workspace only when a larger size is requested
    // (guard inferred from the excerpt).
    if (e.WorkspaceSizeInBytes <= m_lWorkspaceSizeInBytes)
        return;

    m_lWorkspaceSizeInBytes = e.WorkspaceSizeInBytes;
    m_cuda.DisableGhostMemory();

    if (m_hWorkspaceData != 0)
        m_cuda.FreeMemory(m_hWorkspaceData);

    m_hWorkspaceData = m_cuda.AllocMemory((long)m_lWorkspaceSizeInBytes);
    m_cuda.ResetGhostMemory();
}

private void layer_OnGetWorkspace(object sender, WorkspaceArgs e)
{
    // Report the shared workspace back to the requesting layer
    // (body inferred from the WorkspaceArgs members documented below).
    e.WorkspaceData = m_hWorkspaceData;
    e.WorkspaceSizeInBytes = m_lWorkspaceSizeInBytes;
}
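
Every child-layer call in these excerpts goes through a setupBtmTop helper and the reusable m_rgBtm/m_rgTop collections. The helper itself is not part of the excerpt; a minimal sketch of what it presumably does:

private void setupBtmTop(Blob<T> btm, Blob<T> top)
{
    // Reuse the scratch collections so child layers can be driven
    // without allocating new BlobCollections on every call.
    m_rgBtm.Clear();
    m_rgBtm.Add(btm);
    m_rgTop.Clear();
    m_rgTop.Add(top);
}

private void setupBtmTop(Blob<T> btm1, Blob<T> btm2, Blob<T> top)
{
    // Two-bottom overload used for the element-wise add stage.
    m_rgBtm.Clear();
    m_rgBtm.Add(btm1);
    m_rgBtm.Add(btm2);
    m_rgTop.Clear();
    m_rgTop.Add(top);
}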

public override void Reshape(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    // High-frequency path.
    setupBtmTop(colBottom[0], m_blob_x_h);
    m_downsampleLayer1.Reshape(m_rgBtm, m_rgTop);

    setupBtmTop(m_blob_x_h, m_blob_x_h2h);
    m_conv_h2h.Reshape(m_rgBtm, m_rgTop);

    if (m_dfAlphaOut > 0)
    {
        setupBtmTop(m_blob_x_h, m_blob_x_h_ds);
        m_downsampleLayer2.Reshape(m_rgBtm, m_rgTop);
        setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l);
        m_conv_h2l.Reshape(m_rgBtm, m_rgTop);
    }

    // Low-frequency path.
    if (colBottom.Count > 1)
    {
        setupBtmTop(colBottom[1], m_blob_x_l_ds);
        m_downsampleLayer3.Reshape(m_rgBtm, m_rgTop);

        if (m_dfAlphaOut > 0)
        {
            setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
            m_conv_l2l.Reshape(m_rgBtm, m_rgTop);
        }

        setupBtmTop(m_blob_x_l, m_blob_x_l2h);
        m_conv_l2h.Reshape(m_rgBtm, m_rgTop);

        setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
        m_upsampleLayer.Reshape(m_rgBtm, m_rgTop);
    }

    // Merge the two paths into the tops.
    setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
    m_add.Reshape(m_rgBtm, m_rgTop);

    if (m_dfAlphaOut > 0)
    {
        setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
        m_add.Reshape(m_rgBtm, m_rgTop);
    }
}

protected override void forward(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    // High-frequency path.
    setupBtmTop(colBottom[0], m_blob_x_h);
    m_downsampleLayer1.Forward(m_rgBtm, m_rgTop);

    setupBtmTop(m_blob_x_h, m_blob_x_h2h);
    m_conv_h2h.Forward(m_rgBtm, m_rgTop);

    if (m_dfAlphaOut > 0)
    {
        setupBtmTop(m_blob_x_h, m_blob_x_h_ds);
        m_downsampleLayer2.Forward(m_rgBtm, m_rgTop);
        setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l);
        m_conv_h2l.Forward(m_rgBtm, m_rgTop);
    }

    // Low-frequency path.
    if (colBottom.Count > 1)
    {
        m_blob_x_l.CopyFrom(colBottom[1]);  // inferred: stage the low-frequency bottom.

        setupBtmTop(m_blob_x_l, m_blob_x_l_ds);
        m_downsampleLayer3.Forward(m_rgBtm, m_rgTop);

        if (m_dfAlphaOut > 0)
        {
            setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
            m_conv_l2l.Forward(m_rgBtm, m_rgTop);
        }

        setupBtmTop(m_blob_x_l, m_blob_x_l2h);
        m_conv_l2h.Forward(m_rgBtm, m_rgTop);

        // Guard inferred from the excerpt: copy through when no upsampling is needed.
        if (m_upsampleLayer != null)
        {
            setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
            m_upsampleLayer.Forward(m_rgBtm, m_rgTop);
        }
        else
        {
            m_blob_x_l2h_us.CopyFrom(m_blob_x_l2h);
        }
    }

    // Merge the two paths into the tops.
    setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
    m_add.Forward(m_rgBtm, m_rgTop);

    if (m_dfAlphaOut > 0)
    {
        setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
        m_add.Forward(m_rgBtm, m_rgTop);
    }
}

protected override void backward(BlobCollection<T> colTop, List<bool> rgbPropagateDown, BlobCollection<T> colBottom)
{
    if (!rgbPropagateDown[0])
        return;

    if (colBottom.Count > 1)
    {
        // Two-bottom case: run the merge, upsample, and low-path layers backward.
        if (m_dfAlphaOut > 0)
        {
            setupBtmTop(m_blob_x_h2l, m_blob_x_l2l, colTop[1]);
            m_add.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
        }

        setupBtmTop(m_blob_x_l2h_us, m_blob_x_h2h, colTop[0]);
        m_add.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

        // Guard inferred from the excerpt: copy the diff through when no upsampling is used.
        if (m_upsampleLayer != null)
        {
            setupBtmTop(m_blob_x_l2h, m_blob_x_l2h_us);
            m_upsampleLayer.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
        }
        else
        {
            m_blob_x_l2h.CopyFrom(m_blob_x_l2h_us, true);
        }

        setupBtmTop(m_blob_x_l_ds, m_blob_x_l2h);
        m_conv_l2h.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

        if (m_dfAlphaOut > 0)
        {
            setupBtmTop(m_blob_x_l_ds, m_blob_x_l2l);
            m_conv_l2l.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
        }

        setupBtmTop(m_blob_x_l, m_blob_x_l_ds);
        m_downsampleLayer3.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

        m_blob_x_l.CopyFrom(m_blob_x_l_ds, true);
        colBottom[1].CopyFrom(m_blob_x_l, true);
    }
    else
    {
        // Single-bottom case: the top diffs feed the h2h/h2l paths directly.
        m_blob_x_h2h.CopyFrom(colTop[0], true);

        if (m_dfAlphaOut > 0)
            m_blob_x_h2l.CopyFrom(colTop[1], true);
    }

    if (m_dfAlphaOut > 0)
    {
        setupBtmTop(m_blob_x_h_ds, m_blob_x_h2l);
        m_conv_h2l.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

        setupBtmTop(m_blob_x_h, m_blob_x_h_ds);
        m_downsampleLayer2.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);
    }

    setupBtmTop(m_blob_x_h, m_blob_x_h2h);
    m_conv_h2h.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

    setupBtmTop(colBottom[0], m_blob_x_h);
    m_downsampleLayer1.Backward(m_rgTop, rgbPropagateDown, m_rgBtm);

    colBottom[0].CopyFrom(m_blob_x_h, true);
}
The Log class provides general output in text form.
void CHECK_LE(double df1, double df2, string str)
Test whether one number is less than or equal to another.
void CHECK_GE(double df1, double df2, string str)
Test whether one number is greater than or equal to another.
void CHECK_LT(double df1, double df2, string str)
Test whether one number is less than another.
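A quick sketch of the CHECK helpers as the layer uses them (m_log is the Layer's Log field; the values are illustrative):

double dfAlpha = 0.25;
m_log.CHECK_GE(dfAlpha, 0, "The alpha must be >= 0.");  // passes: 0.25 >= 0.
m_log.CHECK_LE(dfAlpha, 1, "The alpha must be <= 1.");  // passes: 0.25 <= 1.
m_log.CHECK_LT(dfAlpha, 1, "The alpha must be < 1.");   // passes: 0.25 < 1.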
The BlobCollection contains a list of Blobs.
void Add(Blob< T > b)
Add a new Blob to the collection.
int Count
Returns the number of items in the collection.
void Clear(bool bDispose=false)
Remove all items from the collection.
void ReshapeLike(BlobCollection< T > src)
Reshapes all blobs in the collection to the sizes of the source.
void CopyFrom(BlobCollection< T > bSrc, bool bCopyDiff=false)
Copy the data or diff from another BlobCollection into this one.
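A minimal sketch of the collection calls the layer relies on, assuming two already-created blobs b1 and b2 with matching shapes:

BlobCollection<float> colSrc = new BlobCollection<float>();
colSrc.Add(b1);                // add a blob to the collection.
BlobCollection<float> colDst = new BlobCollection<float>();
colDst.Add(b2);

colDst.ReshapeLike(colSrc);    // size colDst's blobs like colSrc's.
colDst.CopyFrom(colSrc);       // copy data; CopyFrom(colSrc, true) copies the diff.
int nCount = colDst.Count;     // nCount == 1.
colSrc.Clear();                // remove all items (pass true to also dispose them).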
The Blob is the main holder of data that moves through the Layers of the Net.
void CopyFrom(Blob< T > src, int nSrcOffset, int nDstOffset, int nCount, bool bCopyData, bool bCopyDiff)
Copy from a source Blob.
void ReshapeLike(Blob< T > b, bool? bUseHalfSize=null)
Reshape this Blob to have the same shape as another Blob.
string Name
Get/set the name of the Blob.
virtual void Dispose(bool bDisposing)
Releases all resources used by the Blob (including both GPU and Host).
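The backward excerpts copy diffs between blobs with a two-argument CopyFrom overload; a sketch of the Blob calls involved, assuming blobs blobA and blobB on the same CudaDnn instance:

blobB.ReshapeLike(blobA);      // match blobB's shape to blobA's.
blobB.CopyFrom(blobA);         // copy the data, as in the forward pass.
blobB.CopyFrom(blobA, true);   // copy the diff, as in the backward pass.
blobB.Name = "x_copy";         // name used in log output.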
The CudaDnn object is the main interface to the Low-Level Cuda C++ DLL.
The WorkspaceArgs are passed to both the Layer::OnSetWorkspace and Layer::OnGetWorkspace events.
long WorkspaceData
Get/set the handle to workspace data in GPU memory.
ulong WorkspaceSizeInBytes
Get/set the workspace memory size in bytes.
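The handlers in the excerpts above are typically attached to the matching events of each child layer so that all of them share one workspace allocation; a sketch of that wiring for one child layer from the listing:

m_conv_h2h.OnGetWorkspace += layer_OnGetWorkspace;  // child queries the shared workspace.
m_conv_h2h.OnSetWorkspace += layer_OnSetWorkspace;  // child requests a (possibly larger) workspace.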
An interface for the units of computation which can be composed into a Net.
Log m_log
Specifies the Log for output.
LayerParameter m_param
Specifies the LayerParameter describing the Layer.
EventHandler< WorkspaceArgs > OnGetWorkspace
Specifies the OnGetWorkspace event that fires when the getWorkspace() function is called by a layer to get the shared workspace.
abstract void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Performs Layer specific setup. Derived layers should override this function as well as the Reshape function.
void Backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Given the top Blob error gradients, compute the bottom Blob error gradients.
double Forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Given the bottom (input) Blobs, this function computes the top (output) Blobs and the loss.
abstract void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Adjust the shapes of top blobs and internal buffers to accommodate the shapes of the bottom blobs.
void Dispose()
Releases all GPU and host resources used by the Layer.
CudaDnn< T > m_cuda
Specifies the CudaDnn connection to Cuda.
EventHandler< WorkspaceArgs > OnSetWorkspace
Specifies the OnSetWorkspace event that fires when the setWorkspace() function is called by a layer to set the shared workspace.
static Layer< T > Create(CudaDnn< T > cuda, Log log, LayerParameter p, CancelEvent evtCancel, IXDatabaseBase db=null, TransferInput trxinput=null)
Create a new Layer based on the LayerParameter.
LayerParameter.LayerType m_type
Specifies the Layer type.
BlobCollection< T > blobs
Returns the collection of learnable parameter Blobs for the Layer.
BlobCollection< T > m_colBlobs
Specifies the learnable parameter Blobs of the Layer.
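Taken together, these members imply the standard layer lifecycle; a minimal sketch using the static factory, assuming an initialized CudaDnn<float> cuda, a Log log, and populated colBottom/colTop collections (the parameter values are illustrative):

LayerParameter p = new LayerParameter(LayerParameter.LayerType.CONVOLUTION);
p.convolution_param.num_output = 16;
p.convolution_param.kernel_size.Add(3);

Layer<float> layer = Layer<float>.Create(cuda, log, p, new CancelEvent());
layer.LayerSetUp(colBottom, colTop);                           // one-time setup.
layer.Reshape(colBottom, colTop);                              // size tops and internal buffers.
double dfLoss = layer.Forward(colBottom, colTop);              // run the forward pass.
layer.Backward(colTop, new List<bool>() { true }, colBottom);  // propagate gradients.
layer.Dispose();                                               // release GPU and host resources.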
The ConvolutionOctaveLayer processes high and low frequency portions of images using convolution.
override void backward(BlobCollection< T > colTop, List< bool > rgbPropagateDown, BlobCollection< T > colBottom)
Computes the error gradient w.r.t. the inputs.
override int MaxTopBlobs
Returns the maximum number of top (output) Blobs: out_h, out_l
override int MaxBottomBlobs
Returns the maximum number of bottom (input) Blobs: in_h, in_l
override void Reshape(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Reshape the bottom (input) and top (output) blobs.
ConvolutionOctaveLayer(CudaDnn< T > cuda, Log log, LayerParameter p)
The ConvolutionOctaveLayer constructor.
override int MinBottomBlobs
Returns the minimum number of required bottom (input) Blobs: input.
override void dispose()
Release all resources used.
override void LayerSetUp(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Setup the layer.
override int MinTopBlobs
Returns the minimum number of required top (output) Blobs: output
override void forward(BlobCollection< T > colBottom, BlobCollection< T > colTop)
Forward computation.
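Per the blob counts above, the layer takes one or two bottoms (in_h, plus in_l when present) and produces one or two tops (out_h, plus out_l when alpha_out > 0); a sketch of the two-bottom, two-top wiring, with all blob names illustrative:

BlobCollection<float> colBottom = new BlobCollection<float>();
colBottom.Add(blobInHigh);   // in_h: high-frequency input (required).
colBottom.Add(blobInLow);    // in_l: low-frequency input (optional).

BlobCollection<float> colTop = new BlobCollection<float>();
colTop.Add(blobOutHigh);     // out_h: always produced.
colTop.Add(blobOutLow);      // out_l: only produced when alpha_out > 0.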
double alpha_out
Specifies alpha applied to the output channels.
double alpha_in
Specifies alpha applied to the input channels.
uint group
The group size for group convolution.
FillerParameter bias_filler
The filler for the bias. The default is set to use the 'constant = 0.1' filler.
bool bias_term
Whether to have bias terms or not.
uint num_output
The number of outputs for the layer.
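The alpha values drive the high/low channel split that the Math.Ceiling(...) expressions in the setup excerpts compute; a worked sketch of that arithmetic:

int nChannels = 64;
double dfAlphaIn = 0.25;
int nLow = (int)Math.Ceiling(dfAlphaIn * nChannels);   // 16 low-frequency channels.
int nHigh = nChannels - nLow;                          // 48 high-frequency channels.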
Specifies the parameters for the EltwiseLayer.
EltwiseOp
Defines the operation to perform.
EltwiseOp operation
Specifies the element-wise operation.
Engine engine
Specifies the Engine in use.
int? zoom_factor
Specifies the zoom factor applied to the output.
List< uint > kernel_size
Kernel size is given as a single value for equal dimensions in all spatial dimensions, or once per spatial dimension.
List< uint > dilation
Factor used to dilate the kernel, (implicitly) zero-filling the resulting holes. (Kernel dilation is sometimes referred to by its use in the à trous algorithm from Holschneider et al. 1987.)
List< uint > stride
Stride is given as a single value for equal dimensions in all spatial dimensions, or once per spatial dimension.
List< uint > pad
Pad is given as a single value for equal dimensions in all spatial dimensions, or once per spatial dimension.
Specifies the base parameter for all layers.
InterpParameter interp_param
Returns the parameter set when initializing the LayerType.INTERP
ConvolutionParameter convolution_param
Returns the parameter set when initialized with LayerType.CONVOLUTION
string name
Specifies the name of this LayerParameter.
PoolingParameter pooling_param
Returns the parameter set when initialized with LayerType.POOLING
EltwiseParameter eltwise_param
Returns the parameter set when initialized with LayerType.ELTWISE
ConvolutionOctaveParameter convolution_octave_param
Returns the parameter set when initialized with LayerType.CONVOLUTION_OCTAVE
LayerType
Specifies the layer type.
virtual LayerParameter Clone(bool bCloneBlobs)
Creates a new copy of this instance of the parameter.
Specifies the parameters for the PoolingLayer.
PoolingMethod
Defines the pooling method.
PoolingMethod pool
Specifies the pooling method.
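The pooling, interpolation, and eltwise parameters above are presumably what the octave layer configures for its internal downsample, upsample, and add stages; a hedged sketch of such sub-parameters, assuming PoolingParameter shares the list-style kernel_size/stride members shown above (all values illustrative):

LayerParameter pool = new LayerParameter(LayerParameter.LayerType.POOLING);
pool.pooling_param.pool = PoolingParameter.PoolingMethod.AVE;  // average-pool downsample.
pool.pooling_param.kernel_size.Add(2);
pool.pooling_param.stride.Add(2);

LayerParameter interp = new LayerParameter(LayerParameter.LayerType.INTERP);
interp.interp_param.zoom_factor = 2;                           // 2x upsample.

LayerParameter add = new LayerParameter(LayerParameter.LayerType.ELTWISE);
add.eltwise_param.operation = EltwiseParameter.EltwiseOp.SUM;  // element-wise sum merge.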
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
The MyCaffe.common namespace contains common MyCaffe classes.
The MyCaffe.layers.beta namespace contains all beta stage layers.
The MyCaffe.param.beta parameters are used by the MyCaffe.layers.beta layers.
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-source project.