using System.Collections.Generic;
using System.Threading.Tasks;
protected string getFileName(string strFile, string strSubDir)
{
    // ...
    if (!string.IsNullOrEmpty(strSubDir))
        strOut += strSubDir + "\\";
    // ...
}
if (p.name == strName)
{
    // ...
}

if (transform != null)
{
    // ...
}
data.top.Add(strName);
data.top.Add("label");

if (transform != null)
{
    // ...
}
if (rgSampler != null)
{
    // ...
}
data.top.Add("data");
data.top.Add("label");
protected List<LayerParameter> createMultiBoxHead(LayerParameter data, int nNumClasses, List<MultiBoxHeadInfo> rgInfo, List<float> rgPriorVariance, bool bUseObjectness = false, bool bUseBatchNorm = true, double dfLrMult = 1.0, bool useScale = true, int nImageHt = 0, int nImageWd = 0, bool bShareLocation = true, bool bFlip = true, bool bClip = true, double dfOffset = 0.5, int nKernelSize = 1, int nPad = 0, string strConfPostfix = "", string strLocPostfix = "")
{
    for (int i = 1; i < rgInfo.Count; i++)
    {
        if (!rgInfo[0].Verify(rgInfo[i]))
            throw new Exception("The multi-box header info must be consistent across all items.");
    }

    if (nNumClasses <= 0)
        throw new Exception("The number of classes must be > 0.");

    List<string> rgstrLocLayers = new List<string>();
    List<string> rgstrConfLayers = new List<string>();
    List<string> rgstrPriorBoxLayers = new List<string>();
    List<string> rgstrObjLayers = new List<string>();

    for (int i = 0; i < rgInfo.Count; i++)
    {
        // ...
        if (rgInfo[i].Normalization.HasValue && rgInfo[i].Normalization.Value != -1)
        {
            // ...
            norm.name = fromLayer.name + "_norm";
            // ...
        }

        if (rgInfo[i].InterLayerDepth.HasValue && rgInfo[i].InterLayerDepth.Value > 0)
            fromLayer = addConvBNLayer(fromLayer.name, fromLayer.name + "_inter", bUseBatchNorm, true, (int)rgInfo[i].InterLayerDepth.Value, 3, 1, 1, dfLrMult);

        double? dfMinSize = rgInfo[i].MinSize;
        double? dfMaxSize = rgInfo[i].MaxSize;
        double? dfAspectHt = rgInfo[i].AspectRatioHeight;
        double? dfAspectWd = rgInfo[i].AspectRatioWidth;
        double? dfStepWd = rgInfo[i].StepWidth;
        double? dfStepHt = rgInfo[i].StepHeight;
        int nAspectLen = (dfAspectWd == dfAspectHt) ? 1 : 2;
        int nNumPriorsPerLocation = (dfMaxSize.HasValue) ? (2 + nAspectLen) : (1 + nAspectLen);
        // ...
        nNumPriorsPerLocation += nAspectLen;

        // Location prediction layer.
        int nNumLocOutput = nNumPriorsPerLocation * 4;
        // ...
        nNumLocOutput *= nNumClasses;

        strName = fromLayer.name + "_mbox_loc" + strLocPostfix;
        lastLayer = addConvBNLayer(fromLayer.name, strName, bUseBatchNorm, false, nNumLocOutput, nKernelSize, nPad, 1, dfLrMult);
        // ...
        permute.name = strName + "_perm";
        // ...
        flatten.name = strName + "_flat";
        // ...
        rgstrLocLayers.Add(lastLayer.name);

        // Confidence prediction layer.
        strName = fromLayer.name + "_mbox_conf" + strConfPostfix;
        int nNumConfOutput = nNumPriorsPerLocation * nNumClasses;
        lastLayer = addConvBNLayer(fromLayer.name, strName, bUseBatchNorm, false, nNumConfOutput, nKernelSize, nPad, 1, dfLrMult);
        // ...
        permute.name = strName + "_perm";
        // ...
        flatten.name = strName + "_flat";
        // ...
        rgstrConfLayers.Add(lastLayer.name);

        // Prior box layer.
        strName = fromLayer.name + "_mbox_priorbox";
        // ...
        priorbox.name = strName;
        priorbox.top.Add(priorbox.name);
        // ...
        if (dfMaxSize.HasValue)
        {
            // ...
        }
        if (dfAspectWd.HasValue)
        {
            // ...
        }
        if (dfAspectHt.HasValue)
        {
            // ...
        }
        if (dfStepWd.HasValue && dfStepHt.HasValue)
        {
            if (dfStepWd.Value == dfStepHt.Value)
            {
                // ...
            }
        }
        // ...
        if (nImageHt != 0 && nImageWd != 0)
        {
            if (nImageHt == nImageWd)
            {
                // ...
            }
        }
        // ...
        rgstrPriorBoxLayers.Add(lastLayer.name);

        // Objectness prediction layer (optional).
        strName = fromLayer.name + "_mbox_objectness";
        int nNumObjOutput = nNumPriorsPerLocation * 2;
        lastLayer = addConvBNLayer(fromLayer.name, strName, bUseBatchNorm, false, nNumObjOutput, nKernelSize, nPad, 1, dfLrMult);
        // ...
        permute.name = strName + "_perm";
        // ...
        flatten.name = strName + "_flat";
        // ...
        rgstrObjLayers.Add(lastLayer.name);
    }

    // Concatenate the per-source outputs.
    List<LayerParameter> rgMboxLayers = new List<LayerParameter>();

    strName = "mbox_loc";
    // ...
    concat.name = strName;
    concat.bottom = rgstrLocLayers;
    // ...
    rgMboxLayers.Add(concat);

    strName = "mbox_conf";
    // ...
    concat.name = strName;
    concat.bottom = rgstrConfLayers;
    // ...
    rgMboxLayers.Add(concat);

    strName = "mbox_priorbox";
    // ...
    concat.name = strName;
    concat.bottom = rgstrPriorBoxLayers;
    // ...
    rgMboxLayers.Add(concat);

    strName = "mbox_objectness";
    // ...
    concat.name = strName;
    concat.bottom = rgstrObjLayers;
    // ...
    rgMboxLayers.Add(concat);
    // ...
}
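As a worked example of the output-channel arithmetic above, consider a source layer that specifies a minimum size, a maximum size, and distinct aspect-ratio width/height, and for which the elided branch that adds nAspectLen a second time is taken:

int nAspectLen = 2;                              // dfAspectWd != dfAspectHt
int nNumPriorsPerLocation = 2 + nAspectLen;      // max size present -> 4 priors per location
nNumPriorsPerLocation += nAspectLen;             // elided branch taken -> 6 priors per location
int nNumLocOutput = nNumPriorsPerLocation * 4;   // 24 localization channels (4 box coordinates per prior)
int nNumConfOutput = nNumPriorsPerLocation * 21; // 126 confidence channels for, e.g., 21 classes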
protected LayerParameter addConvBNLayer(string strInputLayer, string strOutputLayer, bool bUseBatchNorm, bool bUseRelU, int nNumOutput, int nKernelSize, int nPad, int nStride, double dfLrMult = 1.0, int nDilation = 1, SCALE_BIAS useScale = SCALE_BIAS.SCALE, string strConvPrefix = "", string strConvPostfix = "", string strBnPrefix = "", string strBnPostfix = "_bn", string strScalePrefix = "", string strScalePostFix = "_scale", string strBiasPrefix = "", string strBiasPostfix = "_bias", bool bNamedParams = false, string strLayerPostfix = "", Phase phaseExclude = Phase.NONE)
{
    string strName = strConvPrefix + strOutputLayer + strConvPostfix;
    // ...
    convLayer.name = strName + strLayerPostfix;
    // ...
    convLayer.top.Add(convLayer.name);
    // ...
    convLayer.parameters.Add(new ParamSpec(dfLrMult, 1.0, (bNamedParams) ? strName + "_w" : null));
    // ...
    strName = strBnPrefix + strOutputLayer + strBnPostfix;
    bnLayer.name = strName + strLayerPostfix;
    // ...
    double dfBnLrMult = dfLrMult;
    // ...
    strName = strScalePrefix + strOutputLayer + strScalePostFix;
    scaleLayer.name = strName + strLayerPostfix;
    // ...
    scaleLayer.parameters.Add(new ParamSpec(dfBnLrMult, 0.0, (bNamedParams) ? strName + "_w" : null));
    scaleLayer.parameters.Add(new ParamSpec(dfBnLrMult, 0.0, (bNamedParams) ? strName + "_b" : null));
    scaleLayer.top.Add(scaleLayer.name);
    // ...
    strName = strBiasPrefix + strOutputLayer + strBiasPostfix;
    biasLayer.name = strName + strLayerPostfix;
    // ...
    biasLayer.parameters.Add(new ParamSpec(dfBnLrMult, 0.0, (bNamedParams) ? strName + "_w" : null));
    biasLayer.top.Add(biasLayer.name);
    // ...
    convLayer.parameters.Add(new ParamSpec(dfLrMult, 1.0, (bNamedParams) ? strName + "_w" : null));
    convLayer.parameters.Add(new ParamSpec(dfLrMult * 2, 0.0, (bNamedParams) ? strName + "_b" : null));
    // ...
    if (scaleLayer != null)
    {
        // ...
    }
    if (biasLayer != null)
    {
        // ...
    }
    // ...
    reluLayer.name = convLayer.name + "_relu";
    // ...
}
toLayer.bottom.Add(fromLayer);
// ...
if (fromLayer2 != null)
    toLayer.bottom.Add(fromLayer2);
// ...
toLayer.bottom.Add(fromLayer.top[nTopIdx]);
// ...
toLayer.top.Add(fromLayer.top[nTopIdx]);
// ...
for (int i = 0; i < rgFromLayer.Count; i++)
{
    toLayer.bottom.Add(rgFromLayer[i].top[0]);
}
// ...
conv.top.Add(strName);
// ...
pool.top.Add(strName);
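A hedged sketch of how these helpers compose (from inside a ModelBuilder-derived class): createConvolution and createPooling build the layer parameters, and connectAndAddLayer wires each new layer's bottom to the previous layer's top. The MAX pooling-method member and the layer names are assumptions, as is the assumption that connectAndAddLayer returns the newly connected 'to' layer.

LayerParameter conv = createConvolution("conv_extra", 256, 3, 1, 1);   // 3x3, pad 1, stride 1 (name is illustrative)
lastLayer = connectAndAddLayer(lastLayer, conv);                       // conv.bottom <- lastLayer.top[0]

LayerParameter pool = createPooling("pool_extra", PoolingParameter.PoolingMethod.MAX, 2, 0, 2);
lastLayer = connectAndAddLayer(lastLayer, pool);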
protected LayerParameter addVGGBlock(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nConvCount, bool? bNoPool, bool bDilatePool = false, int nKernelSize = 3, int nPad = 1, int nStride = 1, int nDilation = 1)
{
    for (int i = 0; i < nConvCount; i++)
    {
        string strConvName = "conv" + nBlockIdx.ToString() + "_" + nConvIdx.ToString();
        // ...
        relu.name = "relu" + nBlockIdx.ToString();
        // ...
    }

    if (!bNoPool.HasValue)
    {
        // ...
        string strConvName = "conv" + nBlockIdx.ToString() + "_" + nConvIdx.ToString();
        // ...
        string strPoolName = "pool" + nBlockIdx.ToString();
        // ...
    }
    // ...
}
protected LayerParameter addVGGfc(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nDilation, bool bDilated, bool bNoPool, bool bFullConv, bool bReduced, bool bDropout)
{
    string strConvName = "conv" + nBlockIdx.ToString() + "_" + nConvIdx.ToString();
    string strPoolName = "pool" + nBlockIdx.ToString();
    // ...
    int nPad = (int)((nKernelSize + (nDilation - 1) * (nKernelSize - 1)) - 1) / 2;
    // ...
    relu.name = "relu" + nBlockIdx.ToString();
    // ...
    dropout.name = "dropout6";
    // ...
    dropout.name = "dropout7";
    // ...
    dropout.name = "dropout6";
    // ...
    dropout.name = "dropout7";
    // ...
}
protected LayerParameter addVGGNetBody(LayerParameter lastLayer, bool bNeedFc = true, bool bFullConv = true, bool bReduced = true, bool bDilated = true, bool bNoPool = false, bool bDropout = false, List<string> rgstrFreezeLayers = null, bool bDilatePool4 = false)
{
    lastLayer = addVGGBlock(lastLayer, 1, 1, 64, 2, bNoPool, false, 3, 1, 1);
    lastLayer = addVGGBlock(lastLayer, 2, 1, 128, 2, bNoPool, false, 3, 1, 1);
    lastLayer = addVGGBlock(lastLayer, 3, 1, 256, 3, bNoPool, false, 3, 1, 1);
    lastLayer = addVGGBlock(lastLayer, 4, 1, 512, 3, bNoPool, bDilatePool4, 3, 1, 1);

    int nDilation = (bDilatePool4) ? 2 : 1;
    // ...
    int nPad = (int)((nKernelSize + (nDilation - 1) * (nKernelSize - 1)) - 1) / 2;
    lastLayer = addVGGBlock(lastLayer, 5, 1, 512, 3, null, false, nKernelSize, nPad, 1, nDilation);
    // ...
    lastLayer = addVGGfc(lastLayer, 5, 4, 512, nDilation, bDilated, bNoPool, bFullConv, bReduced, bDropout);
    // ...
    if (rgstrFreezeLayers != null)
    {
        foreach (string strFreezeLayer in rgstrFreezeLayers)
        {
            // ...
        }
    }
    // ...
}
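A hedged usage sketch (from inside a ModelBuilder-derived class): attach the VGG-16 style body to the data layer created earlier and freeze the first two convolution blocks. The frozen layer names follow the conv{block}_{index} pattern used above but are otherwise illustrative.

List<string> rgstrFreeze = new List<string>() { "conv1_1", "conv1_2", "conv2_1", "conv2_2" };
LayerParameter lastLayer = addVGGNetBody(data, true, true, true, true, false, false, rgstrFreeze, true);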
protected LayerParameter addResBody(LayerParameter lastLayer, string strBlockName, int nOut2A, int nOut2B, int nOut2C, int nStride, bool bUseBranch1, int nDilation = 1, bool bNamedParams = false, string strLayerPostfix = "", Phase phaseExclude = Phase.NONE)
{
    string strConvPrefix = "res_" + strBlockName;
    string strConvPostfix = "";
    string strBnPrefix = "bn_" + strBlockName;
    string strBnPostfix = "";
    string strScalePrefix = "scale_" + strBlockName;
    string strScalePostfix = "";
    // ...
    string strBranch1 = lastLayer.name;
    // ...
    string strBranchName;
    string strOutName = lastLayer.name;
    // ...
    strBranchName = "_br1";
    lastLayer = addConvBNLayer(lastLayer.name, strBranchName, true, false, nOut2C, 1, 0, nStride, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
    strBranch1 = lastLayer.top[0];
    strOutName = strBranch1;
    // ...
    strBranchName = "_br2a";
    lastLayer = addConvBNLayer(strOutName, strBranchName, true, true, nOut2A, 1, 0, nStride, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
    strOutName = strConvPrefix + strBranchName + strLayerPostfix;

    strBranchName = "_br2b";
    // ...
    lastLayer = addConvBNLayer(strOutName, strBranchName, true, true, nOut2B, 3, 1, 1, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
    // ...
    int nPad = (int)(((3 + (nDilation - 1) * 2) - 1) / 2);
    lastLayer = addConvBNLayer(strOutName, strBranchName, true, true, nOut2B, 3, nPad, 1, 1, nDilation, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
    // ...
    strOutName = strConvPrefix + strBranchName + strLayerPostfix;
    strBranchName = "_br2c";
    // ...
    lastLayer = addConvBNLayer(strOutName, strBranchName, true, false, nOut2C, 1, 0, 1, 1, 1, useScale, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
    strBranch2 = lastLayer.top[0];
    // ...
    eltwise.name = "res" + strBlockName + strLayerPostfix;
    eltwise.bottom.Add(strBranch1);
    eltwise.bottom.Add(strBranch2);
    eltwise.top.Add(eltwise.name);
    // ...
    lastLayer = eltwise;
    // ...
    relu.name = eltwise.name + "_relu";
    // ...
}

// ...
if (phase == Phase.NONE)
{
    // ...
}
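As the excerpt shows, each block builds an optional 1x1 projection branch ("_br1", when bUseBranch1 is true) plus a 1x1 -> 3x3 -> 1x1 bottleneck branch ("_br2a" through "_br2c"), sums the two with an eltwise layer and applies a ReLU. A hedged usage sketch mirroring the "2a"/"2b" pattern used by addResNetBody below:

lastLayer = addResBody(lastLayer, "2a", 64, 64, 256, 1, true);   // projection block (branch1 is a 1x1 convolution)
lastLayer = addResBody(lastLayer, "2b", 64, 64, 256, 1, false);  // identity block (branch1 is the incoming blob)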
protected LayerParameter addResNetBody(string strDataName, int nBlock3Count = 4, int nBlock4Count = 23, bool bUsePool5 = true, bool bUseDilationConv5 = false, bool bNamedParams = false, string strLayerPostfix = "", Phase phaseExclude = Phase.NONE)
{
    string strConvPrefix = "";
    string strConvPostfix = "";
    string strBnPrefix = "bn_";
    string strBnPostfix = "";
    string strScalePrefix = "scale_";
    string strScalePostfix = "";

    LayerParameter lastLayer = addConvBNLayer(strDataName, "conv1", true, true, 64, 7, 3, 2, 1, 1, SCALE_BIAS.SCALE, strConvPrefix, strConvPostfix, strBnPrefix, strBnPostfix, strScalePrefix, strScalePostfix, "", "_bias", bNamedParams, strLayerPostfix, phaseExclude);
    // ...
    lastLayer = addResBody(lastLayer, "2a", 64, 64, 256, 1, true, 1, bNamedParams, strLayerPostfix, phaseExclude);
    lastLayer = addResBody(lastLayer, "2b", 64, 64, 256, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);
    lastLayer = addResBody(lastLayer, "2c", 64, 64, 256, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);

    lastLayer = addResBody(lastLayer, "3a", 128, 128, 512, 2, true, 1, bNamedParams, strLayerPostfix, phaseExclude);
    for (int i = 1; i <= nBlock3Count; i++)
    {
        lastLayer = addResBody(lastLayer, "3b" + i.ToString(), 128, 128, 512, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);
    }

    lastLayer = addResBody(lastLayer, "4a", 256, 256, 1024, 2, true, 1, bNamedParams, strLayerPostfix, phaseExclude);
    for (int i = 1; i <= nBlock4Count; i++)
    {
        lastLayer = addResBody(lastLayer, "4b" + i.ToString(), 256, 256, 1024, 1, false, 1, bNamedParams, strLayerPostfix, phaseExclude);
    }
    // ...
    if (bUseDilationConv5)
    {
        // ...
    }
    // ...
    lastLayer = addResBody(lastLayer, "5a", 512, 512, 2048, nStride, true, nDilation, bNamedParams, strLayerPostfix, phaseExclude);
    lastLayer = addResBody(lastLayer, "5b", 512, 512, 2048, 1, false, nDilation, bNamedParams, strLayerPostfix, phaseExclude);
    lastLayer = addResBody(lastLayer, "5c", 512, 512, 2048, 1, false, nDilation, bNamedParams, strLayerPostfix, phaseExclude);
    // ...
}
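A hedged usage sketch (from inside a ModelBuilder-derived class): build the documented ResNet101-style body on top of the "data" blob with the default block counts and no dilation in the conv5 stage, then continue from the returned layer.

LayerParameter lastLayer = addResNetBody("data", 4, 23, true, false);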
get { return m_net; }
string m_strSourceLayer;
double? m_dfMinSize;
double? m_dfMaxSize;
double? m_dfStepWidth;
double? m_dfStepHeight;
double? m_dfAspectRatioHeight;
double? m_dfAspectRatioWidth;
double? m_dfNormalization;
double? m_nInterLayerDepth;
public MultiBoxHeadInfo(string strSrcLayer, double? dfMinSize = null, double? dfMaxSize = null, double? dfStepWidth = null, double? dfStepHeight = null, double? dfAspectRatioWidth = null, double? dfAspectRatioHeight = null, double? dfNormalization = null, int? nInterLayerDepth = null)
{
    m_strSourceLayer = strSrcLayer;
    m_dfMinSize = dfMinSize;
    m_dfMaxSize = dfMaxSize;
    m_dfStepWidth = dfStepWidth;
    m_dfStepHeight = dfStepHeight;
    m_dfAspectRatioHeight = dfAspectRatioHeight;
    m_dfAspectRatioWidth = dfAspectRatioWidth;
    m_dfNormalization = dfNormalization;
    m_nInterLayerDepth = nInterLayerDepth;
}
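A hedged sketch of how MultiBoxHeadInfo instances are assembled and handed to createMultiBoxHead (from inside a ModelBuilder-derived class). The source-layer names, box sizes and normalization values are illustrative only, and all optional values are set consistently across the two items so that Verify succeeds.

//                    source      min  max  stepW stepH arW  arH  norm  interDepth
List<MultiBoxHeadInfo> rgInfo = new List<MultiBoxHeadInfo>()
{
    new MultiBoxHeadInfo("conv4_3", 30,  60,  null, null, 2.0, 2.0,  20,  null),
    new MultiBoxHeadInfo("fc7",     60, 111,  null, null, 2.0, 2.0,  -1,  null)
};
List<float> rgPriorVariance = new List<float>() { 0.1f, 0.1f, 0.2f, 0.2f };

// 'data' is the annotated data layer created earlier; 21 classes (e.g. 20 object classes + background).
List<LayerParameter> rgMboxLayers = createMultiBoxHead(data, 21, rgInfo, rgPriorVariance);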
private bool verify(double? df1, double? df2)
{
    if ((df1.HasValue && !df2.HasValue) || (!df1.HasValue && df2.HasValue))
        return false;
    // ...
}

if (!verify(m_dfAspectRatioHeight, info.m_dfAspectRatioHeight))
    return false;
if (!verify(m_dfAspectRatioWidth, info.m_dfAspectRatioWidth))
    return false;
if (!verify(m_dfMaxSize, info.m_dfMaxSize))
    return false;
if (!verify(m_dfMinSize, info.m_dfMinSize))
    return false;
if (!verify(m_dfNormalization, info.m_dfNormalization))
    return false;
if (!verify(m_dfStepWidth, info.m_dfStepWidth))
    return false;
if (!verify(m_dfStepHeight, info.m_dfStepHeight))
    return false;
if (!verify(m_nInterLayerDepth, info.m_nInterLayerDepth))
    return false;
get { return m_strSourceLayer; }
get { return m_dfMinSize; }
get { return m_dfMaxSize; }
get { return m_dfStepHeight; }
get { return m_dfStepWidth; }
get { return m_dfAspectRatioHeight; }
get { return m_dfAspectRatioWidth; }
get { return m_dfNormalization; }
get { return m_nInterLayerDepth; }
The SimpleDatum class holds a data input within host memory.
ANNOTATION_TYPE
Specifies the annotation type when using annotations.
The ModelBuilder is an abstract class that is overridden by derived classes used to programmatically build new model scripts.
SolverParameter Solver
Returns the base solver.
LayerParameter addVGGNetBody(LayerParameter lastLayer, bool bNeedFc=true, bool bFullConv=true, bool bReduced=true, bool bDilated=true, bool bNoPool=false, bool bDropout=false, List< string > rgstrFreezeLayers=null, bool bDilatePool4=false)
Adds the full VGG body to the network, connecting it to the 'lastLayer'.
SolverParameter m_solver
Specifies the base solver to use.
void addExclusion(LayerParameter p, Phase phase)
Add a phase exclusion.
abstract LayerParameter addExtraLayers(bool bUseBatchNorm=true, double dfLrMult=1.0)
Add extra layers on top of a 'base' network (e.g. VGGNet or Inception)
SCALE_BIAS
Defines the scale bias type to use.
LayerParameter addResNetBody(string strDataName, int nBlock3Count=4, int nBlock4Count=23, bool bUsePool5=true, bool bUseDilationConv5=false, bool bNamedParams=false, string strLayerPostfix="", Phase phaseExclude=Phase.NONE)
Create a ResNet101 Body.
LayerParameter addAnnotatedDataLayer(string strSource, Phase phase, int nBatchSize=32, bool bOutputLabel=true, string strLabelMapFile="", SimpleDatum.ANNOTATION_TYPE anno_type=SimpleDatum.ANNOTATION_TYPE.NONE, TransformationParameter transform=null, List< BatchSampler > rgSampler=null)
Add the Annotated Data layer.
NetParameter m_net
Specifies the base net to be altered.
LayerParameter createPooling(string strName, PoolingParameter.PoolingMethod method, int nKernelSize, int nPad=0, int nStride=1)
Create a new pooling layer parameter.
NetParameter Net
Returns the base net altered by the model builder.
LayerParameter addVGGBlock(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nConvCount, bool? bNoPool, bool bDilatePool=false, int nKernelSize=3, int nPad=1, int nStride=1, int nDilation=1)
Add a new VGG block.
ModelBuilder(string strBaseDir, NetParameter net=null, SolverParameter solver=null)
The constructor.
List< LayerParameter > createMultiBoxHead(LayerParameter data, int nNumClasses, List< MultiBoxHeadInfo > rgInfo, List< float > rgPriorVariance, bool bUseObjectness=false, bool bUseBatchNorm=true, double dfLrMult=1.0, bool useScale=true, int nImageHt=0, int nImageWd=0, bool bShareLocation=true, bool bFlip=true, bool bClip=true, double dfOffset=0.5, int nKernelSize=1, int nPad=0, string strConfPostfix="", string strLocPostfix="")
Create the multi-box head layers.
abstract NetParameter CreateDeployModel()
Create the deploy model to use.
LayerParameter createConvolution(string strName, int nNumOutput, int nKernelSize, int nPad=0, int nStride=1, int nDilation=1)
Create a new convolution layer parameter.
LayerParameter addConvBNLayer(string strInputLayer, string strOutputLayer, bool bUseBatchNorm, bool bUseRelU, int nNumOutput, int nKernelSize, int nPad, int nStride, double dfLrMult=1.0, int nDilation=1, SCALE_BIAS useScale=SCALE_BIAS.SCALE, string strConvPrefix="", string strConvPostfix="", string strBnPrefix="", string strBnPostfix="_bn", string strScalePrefix="", string strScalePostFix="_scale", string strBiasPrefix="", string strBiasPostfix="_bias", bool bNamedParams=false, string strLayerPostfix="", Phase phaseExclude=Phase.NONE)
Add convolution, batch-norm layers.
string m_strBaseDir
Specifies the base directory that contains the data and models.
LayerParameter connectAndAddLayer(List< LayerParameter > rgFromLayer, LayerParameter toLayer, bool bAdd=true)
Connect the from layer to the 'to' layer.
NetParameter createNet(string strName)
Create the base network parameter for the model and set its name to the 'm_strModel' name.
LayerParameter addDataLayer(string strSource, Phase phase, int nBatchSize=32, bool bOutputLabel=true, TransformationParameter transform=null, string strName="data", bool bSiamese=false)
Add the Data layer.
LayerParameter findLayer(string strName)
Find a layer with a given name.
LayerParameter addVGGfc(LayerParameter lastLayer, int nBlockIdx, int nConvIdx, int nNumOutput, int nDilation, bool bDilated, bool bNoPool, bool bFullConv, bool bReduced, bool bDropout)
Adds the final layers to the network.
abstract NetParameter CreateModel(bool bDeploy=false)
Create the training/testing/deploy model to use.
LayerParameter addResBody(LayerParameter lastLayer, string strBlockName, int nOut2A, int nOut2B, int nOut2C, int nStride, bool bUseBranch1, int nDilation=1, bool bNamedParams=false, string strLayerPostfix="", Phase phaseExclude=Phase.NONE)
Adds a ResNet body to the network, connecting it to the 'lastLayer'.
LayerParameter connectAndAddLayer(string fromLayer, LayerParameter toLayer, string fromLayer2=null)
Connect the from layer to the 'to' layer.
abstract SolverParameter CreateSolver()
Create the base solver to use.
string getFileName(string strFile, string strSubDir)
Returns the full path of the filename using the base directory originally set when creating the ModelBuilder.
LayerParameter connectAndAddLayer(LayerParameter fromLayer, LayerParameter toLayer, bool bInPlace=false, bool bAdd=true, int nTopIdx=0)
Connect the from layer to the 'to' layer.
The MultiBoxHeadInfo contains information used to build the multi-box head of layers.
MultiBoxHeadInfo(string strSrcLayer, double? dfMinSize=null, double? dfMaxSize=null, double? dfStepWidth=null, double? dfStepHeight=null, double? dfAspectRatioWidth=null, double? dfAspectRatioHeight=null, double? dfNormalization=null, int? nInterLayerDepth=null)
The constructor.
double? InterLayerDepth
Returns the inter-layer depth, or -1 to ignore.
double? MaxSize
Returns the bbox maximum size.
double? StepWidth
Returns the step width.
string SourceLayer
Returns the source layer.
double? Normalization
Returns the normalization, or -1 to ignore.
double? AspectRatioWidth
Returns the aspect ratio width.
double? MinSize
Returns the bbox minimum size.
bool Verify(MultiBoxHeadInfo info)
Verify that all numerical values are consistently set (or not) between two info objects.
double? AspectRatioHeight
Returns the aspect ratio height.
double? StepHeight
Returns the step height.
double eps
Specifies a small value to add to the variance estimate so that we don't divide by zero.
double moving_average_fraction
Specifies how much the moving average decays each iteration. Smaller values make the moving average d...
bool? use_global_stats
If false, normalization is performed over the current mini-batch and global statistics are accumulate...
FillerParameter filler
(filler is ignored unless just one bottom is given and the bias is a learned parameter of the layer....
int axis
The axis along which to concatenate – may be negative to index from the end (e.g.,...
FillerParameter weight_filler
The filler for the weight. The default is set to use the 'xavier' filler.
FillerParameter bias_filler
The filler for the bias. The default is set to use the 'constant = 0.1' filler.
bool bias_term
Whether to have bias terms or not.
uint num_output
The number of outputs for the layer.
virtual uint batch_size
Specifies the batch size.
int images_per_blob
(optional, default = 1) Specifies the number of images to load into each blob channel....
string source
When used with the DATA parameter, specifies the data 'source' within the database....
bool output_all_labels
(optional, default = false) When using images_per_blob > 1, 'output_all_labels' specifies to output a...
bool balance_matches
(optional, default = true) When using images_per_blob > 1, 'balance_matches' specifies to query image...
double dropout_ratio
Specifies the dropout ratio. (e.g. the probability that values will be dropped out and set to zero....
Specifies the filler parameters used to create each Filler.
int axis
Specifies the first axis to flatten: all preceding axes are retained in the output....
uint num_output
The number of outputs for the layer.
List< uint > kernel_size
Kernel size is given as a single value for equal dimensions in all spatial dimensions,...
List< uint > dilation
Factor used to dilate the kernel, (implicitly) zero-filling the resulting holes. (Kernel dilation is ...
List< uint > stride
Stride is given as a single value for equal dimensions in all spatial dimensions, or once per spatial...
List< uint > pad
Pad is given as a single value for equal dimensions in all spatial dimensions, or once per spatial di...
Specifies the base parameter for all layers.
Normalization2Parameter normalization2_param
Returns the parameter set when initialized with LayerType.NORMALIZATION2
ConvolutionParameter convolution_param
Returns the parameter set when initialized with LayerType.CONVOLUTION
List< ParamSpec > parameters
Specifies the ParamSpec parameters of the LayerParameter.
string name
Specifies the name of this LayerParameter.
List< NetStateRule > include
Specifies the NetStateRule's for which this LayerParameter should be included.
ScaleParameter scale_param
Returns the parameter set when initialized with LayerType.SCALE
bool freeze_learning
Get/set whether or not to freeze the learning for this layer globally.
List< NetStateRule > exclude
Specifies the NetStateRule's for which this LayerParameter should be excluded.
PoolingParameter pooling_param
Returns the parameter set when initialized with LayerType.POOLING
PermuteParameter permute_param
Returns the parameter set when initialized with LayerType.PERMUTE
PriorBoxParameter prior_box_param
Returns the parameter set when initialized with LayerType.PRIORBOX
List< string > top
Specifies the active top connections (in the bottom, out the top)
AnnotatedDataParameter annotated_data_param
Returns the parameter set when initialized with LayerType.ANNOTATED_DATA
BiasParameter bias_param
Returns the parameter set when initialized with LayerType.BIAS
InnerProductParameter inner_product_param
Returns the parameter set when initialized with LayerType.INNERPRODUCT
ConcatParameter concat_param
Returns the parameter set when initialized with LayerType.CONCAT
TransformationParameter transform_param
Returns the parameter set when initialized with LayerType.TRANSFORM
DataParameter data_param
Returns the parameter set when initialized with LayerType.DATA
FlattenParameter flatten_param
Returns the parameter set when initialized with LayerType.FLATTEN
BatchNormParameter batch_norm_param
Returns the parameter set when initialized with LayerType.BATCHNORM
List< string > bottom
Specifies the active bottom connections (in the bottom, out the top).
LayerType
Specifies the layer type.
override string ToString()
Returns a string representation of the LayerParameter.
DropoutParameter dropout_param
Returns the parameter set when initialized with LayerType.DROPOUT
Specifies the parameters use to create a Net
string name
The name of the network.
List< LayerParameter > layer
The layers that make up the net. Each of their configurations, including connectivity and behavior,...
Specifies a NetStateRule used to determine whether a Net falls within a given include or exclude patt...
Specifies training parameters (multipliers on global learning constants, and the name of other settin...
Specifies the parameters for the PoolingLayer.
PoolingMethod
Defines the pooling method.
PoolingMethod pool
Specifies the pooling method.
bool global_pooling
Specifies whether or not to enable global pooling.
bool bias_term
Whether to also learn a bias (equivalent to a ScalarLayer + BiasLayer, but may be more efficient).
FillerParameter bias_filler
Filler used for bias filling.
The SolverParameter is a parameter for the solver, specifying the train and test networks.
SimpleDatum.ANNOTATION_TYPE anno_type
Get/set the annotation type.
List< BatchSampler > batch_sampler
Get/set the batch sampler.
string label_map_file
Get/set the label map file.
FillerParameter scale_filler
Specifies the filler for the initial value of scale, default is 1.0 for all.
bool channel_shared
Specifies whether or not the scale parameters are shared across channels.
bool across_spatial
Specifies to normalize across the spatial dimensions.
List< int > order
Specifies the new orders of the axes of data.
float offset
Specifies the offset to the top left corner of each cell.
uint? img_size
Specifies the image size. By default we calculate the img_height, img_width, step_x and step_y based ...
uint? img_h
Specifies the image height. By default we calculate the img_height, img_width, step_x and step_y base...
List< float > variance
Specifies the variance for adjusting the prior boxes.
List< float > max_size
Specifies the maximum box size (in pixels) and is required!
float? step_w
Specifies the explicit step size to use along width.
uint? img_w
Specifies the image width. By default we calculate the img_height, img_width, step_x and step_y based...
float? step
Specifies the explicit step size to use.
float? step_h
Specifies the explicit step size to use along height.
List< float > aspect_ratio
Specifies various aspect ratios. Duplicate ratios are ignored. If none are provided,...
bool clip
Specifies whether or not to clip the prior so that it is within [0,1].
List< float > min_size
Specifies the minimum box size (in pixels) and is required!
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Phase
Defines the Phase under which to run a Net.
NONE
No training category specified.
The MyCaffe.model namespace contains all classes used to programmatically create new model scripts.
The MyCaffe.param.ssd namespace contains all SSD related parameter objects that correspond to the nat...
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-source project.
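Putting the pieces together, a hedged skeleton of a ModelBuilder-derived class implementing the abstract members documented above. The class name, the access modifiers and the direct use of the m_net/m_solver fields are assumptions; the build logic is left as comments.

using System.Collections.Generic;
using MyCaffe.basecode;
using MyCaffe.model;
using MyCaffe.param;

// Hypothetical derived builder; not part of the library.
public class MySsdModelBuilder : ModelBuilder
{
    public MySsdModelBuilder(string strBaseDir)
        : base(strBaseDir)
    {
    }

    protected override LayerParameter addExtraLayers(bool bUseBatchNorm = true, double dfLrMult = 1.0)
    {
        // Add the extra feature layers on top of the base network (e.g. with addConvBNLayer).
        return null;
    }

    public override NetParameter CreateModel(bool bDeploy = false)
    {
        // Build the net here: data layer, addVGGNetBody/addResNetBody, addExtraLayers, createMultiBoxHead.
        return m_net;
    }

    public override NetParameter CreateDeployModel()
    {
        return CreateModel(true);
    }

    public override SolverParameter CreateSolver()
    {
        // Configure and return the base solver.
        return m_solver;
    }
}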