2using System.Collections.Generic;
22 [TypeConverter(typeof(ExpandableObjectConverter))]
34 List<string> m_rgstrBottom =
new List<string>();
36 List<string> m_rgstrTop =
new List<string>();
38 bool m_bGroupStart =
false;
40 bool m_bUseHalfSize =
false;
48 List<double> m_rgLossWeight =
new List<double>();
52 List<ParamSpec> m_rgParams =
new List<ParamSpec>();
57 List<BlobProto> m_rgBlobs =
new List<BlobProto>();
66 List<bool> m_rgbPropagateDown =
new List<bool>();
76 List<NetStateRule> m_rgInclude =
new List<NetStateRule>();
77 List<NetStateRule> m_rgExclude =
new List<NetStateRule>();
78 Dictionary<Phase, int> m_rgMaxBottomCount =
new Dictionary<Phase, int>();
80 int m_nSolverCount = 1;
81 int m_nSolverRank = 0;
82 List<string> m_rgstrExpectedTop =
new List<string>();
83 List<string> m_rgstrExpectedBottom =
new List<string>();
84 bool m_bFreezeLearning =
false;
85 bool m_bConnectLossEvent =
false;
103 INFERENCE_AND_TRAINING
162 CAUSAL_SELF_ATTENTION,
406 MULTIHEAD_ATTENTION_INTERP,
410 MULTINOMIALLOGISTIC_LOSS,
498 SIGMOIDCROSSENTROPY_LOSS,
510 SOFTMAXCROSSENTROPY_LOSS,
514 SOFTMAXCROSSENTROPY2_LOSS,
594 TOKENIZED_DATA_PAIRS,
598 TOKENIZED_DATA_PAIRS_PY,
667#pragma warning disable 1591
669#pragma warning restore 1591
678 Dictionary<LayerType, LayerParameterBase> m_rgLayerParameters =
new Dictionary<LayerType, LayerParameterBase>();
683 for (
int i = 0; i < (int)
LayerType._MAX; i++)
685 m_rgLayerParameters.Add((
LayerType)i,
null);
700 if (m_strName ==
null)
701 m_strName = lt.ToString();
703 for (
int i = 0; i < (int)
LayerType._MAX; i++)
705 m_rgLayerParameters.Add((
LayerType)i,
null);
719 m_strName = p.m_strName;
720 m_rgstrBottom = p.m_rgstrBottom;
721 m_rgstrTop = p.m_rgstrTop;
723 m_rgLossWeight = p.m_rgLossWeight;
724 m_rgParams = p.m_rgParams;
725 m_rgBlobs = p.m_rgBlobs;
726 m_rgbPropagateDown = p.m_rgbPropagateDown;
727 m_rgInclude = p.m_rgInclude;
728 m_rgExclude = p.m_rgExclude;
729 m_rgLayerParameters = p.m_rgLayerParameters;
730 m_nSolverCount = p.m_nSolverCount;
731 m_nSolverRank = p.m_nSolverRank;
732 m_bGroupStart = p.m_bGroupStart;
741 if (m_rgLayerParameters[m_type] ==
null)
744 return m_rgLayerParameters[m_type].PrepareRunModelInputs();
752 if (m_rgLayerParameters[m_type] ==
null)
755 m_rgLayerParameters[m_type].PrepareRunModel(
this);
780 return m_rgParams.Count + nOffset;
792 if (p.
type != m_type)
793 throw new ArgumentOutOfRangeException();
847 if (m_rgInclude.Count == 0)
850 if (m_rgExclude.Count > 0)
860 public void Save(BinaryWriter bw)
862 bw.Write((
int)m_type);
883 foreach (KeyValuePair<LayerType, LayerParameterBase> kv
in m_rgLayerParameters)
885 if (kv.Value !=
null)
887 bw.Write((
int)kv.Key);
901 public object Load(BinaryReader br,
bool bNewInstance)
904 string strName = br.ReadString();
911 p.m_rgstrBottom =
Utility.Load<
string>(br);
912 p.m_rgstrTop =
Utility.Load<
string>(br);
913 p.m_rgLossWeight =
Utility.Load<
double>(br);
916 p.m_rgbPropagateDown =
Utility.Load<
bool>(br);
920 int nCount = br.ReadInt32();
922 for (
int i = 0; i < nCount; i++)
932 private void setupParams(
LayerType lt,
bool bNewParams =
true)
1116 if (bNewParams || m_rgLayerParameters[
LayerType.CONVOLUTION] ==
null)
1318 m_rgLayerParameters[
LayerType.DATA] = imgdp;
1359 m_rgMaxBottomCount.Add(
Phase.RUN, 1);
1454 case LayerType.MULTIHEAD_ATTENTION_INTERP:
1465 case LayerType.MULTINOMIALLOGISTIC_LOSS:
1648 case LayerType.SIGMOIDCROSSENTROPY_LOSS:
1666 case LayerType.SOFTMAXCROSSENTROPY_LOSS:
1674 case LayerType.SOFTMAXCROSSENTROPY2_LOSS:
1866 get {
return m_strName; }
1867 set { m_strName = value; }
1875 get {
return m_type; }
1883 get {
return m_bUseHalfSize; }
1884 set { m_bUseHalfSize = value; }
1903 setupParams(
type, bNewParam);
1911 get {
return m_rgstrBottom; }
1912 set { m_rgstrBottom = value; }
1920 get {
return m_rgstrTop; }
1921 set { m_rgstrTop = value; }
1929 get {
return m_phase; }
1930 set { m_phase = value; }
1938 get {
return m_bFreezeLearning; }
1939 set { m_bFreezeLearning = value; }
1947 get {
return m_bConnectLossEvent; }
1948 set { m_bConnectLossEvent = value; }
1956 get {
return m_rgLossWeight; }
1957 set { m_rgLossWeight = value; }
1965 get {
return m_rgParams; }
1966 set { m_rgParams = value; }
1974 get {
return m_rgBlobs; }
1975 set { m_rgBlobs = value; }
1983 get {
return m_rgbPropagateDown; }
1984 set { m_rgbPropagateDown = value; }
1992 get {
return m_rgInclude; }
1993 set { m_rgInclude = value; }
2001 get {
return m_rgExclude; }
2002 set { m_rgExclude = value; }
2010 get {
return m_bGroupStart; }
2011 set { m_bGroupStart = value; }
2020 set { m_rgLayerParameters[
LayerType.TRANSFORM] = value; }
2029 set { m_rgLayerParameters[
LayerType.LOSS] = value; }
2038 set { m_rgLayerParameters[
LayerType.ACCURACY] = value; }
2047 set { m_rgLayerParameters[
LayerType.ARGMAX] = value; }
2056 set { m_rgLayerParameters[
LayerType.BATCHNORM] = value; }
2065 set { m_rgLayerParameters[
LayerType.BIAS] = value; }
2074 set { m_rgLayerParameters[
LayerType.CFC] = value; }
2083 set { m_rgLayerParameters[
LayerType.CFC_UNIT] = value; }
2092 set { m_rgLayerParameters[
LayerType.CLIP] = value; }
2101 set { m_rgLayerParameters[
LayerType.CONCAT] = value; }
2110 set { m_rgLayerParameters[
LayerType.CONSTANT] = value; }
2119 set { m_rgLayerParameters[
LayerType.CONTRASTIVE_LOSS] = value; }
2128 set { m_rgLayerParameters[
LayerType.CONVOLUTION] = value; }
2137 set { m_rgLayerParameters[
LayerType.CONVOLUTION_OCTAVE] = value; }
2146 set { m_rgLayerParameters[
LayerType.CROP] = value; }
2155 set { m_rgLayerParameters[
LayerType.DECODE] = value; }
2164 set { m_rgLayerParameters[
LayerType.ANNOTATED_DATA] = value; }
2174 set { m_rgLayerParameters[
LayerType.ATTENTION] = value; }
2184 set { m_rgLayerParameters[
LayerType.CATEGORICAL_TRANS] = value; }
2193 set { m_rgLayerParameters[
LayerType.CAUSAL_SELF_ATTENTION] = value; }
2202 set { m_rgLayerParameters[
LayerType.MULTIHEAD_ATTENTION] = value; }
2211 set { m_rgLayerParameters[
LayerType.MULTIHEAD_ATTENTION_INTERP] = value; }
2220 set { m_rgLayerParameters[
LayerType.POSITIONAL_ENCODER] = value; }
2229 set { m_rgLayerParameters[
LayerType.DETECTION_EVALUATE] = value; }
2238 set { m_rgLayerParameters[
LayerType.DETECTION_OUTPUT] = value; }
2247 set { m_rgLayerParameters[
LayerType.DATA] = value; }
2256 set { m_rgLayerParameters[
LayerType.DATA_NORMALIZER] = value; }
2265 set { m_rgLayerParameters[
LayerType.DATA_SEQUENCE] = value; }
2274 set { m_rgLayerParameters[
LayerType.DATA_TEMPORAL] = value; }
2283 set { m_rgLayerParameters[
LayerType.DEBUG] = value; }
2292 set { m_rgLayerParameters[
LayerType.DROPOUT] = value; }
2301 set { m_rgLayerParameters[
LayerType.DUMMYDATA] = value; }
2310 set { m_rgLayerParameters[
LayerType.ELTWISE] = value; }
2319 set { m_rgLayerParameters[
LayerType.ELU] = value; }
2328 set { m_rgLayerParameters[
LayerType.EMBED] = value; }
2337 set { m_rgLayerParameters[
LayerType.EXP] = value; }
2346 set { m_rgLayerParameters[
LayerType.FLATTEN] = value; }
2355 set { m_rgLayerParameters[
LayerType.GATHER] = value; }
2364 set { m_rgLayerParameters[
LayerType.GATEADDNORM] = value; }
2373 set { m_rgLayerParameters[
LayerType.GELU] = value; }
2382 set { m_rgLayerParameters[
LayerType.GLU] = value; }
2391 set { m_rgLayerParameters[
LayerType.GRN] = value; }
2400 set { m_rgLayerParameters[
LayerType.GRADIENTSCALER] = value; }
2409 set { m_rgLayerParameters[
LayerType.GRAM] = value; }
2418 set { m_rgLayerParameters[
LayerType.HDF5_DATA] = value; }
2427 set { m_rgLayerParameters[
LayerType.HINGE_LOSS] = value; }
2436 set { m_rgLayerParameters[
LayerType.IMAGE_DATA] = value; }
2445 set { m_rgLayerParameters[
LayerType.INFOGAIN_LOSS] = value; }
2454 set { m_rgLayerParameters[
LayerType.INNERPRODUCT] = value; }
2463 set { m_rgLayerParameters[
LayerType.INTERP] = value; }
2472 set { m_rgLayerParameters[
LayerType.KNN] = value; }
2481 set { m_rgLayerParameters[
LayerType.LABELMAPPING] = value; }
2490 set { m_rgLayerParameters[
LayerType.LAYERNORM] = value; }
2499 set { m_rgLayerParameters[
LayerType.LOG] = value; }
2508 set { m_rgLayerParameters[
LayerType.LRN] = value; }
2517 set { m_rgLayerParameters[
LayerType.LTC_UNIT] = value; }
2526 set { m_rgLayerParameters[
LayerType.MEAN_ERROR_LOSS] = value; }
2535 set { m_rgLayerParameters[
LayerType.MATH] = value; }
2544 set { m_rgLayerParameters[
LayerType.MERGE] = value; }
2553 set { m_rgLayerParameters[
LayerType.MEMORYDATA] = value; }
2562 set { m_rgLayerParameters[
LayerType.MISH] = value; }
2571 set { m_rgLayerParameters[
LayerType.MULTIBOX_LOSS] = value; }
2580 set { m_rgLayerParameters[
LayerType.MVN] = value; }
2589 set { m_rgLayerParameters[
LayerType.NLL_LOSS] = value; }
2598 set { m_rgLayerParameters[
LayerType.NUMERIC_TRANS] = value; }
2607 set { m_rgLayerParameters[
LayerType.ONEHOT] = value; }
2616 set { m_rgLayerParameters[
LayerType.NORMALIZATION1] = value; }
2625 set { m_rgLayerParameters[
LayerType.NORMALIZATION2] = value; }
2634 set { m_rgLayerParameters[
LayerType.POOLING] = value; }
2643 set { m_rgLayerParameters[
LayerType.UNPOOLING] = value; }
2652 set { m_rgLayerParameters[
LayerType.PARAMETER] = value; }
2661 set { m_rgLayerParameters[
LayerType.PERMUTE] = value; }
2670 set { m_rgLayerParameters[
LayerType.POWER] = value; }
2679 set { m_rgLayerParameters[
LayerType.PRELU] = value; }
2688 set { m_rgLayerParameters[
LayerType.PRIORBOX] = value; }
2697 set { m_rgLayerParameters[
LayerType.QUANTILE_ACCURACY] = value; }
2706 set { m_rgLayerParameters[
LayerType.QUANTILE_LOSS] = value; }
2715 set { m_rgLayerParameters[
LayerType.REDUCTION] = value; }
2724 set { m_rgLayerParameters[
LayerType.RELU] = value; }
2733 set { m_rgLayerParameters[
LayerType.RESHAPE] = value; }
2742 set { m_rgLayerParameters[
LayerType.RESHAPE_TEMPORAL] = value; }
2751 set { m_rgLayerParameters[
LayerType.SQUEEZE] = value; }
2760 set { m_rgLayerParameters[
LayerType.SCALAR] = value; }
2769 set { m_rgLayerParameters[
LayerType.SCALE] = value; }
2778 set { m_rgLayerParameters[
LayerType.SERF] = value; }
2787 set { m_rgLayerParameters[
LayerType.SIGMOID] = value; }
2796 set { m_rgLayerParameters[
LayerType.SOFTMAX] = value; }
2805 set { m_rgLayerParameters[
LayerType.SPP] = value; }
2814 set { m_rgLayerParameters[
LayerType.SLICE] = value; }
2824 set { m_rgLayerParameters[
LayerType.SWISH] = value; }
2833 set { m_rgLayerParameters[
LayerType.TANH] = value; }
2842 set { m_rgLayerParameters[
LayerType.MODEL_DATA] = value; }
2851 set { m_rgLayerParameters[
LayerType.TEXT_DATA] = value; }
2860 set { m_rgLayerParameters[
LayerType.THRESHOLD] = value; }
2869 set { m_rgLayerParameters[
LayerType.TILE] = value; }
2878 set { m_rgLayerParameters[
LayerType.TRANSPOSE] = value; }
2887 set { m_rgLayerParameters[
LayerType.TRANSFORMER_BLOCK] = value; }
2896 set { m_rgLayerParameters[
LayerType.TOKENIZED_DATA] = value; }
2905 set { m_rgLayerParameters[
LayerType.TOKENIZED_DATA_PAIRS] = value; }
2914 set { m_rgLayerParameters[
LayerType.TRIPLET_LOSS] = value; }
2923 set { m_rgLayerParameters[
LayerType.TV_LOSS] = value; }
2932 set { m_rgLayerParameters[
LayerType.LSTM_SIMPLE] = value; }
2941 set { m_rgLayerParameters[
LayerType.LSTM_ATTENTION] = value; }
2950 set { m_rgLayerParameters[
LayerType.RECURRENT] = value; }
2959 set { m_rgLayerParameters[
LayerType.INPUT] = value; }
2968 set { m_rgLayerParameters[
LayerType.VIDEO_DATA] = value; }
2977 set { m_rgLayerParameters[
LayerType.VARSELNET] = value; }
2993 get {
return m_nSolverCount; }
2994 set { m_nSolverCount = value; }
3002 get {
return m_nSolverRank; }
3003 set { m_nSolverRank = value; }
3011 get {
return m_rgstrExpectedTop; }
3019 get {
return m_rgstrExpectedBottom; }
3028 m_rgLayerParameters =
new Dictionary<LayerType, LayerParameterBase>();
3030 foreach (KeyValuePair<LayerType, LayerParameterBase> kv
in src.m_rgLayerParameters)
3032 if (kv.Value !=
null)
3033 m_rgLayerParameters.Add(kv.Key, kv.Value.Clone());
3035 m_rgLayerParameters.Add(kv.Key,
null);
3047 p.m_rgstrBottom =
Utility.Clone<
string>(m_rgstrBottom);
3048 p.m_rgstrTop =
Utility.Clone<
string>(m_rgstrTop);
3049 p.m_phase = m_phase;
3050 p.m_rgLossWeight =
Utility.Clone<
double>(m_rgLossWeight);
3056 p.m_rgbPropagateDown =
Utility.Clone<
bool>(m_rgbPropagateDown);
3059 p.m_bFreezeLearning = m_bFreezeLearning;
3060 p.m_bConnectLossEvent = m_bConnectLossEvent;
3062 p.m_rgLayerParameters =
new Dictionary<LayerType, LayerParameterBase>();
3064 foreach (KeyValuePair<LayerType, LayerParameterBase> kv
in m_rgLayerParameters)
3066 if (kv.Value !=
null)
3067 p.m_rgLayerParameters.Add(kv.Key, kv.Value.Clone());
3069 p.m_rgLayerParameters.Add(kv.Key,
null);
3072 p.m_nSolverCount = m_nSolverCount;
3073 p.m_nSolverRank = m_nSolverRank;
3074 p.m_bGroupStart = m_bGroupStart;
3075 p.m_bUseHalfSize = m_bUseHalfSize;
3084 object ICloneable.
Clone()
3114 return "AccuracyDecode";
3117 return "AccuracyEncoding";
3123 return "AnnotatedData";
3132 return "BatchReIndex";
3141 return "CategoricalTrans";
3144 return "CausalSelfAttention";
3153 return "ChannelEmbedding";
3165 return "ContrastiveLoss";
3168 return "Convolution";
3171 return "ConvolutionOctave";
3186 return "DataNormalizer";
3189 return "DataSequence";
3192 return "DataTemporal";
3198 return "Deconvolution";
3201 return "DetectionEvaluate";
3204 return "DetectionOutput";
3222 return "EuclideanLoss";
3240 return "GateAddNorm";
3252 return "GlobResNorm";
3273 return "InfogainLoss";
3276 return "InnerProduct";
3288 return "LabelMapping";
3309 return "MeanErrorLoss";
3318 return "MemoryData";
3321 return "MultiBoxLoss";
3324 return "MultiheadAttention";
3326 case LayerType.MULTIHEAD_ATTENTION_INTERP:
3327 return "MultiheadAttentionInterp";
3330 return "MemoryLoss";
3335 case LayerType.MULTINOMIALLOGISTIC_LOSS:
3336 return "MultinomialLogisticLoss";
3345 return "NumericTrans";
3351 return "Normalization1";
3354 return "Normalization2";
3363 return "PositionalEncoder";
3369 return "UnPooling1";
3384 return "QuantileAccuracy";
3387 return "QuantileLoss";
3399 return "ReshapeTemporal";
3419 case LayerType.SIGMOIDCROSSENTROPY_LOSS:
3420 return "SigmoidCrossEntropyLoss";
3428 case LayerType.SOFTMAXCROSSENTROPY_LOSS:
3429 return "SoftmaxCrossEntropyLoss";
3431 case LayerType.SOFTMAXCROSSENTROPY2_LOSS:
3432 return "SoftmaxCrossEntropy2Loss";
3444 return "SoftmaxWithLoss";
3447 return "SmoothL1Loss";
3477 return "TransformerBlock";
3480 return "TokenizedData";
3483 return "TokenizedDataPairs";
3486 return "TokenizedDataPairsPy";
3489 return "TripletLoss";
3496 return "LstmSimple";
3499 return "LstmAttention";
3528 rgChildren.
Add<
string>(
"bottom",
bottom);
3529 rgChildren.
Add<
string>(
"top",
top);
3546 rgChildren.Add(ps.
ToProto(
"param"));
3551 rgChildren.Add(bp.
ToProto(
"blobs"));
3558 rgChildren.Add(nsr.
ToProto(
"include"));
3563 rgChildren.Add(nsr.
ToProto(
"exclude"));
3566 foreach (KeyValuePair<Phase, int> kv
in m_rgMaxBottomCount)
3569 prChildren.
Add(
"phase", kv.Key.ToString());
3570 prChildren.
Add(
"count", kv.Value.ToString());
3571 RawProto prMaxBottomCount =
new RawProto(
"max_bottom_count",
"", prChildren);
3572 rgChildren.Add(prMaxBottomCount);
3575 List<KeyValuePair<BaseParameter, string>> rgParam =
new List<KeyValuePair<BaseParameter,string>>();
3578 rgParam.Add(
new KeyValuePair<BaseParameter,string>(
transform_param,
"transform_param"));
3579 rgParam.Add(
new KeyValuePair<BaseParameter,string>(
loss_param,
"loss_param"));
3580 rgParam.Add(
new KeyValuePair<BaseParameter,string>(
accuracy_param,
"accuracy_param"));
3581 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
argmax_param,
"argmax_param"));
3582 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
batch_norm_param,
"batch_norm_param"));
3583 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
bias_param,
"bias_param"));
3584 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
clip_param,
"clip_param"));
3585 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
concat_param,
"concat_param"));
3586 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
constant_param,
"constant_param"));
3588 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
convolution_param,
"convolution_param"));
3589 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
crop_param,
"crop_param"));
3590 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
data_param,
"data_param"));
3591 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
debug_param,
"debug_param"));
3592 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
dropout_param,
"dropout_param"));
3593 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
dummy_data_param,
"dummy_data_param"));
3594 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
eltwise_param,
"eltwise_param"));
3595 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
elu_param,
"elu_param"));
3596 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
embed_param,
"embed_param"));
3597 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
exp_param,
"exp_param"));
3598 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
flatten_param,
"flatten_param"));
3599 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
gradient_scale_param,
"gradient_scale_param"));
3600 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
hinge_loss_param,
"hinge_loss_param"));
3601 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
image_data_param,
"image_data_param"));
3602 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
infogain_loss_param,
"infogain_loss_param"));
3603 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
inner_product_param,
"inner_product_param"));
3604 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
input_param,
"input_param"));
3605 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
labelmapping_param,
"labelmapping_param"));
3606 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
log_param,
"log_param"));
3607 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
lrn_param,
"lrn_param"));
3608 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
memory_data_param,
"memory_data_param"));
3609 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
mvn_param,
"mvn_param"));
3610 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
pooling_param,
"pooling_param"));
3611 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
parameter_param,
"parameter_param"));
3612 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
power_param,
"power_param"));
3613 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
prelu_param,
"prelu_param"));
3614 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
reduction_param,
"reduction_param"));
3615 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
relu_param,
"relu_param"));
3616 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
reshape_param,
"reshape_param"));
3617 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
scale_param,
"scale_param"));
3618 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
sigmoid_param,
"sigmoid_param"));
3619 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
softmax_param,
"softmax_param"));
3620 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
spp_param,
"spp_param"));
3621 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
slice_param,
"slice_param"));
3622 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
swish_param,
"swish_param"));
3623 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
tanh_param,
"tanh_param"));
3624 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
threshold_param,
"threshold_param"));
3625 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
tile_param,
"tile_param"));
3626 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
lstm_simple_param,
"lstm_simple_param"));
3627 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
recurrent_param,
"recurrent_param"));
3632 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
attention_param,
"attention_param"));
3634 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
data_sequence_param,
"data_sequence_param"));
3635 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
decode_param,
"decode_param"));
3636 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
gather_param,
"gather_param"));
3637 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
interp_param,
"interp_param"));
3638 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
knn_param,
"knn_param"));
3639 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
lstm_attention_param,
"lstm_attention_param"));
3640 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
mean_error_loss_param,
"mean_error_loss_param"));
3641 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
merge_param,
"merge_param"));
3642 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
mish_param,
"mish_param"));
3643 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
normalization1_param,
"normalization_param"));
3644 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
serf_param,
"serf_param"));
3645 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
squeeze_param,
"squeeze_param"));
3646 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
model_data_param,
"model_data_param"));
3647 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
text_data_param,
"text_data_param"));
3648 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
triplet_loss_param,
"triplet_loss_param"));
3649 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
unpooling_param,
"unpooling_param"));
3650 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
transpose_param,
"transpose_param"));
3653 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
hdf5_data_param,
"hdf5_data_param"));
3659 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
gelu_param,
"gelu_param"));
3660 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
layer_norm_param,
"layer_norm_param"));
3662 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
tokenized_data_param,
"tokenized_data_param"));
3664 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
nll_loss_param,
"nll_loss_param"));
3667 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
data_temporal_param,
"data_temporal_param"));
3669 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
numeric_trans_param,
"numeric_trans_param"));
3670 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
gateaddnorm_param,
"gateaddnorm_param"));
3671 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
glu_param,
"glu_param"));
3672 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
grn_param,
"grn_param"));
3673 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
varselnet_param,
"varselnet_param"));
3676 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
quantile_loss_param,
"quantile_loss_param"));
3680 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
cfc_param,
"cfc_param"));
3681 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
cfc_unit_param,
"cfc_unit_param"));
3682 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
ltc_unit_param,
"ltc_unit_param"));
3685 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
gram_param,
"gram_param"));
3686 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
onehot_param,
"onehot_param"));
3687 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
scalar_param,
"scalar_param"));
3688 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
tv_loss_param,
"tv_loss_param"));
3691 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
annotated_data_param,
"annotated_data_param"));
3694 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
multiboxloss_param,
"multiboxloss_param"));
3695 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
normalization2_param,
"normalization2_param"));
3696 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
permute_param,
"permute_param"));
3697 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
prior_box_param,
"prior_box_param"));
3698 rgParam.Add(
new KeyValuePair<BaseParameter, string>(
video_data_param,
"video_data_param"));
3700 foreach (KeyValuePair<BaseParameter, string> kv
in rgParam)
3703 rgChildren.Add(kv.Key.ToProto(kv.Value));
3706 return new RawProto(strName,
"", rgChildren);
3717 string strName =
null;
3720 if ((strVal = rp.
FindValue(
"name")) !=
null)
3723 if ((strVal = rp.
FindValue(
"type")) ==
null)
3724 throw new Exception(
"No layer type specified!");
3726 layerType = parseLayerType(strVal);
3730 p.
bottom = rp.FindArray<
string>(
"bottom");
3731 for (
int i = 0; i < p.
bottom.Count; i++)
3735 p.
top = rp.FindArray<
string>(
"top");
3736 for (
int i = 0; i < p.
top.Count; i++)
3738 p.
top[i] = p.
top[i].Trim(
'\"',
' ');
3741 if ((strVal = rp.
FindValue(
"phase")) !=
null)
3742 p.
phase = parsePhase(strVal);
3744 p.
loss_weight = rp.FindArray<
double>(
"loss_weight");
3746 if ((strVal = rp.
FindValue(
"group_start")) !=
null)
3749 if ((strVal = rp.
FindValue(
"freeze_learning")) !=
null)
3752 if ((strVal = rp.
FindValue(
"connect_loss_event")) !=
null)
3755 if ((strVal = rp.
FindValue(
"use_halfsize")) !=
null)
3766 rgrp = rp.FindChildren(
"blobs");
3772 p.propagate_down = rp.FindArray<
bool>(
"propagate_down");
3774 rgrp = rp.FindChildren(
"include");
3780 rgrp = rp.FindChildren(
"exclude");
3786 rgrp = rp.FindChildren(
"max_bottom_count");
3790 if (prPhase !=
null)
3793 if (!p.m_rgMaxBottomCount.ContainsKey(
phase))
3796 if (prCount !=
null)
3797 p.m_rgMaxBottomCount.Add(
phase,
int.Parse(prCount.
Value));
3805 if ((rpp = rp.
FindChild(
"transform_param")) !=
null)
3808 if ((rpp = rp.
FindChild(
"loss_param")) !=
null)
3811 if ((rpp = rp.
FindChild(
"accuracy_param")) !=
null)
3814 if ((rpp = rp.
FindChild(
"argmax_param")) !=
null)
3817 if ((rpp = rp.
FindChild(
"batch_norm_param")) !=
null)
3820 if ((rpp = rp.
FindChild(
"bias_param")) !=
null)
3823 if ((rpp = rp.
FindChild(
"clip_param")) !=
null)
3826 if ((rpp = rp.
FindChild(
"concat_param")) !=
null)
3829 if ((rpp = rp.
FindChild(
"constant_param")) !=
null)
3832 if ((rpp = rp.
FindChild(
"contrastive_loss_param")) !=
null)
3835 if ((rpp = rp.
FindChild(
"convolution_param")) !=
null)
3838 if ((rpp = rp.
FindChild(
"convolution_octave_param")) !=
null)
3841 if ((rpp = rp.
FindChild(
"crop_param")) !=
null)
3844 if ((rpp = rp.
FindChild(
"data_param")) !=
null)
3847 if ((rpp = rp.
FindChild(
"debug_param")) !=
null)
3850 if ((rpp = rp.
FindChild(
"dropout_param")) !=
null)
3853 if ((rpp = rp.
FindChild(
"dummy_data_param")) !=
null)
3856 if ((rpp = rp.
FindChild(
"eltwise_param")) !=
null)
3859 if ((rpp = rp.
FindChild(
"elu_param")) !=
null)
3862 if ((rpp = rp.
FindChild(
"embed_param")) !=
null)
3865 if ((rpp = rp.
FindChild(
"exp_param")) !=
null)
3868 if ((rpp = rp.
FindChild(
"flatten_param")) !=
null)
3871 if ((rpp = rp.
FindChild(
"gradient_scale_param")) !=
null)
3874 if ((rpp = rp.
FindChild(
"hinge_loss_param")) !=
null)
3877 if ((rpp = rp.
FindChild(
"image_data_param")) !=
null)
3880 if ((rpp = rp.
FindChild(
"infogain_loss_param")) !=
null)
3883 if ((rpp = rp.
FindChild(
"inner_product_param")) !=
null)
3886 if ((rpp = rp.
FindChild(
"input_param")) !=
null)
3889 if ((rpp = rp.
FindChild(
"labelmapping_param")) !=
null)
3892 if ((rpp = rp.
FindChild(
"log_param")) !=
null)
3895 if ((rpp = rp.
FindChild(
"lrn_param")) !=
null)
3898 if ((rpp = rp.
FindChild(
"mean_error_loss_param")) !=
null)
3901 if ((rpp = rp.
FindChild(
"memory_data_param")) !=
null)
3904 if ((rpp = rp.
FindChild(
"mvn_param")) !=
null)
3907 if ((rpp = rp.
FindChild(
"pooling_param")) !=
null)
3910 if ((rpp = rp.
FindChild(
"parameter_param")) !=
null)
3913 if ((rpp = rp.
FindChild(
"power_param")) !=
null)
3916 if ((rpp = rp.
FindChild(
"prelu_param")) !=
null)
3919 if ((rpp = rp.
FindChild(
"reduction_param")) !=
null)
3922 if ((rpp = rp.
FindChild(
"relu_param")) !=
null)
3925 if ((rpp = rp.
FindChild(
"reshape_param")) !=
null)
3928 if ((rpp = rp.
FindChild(
"scale_param")) !=
null)
3931 if ((rpp = rp.
FindChild(
"sigmoid_param")) !=
null)
3934 if ((rpp = rp.
FindChild(
"softmax_param")) !=
null)
3937 if ((rpp = rp.
FindChild(
"spp_param")) !=
null)
3940 if ((rpp = rp.
FindChild(
"slice_param")) !=
null)
3943 if ((rpp = rp.
FindChild(
"swish_param")) !=
null)
3946 if ((rpp = rp.
FindChild(
"tanh_param")) !=
null)
3949 if ((rpp = rp.
FindChild(
"threshold_param")) !=
null)
3952 if ((rpp = rp.
FindChild(
"tile_param")) !=
null)
3956 if ((rpp = rp.
FindChild(
"lstm_simple_param")) !=
null)
3959 if ((rpp = rp.
FindChild(
"recurrent_param")) !=
null)
3965 if ((rpp = rp.
FindChild(
"attention_param")) !=
null)
3968 if ((rpp = rp.
FindChild(
"data_sequence_param")) !=
null)
3971 if ((rpp = rp.
FindChild(
"decode_param")) !=
null)
3974 if ((rpp = rp.
FindChild(
"gather_param")) !=
null)
3977 if ((rpp = rp.
FindChild(
"interp_param")) !=
null)
3980 if ((rpp = rp.
FindChild(
"knn_param")) !=
null)
3983 if ((rpp = rp.
FindChild(
"lstm_attention_param")) !=
null)
3986 if ((rpp = rp.
FindChild(
"merge_param")) !=
null)
3989 if ((rpp = rp.
FindChild(
"mish_param")) !=
null)
3992 if ((rpp = rp.
FindChild(
"normalization_param")) !=
null)
3995 if ((rpp = rp.
FindChild(
"serf_param")) !=
null)
3998 if ((rpp = rp.
FindChild(
"squeeze_param")) !=
null)
4001 if ((rpp = rp.
FindChild(
"model_data_param")) !=
null)
4004 if ((rpp = rp.
FindChild(
"text_data_param")) !=
null)
4007 if ((rpp = rp.
FindChild(
"triplet_loss_param")) !=
null)
4010 if ((rpp = rp.
FindChild(
"transpose_param")) !=
null)
4013 if ((rpp = rp.
FindChild(
"unpooling_param")) !=
null)
4017 if ((rpp = rp.
FindChild(
"hdf5_data_param")) !=
null)
4021 if ((rpp = rp.
FindChild(
"causal_self_attention_param")) !=
null)
4024 if ((rpp = rp.
FindChild(
"multihead_attention_param")) !=
null)
4027 if ((rpp = rp.
FindChild(
"positional_encoder_param")) !=
null)
4030 if ((rpp = rp.
FindChild(
"gelu_param")) !=
null)
4033 if ((rpp = rp.
FindChild(
"layer_norm_param")) !=
null)
4036 if ((rpp = rp.
FindChild(
"transformer_block_param")) !=
null)
4039 if ((rpp = rp.
FindChild(
"tokenized_data_param")) !=
null)
4042 if ((rpp = rp.
FindChild(
"tokenized_data_pairs_param")) !=
null)
4045 if ((rpp = rp.
FindChild(
"nll_loss_param")) !=
null)
4049 if ((rpp = rp.
FindChild(
"data_temporal_param")) !=
null)
4052 if ((rpp = rp.
FindChild(
"categorical_trans_param")) !=
null)
4055 if ((rpp = rp.
FindChild(
"numeric_trans_param")) !=
null)
4058 if ((rpp = rp.
FindChild(
"gateaddnorm_param")) !=
null)
4061 if ((rpp = rp.
FindChild(
"glu_param")) !=
null)
4064 if ((rpp = rp.
FindChild(
"grn_param")) !=
null)
4067 if ((rpp = rp.
FindChild(
"varselnet_param")) !=
null)
4070 if ((rpp = rp.
FindChild(
"multihead_attention_interp_param")) !=
null)
4073 if ((rpp = rp.
FindChild(
"reshape_temporal_param")) !=
null)
4076 if ((rpp = rp.
FindChild(
"quantile_loss_param")) !=
null)
4079 if ((rpp = rp.
FindChild(
"quantile_accuracy_param")) !=
null)
4084 if ((rpp = rp.
FindChild(
"cfc_param")) !=
null)
4087 if ((rpp = rp.
FindChild(
"cfc_unit_param")) !=
null)
4090 if ((rpp = rp.
FindChild(
"ltc_unit_param")) !=
null)
4094 if ((rpp = rp.
FindChild(
"gram_param")) !=
null)
4097 if ((rpp = rp.
FindChild(
"onehot_param")) !=
null)
4100 if ((rpp = rp.
FindChild(
"scalar_param")) !=
null)
4103 if ((rpp = rp.
FindChild(
"tv_loss_param")) !=
null)
4107 if ((rpp = rp.
FindChild(
"annotated_data_param")) !=
null)
4110 if ((rpp = rp.
FindChild(
"detection_evaluate_param")) !=
null)
4113 if ((rpp = rp.
FindChild(
"detection_output_param")) !=
null)
4116 if ((rpp = rp.
FindChild(
"multiboxloss_param")) !=
null)
4119 if ((rpp = rp.
FindChild(
"normalization2_param")) !=
null)
4122 if ((rpp = rp.
FindChild(
"permute_param")) !=
null)
4125 if ((rpp = rp.
FindChild(
"prior_box_param")) !=
null)
4128 if ((rpp = rp.
FindChild(
"video_data_param")) !=
null)
4134 private static Phase parsePhase(
string strVal)
4151 throw new Exception(
"Unknown 'phase' value: " + strVal);
4164 return parseLayerType(strType);
4172 private static LayerType parseLayerType(
string str)
4174 str = str.ToLower();
4184 case "accuracydecode":
4185 case "accuracy_decode":
4188 case "accuracyencoding":
4189 case "accuracy_encoding":
4195 case "annotateddata":
4204 case "batchreindex":
4213 case "categoricaltrans":
4214 case "categorical_trans":
4220 case "causalselfattention":
4223 case "channelembedding":
4238 case "contrastiveloss":
4239 case "contrastive_loss":
4245 case "convolutionoctave":
4246 case "convolution_octave":
4261 case "datanormalizer":
4262 case "data_normalizer":
4265 case "datasequence":
4266 case "data_sequence":
4269 case "datatemporal":
4270 case "data_temporal":
4276 case "deconvolution":
4279 case "detectionevaluate":
4280 case "detection_evaluate":
4283 case "detectionoutput":
4284 case "detection_output":
4302 case "euclideanloss":
4303 case "euclidean_loss":
4358 case "infogainloss":
4359 case "infogain_loss":
4362 case "innerproduct":
4363 case "inner_product":
4375 case "labelmapping":
4393 case "mean_error_loss":
4394 case "meanerrorloss":
4406 case "multiboxloss":
4407 case "multibox_loss":
4410 case "multiheadattention":
4413 case "multiheadattentioninterp":
4414 return LayerType.MULTIHEAD_ATTENTION_INTERP;
4423 case "multinomiallogisticloss":
4424 case "multinomiallogistic_loss":
4425 return LayerType.MULTINOMIALLOGISTIC_LOSS;
4434 case "numerictrans":
4435 case "numeric_trans":
4441 case "normalization1":
4445 case "normalization2":
4454 case "positionalencoder":
4475 case "quantileaccuracy":
4476 case "quantile_accuracy":
4479 case "quantileloss":
4480 case "quantile_loss":
4492 case "reshapetemporal":
4513 case "sigmoidcrossentropyloss":
4514 case "sigmoidcrossentropy_loss":
4515 return LayerType.SIGMOIDCROSSENTROPY_LOSS;
4523 case "softmaxcrossentropyloss":
4524 case "softmaxcrossentropy_loss":
4525 return LayerType.SOFTMAXCROSSENTROPY_LOSS;
4527 case "softmaxcrossentropy2loss":
4528 case "softmaxcrossentropy2_loss":
4529 return LayerType.SOFTMAXCROSSENTROPY2_LOSS;
4540 case "softmaxwithloss":
4541 case "softmaxwith_loss":
4542 case "softmax_loss":
4545 case "smoothl1loss":
4546 case "smoothl1_loss":
4578 case "transformerblock":
4581 case "tokenizeddata":
4584 case "tokenizeddatapairs":
4587 case "tokenizeddatapairs_py":
4588 case "tokenizeddatapairspy":
4589 return LayerType.TOKENIZED_DATA_PAIRS_PY;
4591 case "triplet_loss":
4607 case "lstmattention":
4608 case "lstm_attention":
4628 throw new Exception(
"Unknown 'layertype' value: " + str);
4640 strOut += m_strName +
" (" + m_type.ToString() +
")";
4641 strOut +=
" btm = " +
Utility.ToString(m_rgstrBottom);
4642 strOut +=
" top = " +
Utility.ToString(m_rgstrTop);
The BaseParameter class is the base class for all other parameter classes.
virtual bool Compare(BaseParameter p)
Compare this parameter to another parameter.
The RawProtoCollection class is a list of RawProto objects.
void Add(RawProto p)
Adds a RawProto to the collection.
The RawProto class is used to parse and output Google prototxt file data.
TYPE
Defines the type of a RawProto node.
string Value
Get/set the value of the node.
RawProto FindChild(string strName)
Searches for a given node.
string FindValue(string strName)
Searches for a value of a node within this node's children.
RawProtoCollection FindChildren(params string[] rgstrName)
Searches for all children with a given name in this node's children.
The Utility class provides general utility functions.
static void Save(BinaryWriter bw, List< double > rg)
Save a list of double to a binary writer.
Specifies the parameters for the AccuracyLayer.
static AccuracyParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ArgMaxLayer
static ArgMaxParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
[DEPRECATED, use MultiHeadAttention layers instead.] Specifies the parameters for the AttentionLa...
static AttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the BatchNormLayer.
static new BatchNormParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the BiasLayer
static BiasParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The BlobProto contains the description of a blob.
override RawProto ToProto(string strName)
Converts the BlobProto to a RawProto.
static BlobProto FromProto(RawProto rp)
Parses a new BlobProto from a RawProto.
Stores the parameters used by the ClipLayer
static ClipParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConcatLayer
static ConcatParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConstantLayer.
static ConstantParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ContrastiveLossLayer.
static ContrastiveLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConvolutionOctaveLayer.
static ConvolutionOctaveParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ConvolutionLayer. The default weight filler is set to the XavierFill...
static new ConvolutionParameter FromProto(RawProto rp)
Parse a RawProto into a new instance of the parameter.
bool bias_term
Whether to have bias terms or not.
Specifies the parameters for the MyCaffe.CropLayer.
static CropParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the data normalizer layer.
Specifies the parameter for the data layer.
DB
Defines the database type to use.
bool? enable_random_selection
(optional, default = null) Specifies whether or not to randomly query images from the data source....
DB backend
Specifies the backend database.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
static DataParameter FromProto(RawProto rp, DataParameter p=null)
Parses the parameter from a RawProto.
Specifies the parameters used by the DebugLayer
static DebugParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters of the DropoutLayer.
static new DropoutParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
This layer produces N >= 1 top blobs. DummyDataParameter must specify 1 or shape fields,...
static DummyDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the EltwiseLayer.
static EltwiseParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the EluLayer.
static new EluParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the EmbedLayer.
static EmbedParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ExpLayer.
static ExpParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the FlattenLayer.
static FlattenParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GradientScaleLayer.
static GradientScaleParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the HDF5 data layer.
static HDF5DataParameter FromProto(RawProto rp, HDF5DataParameter p=null)
Parses the parameter from a RawProto.
Specifies the parameters for the HingeLossLayer.
static HingeLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ImageDataLayer
static ImageDataParameter FromProto(RawProto rp, ImageDataParameter p=null)
Parses the parameter from a RawProto.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
Specifies the parameters for the InfogainLossLayer.
static InfogainLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the InnerProductLayer.
bool bias_term
Whether to have bias terms or not.
static InnerProductParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the InterpLayer.
static InterpParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the LRNLayer.
static new LRNParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the LSTMAttentionLayer that provides an attention based LSTM layer used ...
static LSTMAttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
[DEPRECATED - use LSTMAttentionParameter instead with enable_attention = false] Specifies the parame...
static LSTMSimpleParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
[DEPRECATED (use DataLayer DataLabelMappingParameter instead)] Specifies the parameters for the Lab...
static LabelMappingParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The LayerParameterBase is the base class for all other layer specific parameters.
abstract LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
Specifies the base parameter for all layers.
InterpParameter interp_param
Returns the parameter set when initializing the LayerType.INTERP
Normalization2Parameter normalization2_param
Returns the parameter set when initialized with LayerType.NORMALIZATION2
ConvolutionParameter convolution_param
Returns the parameter set when initialized with LayerType.CONVOLUTION
SerfParameter serf_param
Returns the parameter set when initialized with LayerType.SERF
ParameterParameter parameter_param
Returns the parameter set when initialized with LayerType.PARAMETER
CfcParameter cfc_param
Returns the parameter set when initialized with LayerType.CFC
LayerParameter()
Constructor for the parameter.
TokenizedDataParameter tokenized_data_param
Returns the parameter set when initialized with LayerType.TOKENIZED_DATA
List< ParamSpec > parameters
Specifies the ParamSpec parameters of the LayerParameter.
SliceParameter slice_param
Returns the parameter set when initialized with LayerType.SLICE
ONNX_CONVERSION_SUPPORT m_onnxConversionSupport
Specifies the level of conversion support for the layer.
LogParameter log_param
Returns the parameter set when initialized with LayerType.LOG
void PrepareRunModel()
Prepare the layer settings for a run model.
string name
Specifies the name of this LayerParameter.
List< double > loss_weight
Specifies the loss weight.
ClipParameter clip_param
Returns the parameter set when initialized with LayerType.CLIP
TileParameter tile_param
Returns the parameter set when initialized with LayerType.TILE
LayerType type
Specifies the type of this LayerParameter.
TripletLossParameter triplet_loss_param
Returns the parameter set when initialized with LayerType.TRIPLET_LOSS
ConstantParameter constant_param
Returns the parameter set when initialized with LayerType.CONSTANT
MeanErrorLossParameter mean_error_loss_param
Returns the parameter set when initialized with LayerType.MEAN_ERROR_LOSS
DetectionOutputParameter detection_output_param
Returns the parameter set when initialized with LayerType.DETECTION_OUTPUT
MultiBoxLossParameter multiboxloss_param
Returns the parameter set when initializing with LayerType.MULTIBOX_LOSS
LtcUnitParameter ltc_unit_param
Returns the parameter set when initialized with LayerType.LTC_UNIT
SoftmaxParameter softmax_param
Returns the parameter set when initialized with LayerType.SOFTMAX
LRNParameter lrn_param
Returns the parameter set when initialized with LayerType.LRN
void CopyDefaults(LayerParameter p)
Copies the defaults from another LayerParameter.
ONNX_CONVERSION_SUPPORT onnx_conversion_support
Returns the level of Onnx conversion support.
List< bool > propagate_down
Specifies whether or not the LayerParameter (or portions of) should be backpropagated.
MultiheadAttentionParameter multihead_attention_param
Returns the parameter set when initialized with LayerType.MULTIHEAD_ATTENTION
LSTMSimpleParameter lstm_simple_param
[DEPRECATED] Returns the parameter set when initialized with LayerType.LSTM_SIMPLE
SqueezeParameter squeeze_param
Returns the parameter set when initialized with LayerType.SQUEEZE
List< NetStateRule > include
Specifies the NetStateRule's for which this LayerParameter should be included.
MathParameter math_param
Returns the parameter set when initialized with LayerType.MATH
SPPParameter spp_param
Returns the parameter set when initialized with LayerType.SPP
ScaleParameter scale_param
Returns the parameter set when initialized with LayerType.SCALE
LayerNormParameter layer_norm_param
Returns the parameter set when initialized with LayerType.LAYERNORM
bool MeetsPhase(Phase phase)
Determines whether or not this LayerParameter meets a given Phase.
int GetParameterCount()
Returns the number of ParamSpec parameters used by the layer.
OneHotParameter onehot_param
Returns the parameter set when initialized with LayerType.ONEHOT
ThresholdParameter threshold_param
Returns the parameter set when initialized with LayerType.THRESHOLD
ReLUParameter relu_param
Returns the parameter set when initialized with LayerType.RELU
void clear_blobs()
Clears the collection of Blobs used by this layer.
bool freeze_learning
Get/set whether or not to freeze the learning for this layer globally.
bool group_start
Specifies whether or not this node is the start of a new group - this is only used when rendering mod...
List< NetStateRule > exclude
Specifies the NetStateRule's for which this LayerParameter should be excluded.
UnPoolingParameter unpooling_param
Returns the parameter set when initialized with LayerType.UNPOOLING
List< string > expected_top
Returns a list of expected top connections (in the bottom, out the top).
ArgMaxParameter argmax_param
Returns the parameter set when initialized with LayerType.ARGMAX
ReductionParameter reduction_param
Returns the parameter set when initialized with LayerType.REDUCTION
PoolingParameter pooling_param
Returns the parameter set when initialized with LayerType.POOLING
LayerParameter(LayerType lt, string strName=null)
The LayerParameter constructor.
SigmoidParameter sigmoid_param
Returns the parameter set when initialized with LayerType.SIGMOID
bool connect_loss_event
Get/set whether or not to connect the loss event to this layer.
TokenizedDataPairsParameter tokenized_data_pairs_param
Returns the parameter set when initialized with LayerType.TOKENIZED_DATA_PAIRS
bool use_halfsize
Specifies whether or not to use half sized memory or not.
GramParameter gram_param
Returns the parameter set when initialized with LayerType.GRAM
ExpParameter exp_param
Returns the parameter set when initialized with LayerType.EXP
static LayerParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
ContrastiveLossParameter contrastive_loss_param
Returns the parameter set when initialized with LayerType.CONTRASTIVE_LOSS
DebugParameter debug_param
Returns the parameter set when initialized with LayerType.DEBUG
GluParameter glu_param
Returns the parameter set when initialized with LayerType.GLU
GrnParameter grn_param
Returns the parameter set when initialized with LayerType.GRN
DummyDataParameter dummy_data_param
Returns the parameter set when initialized with LayerType.DUMMYDATA
PositionalEncoderParameter positional_encoder_param
Returns the parameter set when initialized with LayerType.POSITIONAL_ENCODER
PermuteParameter permute_param
Returns the parameter set when initialized with LayerType.PERMUTE
EltwiseParameter eltwise_param
Returns the parameter set when initialized with LayerType.ELTWISE
void SetType(LayerType type, bool bNewParam=true)
Set the layer type.
MemoryDataParameter memory_data_param
Returns the parameter set when initialized with LayerType.MEMORY_DATA
MultiHeadAttentionInterpParameter multihead_attention_interp_param
Returns the parameter set when initialized with LayerType.MULTIHEAD_ATTENTION_INTERP
NLLLossParameter nll_loss_param
Returns the parameter set when initialized with LayerType.NLL_LOSS
PriorBoxParameter prior_box_param
Returns the parameter set when initialized with LayerType.PRIORBOX
CategoricalTransformationParameter categorical_trans_param
Returns the parameter set when initialized with LayerType.CATEGORICAL_TRANS
InputParameter input_param
Returns the parameter set when initialized with LayerType.INPUT
QuantileAccuracyParameter quantile_accuracy_param
Returns the parameter set when initialized with LayerType.QUANTILE_ACCURACY
PowerParameter power_param
Returns the parameter set when initialized with LayerType.POWER
List< string > expected_bottom
Returns a list of expected bottom connections (in the bottom, out the top).
List< string > top
Specifies the active top connections (in the bottom, out the top)
ReshapeParameter reshape_param
Returns the parameter set when initialized with LayerType.RESHAPE
int solver_count
Returns the number of Solvers participating in a multi-GPU session for which the Solver using this La...
EluParameter elu_param
Returns the parameter set when initialized with LayerType.ELU
MergeParameter merge_param
Returns the parameter set when initialized with LayerType.MERGE
EmbedParameter embed_param
Returns the parameter set when initialized with LayerType.EMBED
ReshapeTemporalParameter reshape_temporal_param
Returns the parameter set when initialized with LayerType.RESHAPE_TEMPORAL
GradientScaleParameter gradient_scale_param
Returns the parameter set when initialized with LayerType.GSL
TextDataParameter text_data_param
Returns the parameter set when initialized with LayerType.TEXT_DATA
AnnotatedDataParameter annotated_data_param
Returns the parameter set when initialized with LayerType.ANNOTATED_DATA
HingeLossParameter hinge_loss_param
Returns the parameter set when initialized with LayerType.HINGE_LOSS
CausalSelfAttentionParameter causal_self_attention_param
Returns the parameter set when initialized with LayerType.CAUSAL_SELF_ATTENTION
int CompareTo(object obj)
Compares this LayerParameter to another object.
DataSequenceParameter data_sequence_param
Returns the parameter set when initialized with LayerType.DATA_SEQUENCE
BiasParameter bias_param
Returns the parameter set when initialized with LayerType.BIAS
DetectionEvaluateParameter detection_evaluate_param
Returns the parameter set when initialized with LayerType.DETECTION_EVALUATE
GateAddNormParameter gateaddnorm_param
Returns the parameter set when initialized with LayerType.GATEADDNORM
TVLossParameter tv_loss_param
Returns the parameter set when initialized with LayerType.TV_LOSS
NumericTransformationParameter numeric_trans_param
Returns the parameter set when initialized with LayerType.NUMERIC_TRANS
DataNormalizerParameter data_normalizer_param
Returns the parameter set when initialized with LayerType.DATA_NORMALIZER
int solver_rank
Returns the SolverRank of the Solver using this LayerParameter (if any).
InnerProductParameter inner_product_param
Returns the parameter set when initialized with LayerType.INNERPRODUCT
AccuracyParameter accuracy_param
Returns the parameter set when initialized with LayerType.ACCURACY
ConcatParameter concat_param
Returns the parameter set when initialized with LayerType.CONCAT
GatherParameter gather_param
Returns the parameter set when initialized with LayerType.GATHER
HDF5DataParameter hdf5_data_param
Returns the parameter set when initialized with LayerType.HDF5_DATA
TransformerBlockParameter transformer_block_param
Returns the parameter set when initialized with LayerType.TRANSFORMER_BLOCK
MVNParameter mvn_param
Returns the parameter set when initialized with LayerType.MVN
void CopyParameters(LayerParameter src)
Copy just the layer specific parameters to this layer parameter.
CfcUnitParameter cfc_unit_param
Returns the parameter set when initialized with LayerType.CFC_UNIT
string PrepareRunModelInputs()
Prepare model inputs for the run-net (if any are needed for the layer).
KnnParameter knn_param
Returns the parameter set when initialized with LayerType.KNN
QuantileLossParameter quantile_loss_param
Returns the parameter set when initialized with LayerType.QUANTILE_LOSS
MishParameter mish_param
Returns the parameter set when initialized with LayerType.MISH
object Load(BinaryReader br, bool bNewInstance)
Load the parameter from a binary reader.
ScalarParameter scalar_param
Returns the parameter set when initialized with LayerType.SCALAR
DataTemporalParameter data_temporal_param
Returns the parameter set when initialized with LayerType.DATA_TEMPORAL
TransformationParameter transform_param
Returns the parameter set when initialized with LayerType.TRANSFORM
TransposeParameter transpose_param
Returns the parameter set when initialized with LayerType.TRANSPOSE
Normalization1Parameter normalization1_param
Returns the parameter set when initialized with LayerType.NORMALIZATION1
DataParameter data_param
Returns the parameter set when initialized with LayerType.DATA
VarSelNetParameter varselnet_param
Returns the parameter set when initialized with LayerType.VARSELNET
AttentionParameter attention_param
Returns the parameter set when initialized with LayerType.ATTENTION
FlattenParameter flatten_param
Returns the parameter set when initialized with LayerType.FLATTEN
VideoDataParameter video_data_param
Returns the parameter set when initialized with LayerType.VIDEO_DATA
Phase phase
Specifies the Phase for which this LayerParameter is run.
ConvolutionOctaveParameter convolution_octave_param
Returns the parameter set when initialized with LayerType.CONVOLUTION_OCTAVE
DecodeParameter decode_param
Returns the parameter set when initializing with LayerType.DECODE or LayerType.ACCURACY_ENCODING.
RecurrentParameter recurrent_param
Returns the parameter set when initialized with LayerType.RECURRENT
BatchNormParameter batch_norm_param
Returns the parameter set when initialized with LayerType.BATCHNORM
List< string > bottom
Specifies the active bottom connections (in the bottom, out the top).
LayerType
Specifies the layer type.
override RawProto ToProto(string strName)
Converts the LayerParameter into a RawProto.
void Save(BinaryWriter bw)
Save this parameter to a binary writer.
override string ToString()
Returns a string representation of the LayerParameter.
static ? LayerType GetType(string strType)
Converts the string type into a LayerType, or null if no match is found.
SwishParameter swish_param
Returns the parameter set when initialized with LayerType.SWISH
GeluParameter gelu_param
Returns the parameter set when initialized with LayerType.GELU
DropoutParameter dropout_param
Returns the parameter set when initialized with LayerType.DROPOUT
LSTMAttentionParameter lstm_attention_param
Returns the parameter set when initialized with LayerType.LSTM_ATTENTION
InfogainLossParameter infogain_loss_param
Returns the parameter set when initialized with LayerType.INFOGAIN_LOSS
PReLUParameter prelu_param
Returns the parameter set when initialized with LayerType.PRELU
LossParameter loss_param
Returns the parameter set when initialized with LayerType.LOSS
ModelDataParameter model_data_param
Returns the parameter set when initialized with LayerType.MODEL_DATA
ImageDataParameter image_data_param
Returns the parameter set when initialized with LayerType.IMAGE_DATA
LabelMappingParameter labelmapping_param
Returns the parameter set when initialized with LayerType.LABELMAPPING
List< BlobProto > blobs
Specifies the blobs of the LayerParameter.
virtual LayerParameter Clone(bool bCloneBlobs)
Creates a new copy of this instance of the parameter.
TanhParameter tanh_param
Returns the parameter set when initialized with LayerType.TANH
ONNX_CONVERSION_SUPPORT
Defines whether a layer node has ONNX conversion support or not.
CropParameter crop_param
Returns the parameter set when initialized with LayerType.CROP
LayerParameter(LayerParameter p)
The LayerParameter constructor.
Specifies the parameters for the LogLayer.
static LogParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by loss layers.
NormalizationMode
How to normalize the loss for loss layers that aggregate across batches, spatial dimensions,...
static LossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MVNLayer.
static MVNParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MathLayer.
Specifies the parameters for the MeanErrorLossLayerParameter.
static MeanErrorLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the MemoryDataLayer.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
static MemoryDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the model data layer.
static ModelDataParameter FromProto(RawProto rp, ModelDataParameter p=null)
Parses the parameter from a RawProto.
Specifies a NetStateRule used to determine whether a Net falls within a given include or exclude patt...
override RawProto ToProto(string strName)
Converts a NetStateRule into a RawProto.
static NetStateRule FromProto(RawProto rp)
Parses a RawProto representing a NetStateRule and creates a new instance of a NetStateRule from it.
Phase phase
Set phase to require the NetState to have a particular phase (TRAIN or TEST) to meet this rule.
Specifies the parameters for the PReLULayer.
static PReLUParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies training parameters (multipliers on global learning constants, and the name of other settin...
override RawProto ToProto(string strName)
Converts the ParamSpec into a RawProto.
static ParamSpec FromProto(RawProto rp)
Parses a new ParamSpec from a RawProto.
Specifies the parameters for the ParameterLayer
static ParameterParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PoolingLayer.
static new PoolingParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PowerLayer.
static PowerParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ReLULayer
static new ReLUParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the RecurrentLayer.
static new RecurrentParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by ReductionLayer.
static ReductionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ReshapeLayer.
static ReshapeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The SPPParameter specifies the parameters for the SPPLayer.
static new SPPParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ScaleLayer.
static new ScaleParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SigmoidLayer.
static new SigmoidParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SliceLayer.
static SliceParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SoftmaxLayer
static new SoftmaxParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by the SwishLayer
static new SwishParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TanhLayer
static new TanhParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameter for the Text data layer.
static TextDataParameter FromProto(RawProto rp, TextDataParameter p=null)
Parses the parameter from a RawProto.
Stores the parameters used by the ThresholdLayer
static ThresholdParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the TileLayer
static TileParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DataSequenceLayer.
static DataSequenceParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DecodeLayer and the AccuracyEncodingLayer.
static DecodeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GatherLayer.
static GatherParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the KnnLayer.
static KnnParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MergeLayer.
static MergeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by the MishLayer
static new MishParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the Normalization1Layer.
static Normalization1Parameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Stores the parameters used by the SerfLayer
static new SerfParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the SqueezeLayer.
static SqueezeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TransposeLayer.
static TransposeParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TripletLossLayer.
static TripletLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the UnPoolingLayer.
static new UnPoolingParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the CausalSelfAttentionLayer.
static CausalSelfAttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GeluLayer.
static GeluParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the LayerNormalizationLayer.
static LayerNormParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MultiheadAttentionLayer.
static MultiheadAttentionParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the NLLLossLayer.
static NLLLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PositionalEncoderLayer.
static PositionalEncoderParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TokenizedDataPairsLayer.
static new TokenizedDataPairsParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TokenizedDataLayer.
static TokenizedDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the CfcLayer. Note, you must also fill out the CfcUnitParameter.
static CfcParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the CfcUnitLayer used by the CfCLayer.
static CfcUnitParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the LtcUnitLayer used by the CfCLayer.
static LtcUnitParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GramLayer
static GramParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters used by the OneHotLayer
static OneHotParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ScalarLayer
static ScalarParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the TVLossLayer
static TVLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the InputLayer.
override LayerParameterBase Clone()
Creates a new copy of this instance of the parameter.
static AnnotatedDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DetectionEvaluateLayer.
static DetectionEvaluateParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DetectionOutputLayer.
static DetectionOutputParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MultiBoxLossParameter.
static MultiBoxLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the Normalization2Layer used in SSD.
static Normalization2Parameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PermuteLayer.
static PermuteParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the PriorBoxParameter.
static PriorBoxParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the VideoDataLayer.
static VideoDataParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the DataTemporalLayer (used in TFT models).
static DataTemporalParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GateAddNormLayer (Gate Add Norm).
static GateAddNormParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GluLayer (Gated Linear Unit).
static GluParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the GrnLayer (Gated Response Network).
static GrnParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the MultiHeadAttentionInterpLayer (Interpretable Multi-Head Attention La...
static MultiHeadAttentionInterpParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the QuantileAccuracyLayer used in TFT models
static QuantileAccuracyParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the QuantileLossLayer used in TFT models
static QuantileLossParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the ReshapeTemporalLayer.
static ReshapeTemporalParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
Specifies the parameters for the VarSelNetLayer (Variable Selection Network).
static VarSelNetParameter FromProto(RawProto rp)
Parses the parameter from a RawProto.
The IBinaryPersist interface provides generic save and load functionality.
void Save(BinaryWriter bw)
Save to a binary writer.
object Load(BinaryReader br, bool bNewInstance=true)
Load from a binary reader.
The MyCaffe.basecode contains all generic types used throughout MyCaffe.
Phase
Defines the Phase under which to run a Net.
@ DATA
Specifies a data gym that collects data from a data source, such as a database.
@ RECURRENT
Defines the recurrent training method.
@ NONE
No training category specified.
@ RNN
Run the trainer in RNN mode.
The MyCaffe.common namespace contains common MyCaffe classes.
@ SOFTMAX
Specifies to use softmax.
@ TANH
Specifies to run the tanh function.
@ ATTENTION
The blob contains attention scores.
@ ACCURACY
The Blob holds Accuracy Data.
@ CLIP
The blob holds Clip data.
@ LOSS
The Blob holds Loss Data.
@ LSTM
Specifies to use a 4 gate LSTM Recurrent Learning unit.
@ LOG
Specifies to use the log algorithm.
@ BIAS
Bias weights are targeted.
The MyCaffe.param.beta parameters are used by the MyCaffe.layer.beta layers.
The MyCaffe.param.nt namespace defines the parameters used by the Neural Style Transfer layers.
The MyCaffe.param.ssd namespace contains all SSD related parameter objects that correspond to the nat...
The MyCaffe.param namespace contains parameters used to create models.
The MyCaffe namespace contains the main body of MyCaffe code that closely tracks the C++ Caffe open-...