doPrep();
// Construct the edited network from the updated configuration and the transferred parameters
editedModel = new MultiLayerNetwork(constructConf(), constructParams());
if (frozenTill != -1) {
    // frozenTill != -1 means layers up to and including that index are to be frozen
    org.deeplearning4j.nn.api.Layer[] layers = editedModel.getLayers();
private void doPrep() {
    // Apply the fine-tune configuration
    fineTuneConfigurationBuild();
    // Replay each queued nOut change, in sorted layer order
    for (int i = 0; i < editedLayersSorted.length; i++) {
        int layerNum = editedLayersSorted[i];
        nOutReplaceBuild(layerNum, editedLayersMap.get(layerNum).getLeft(),
                        editedLayersMap.get(layerNum).getMiddle(),
                        editedLayersMap.get(layerNum).getRight());
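// Illustrative sketch (not part of the original source): how the internals above
// are typically driven from user code. setFeatureExtractor(...) is the public
// entry point that sets frozenTill; `pretrained` is a hypothetical pretrained
// network.
//
//     MultiLayerNetwork edited = new TransferLearning.Builder(pretrained)
//             .setFeatureExtractor(4) // freeze layers 0..4 inclusive
//             .build();               // runs doPrep(), then the frozen-layer handling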
/**
 * Modify the architecture of a layer by changing nOut.
 * Note that this will also affect the layer that follows the specified layer, unless it is the output layer.
 * Can specify different weight init schemes for the specified layer and the layer that follows it.
 *
 * @param layerNum   The index of the layer to change nOut of
 * @param nOut       Value of nOut to change to
 * @param dist       Distribution to use for params in layerNum
 * @param schemeNext Weight init scheme to use for params in layerNum+1
 * @return Builder
 * @see org.deeplearning4j.nn.weights.WeightInit DISTRIBUTION
 */
public Builder nOutReplace(int layerNum, int nOut, Distribution dist, WeightInit schemeNext) {
    return nOutReplace(layerNum, nOut, WeightInit.DISTRIBUTION, schemeNext, dist, null);
}
/**
 * Modify the architecture of a layer by changing nOut.
 * Note that this will also affect the layer that follows the specified layer, unless it is the output layer.
 * Can specify different distributions for the specified layer and the layer that follows it.
 *
 * @param layerNum The index of the layer to change nOut of
 * @param nOut     Value of nOut to change to
 * @param dist     Distribution to use for params in layerNum
 * @param distNext Distribution to use for params in layerNum+1
 * @return Builder
 * @see org.deeplearning4j.nn.weights.WeightInit DISTRIBUTION
 */
public Builder nOutReplace(int layerNum, int nOut, Distribution dist, Distribution distNext) {
    return nOutReplace(layerNum, nOut, WeightInit.DISTRIBUTION, WeightInit.DISTRIBUTION, dist, distNext);
}
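// Hedged example, not part of the original class: using the (dist, distNext)
// overload above. Assumes org.deeplearning4j.nn.conf.distribution.NormalDistribution
// is imported; the layer index (2) and nOut value (300) are hypothetical.
private static Builder exampleDistributionReplace(Builder builder) {
    return builder.nOutReplace(2, 300,
            new NormalDistribution(0, 0.01),  // params of layer 2: N(0, 0.01)
            new NormalDistribution(0, 1));    // input params of layer 3: N(0, 1)
}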
/**
 * Modify the architecture of a layer by changing nOut.
 * Note that this will also affect the layer that follows the specified layer, unless it is the output layer.
 * Can specify different weight init schemes for the specified layer and the layer that follows it.
 *
 * @param layerNum The index of the layer to change nOut of
 * @param nOut     Value of nOut to change to
 * @param scheme   Weight init scheme to use for params in layerNum
 * @param distNext Distribution to use for params in layerNum+1
 * @return Builder
 * @see org.deeplearning4j.nn.weights.WeightInit DISTRIBUTION
 */
public Builder nOutReplace(int layerNum, int nOut, WeightInit scheme, Distribution distNext) {
    return nOutReplace(layerNum, nOut, scheme, WeightInit.DISTRIBUTION, null, distNext);
}
/**
 * Modify the architecture of a layer by changing nOut.
 * Note that this will also affect the layer that follows the specified layer, unless it is the output layer.
 * Can specify different weight init schemes for the specified layer and the layer that follows it.
 *
 * @param layerNum   The index of the layer to change nOut of
 * @param nOut       Value of nOut to change to
 * @param scheme     Weight init scheme to use for params in layerNum
 * @param schemeNext Weight init scheme to use for params in layerNum+1
 * @return Builder
 */
public Builder nOutReplace(int layerNum, int nOut, WeightInit scheme, WeightInit schemeNext) {
    return nOutReplace(layerNum, nOut, scheme, schemeNext, null, null);
}
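// Hedged example, not part of the original class: mixing a weight init scheme
// with a distribution via the (scheme, distNext) overload above. Assumes
// org.deeplearning4j.nn.conf.distribution.UniformDistribution is imported;
// indices and sizes are hypothetical.
private static Builder exampleMixedReplace(Builder builder) {
    return builder.nOutReplace(1, 128, WeightInit.XAVIER,  // layer 1: XAVIER
            new UniformDistribution(-0.1, 0.1));           // layer 2 inputs: U(-0.1, 0.1)
}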
/**
 * Modify the architecture of a layer by changing nOut.
 * Note that this will also affect the layer that follows the specified layer, unless it is the output layer.
 *
 * @param layerNum The index of the layer to change nOut of
 * @param nOut     Value of nOut to change to
 * @param dist     Distribution to use in conjunction with weight init DISTRIBUTION for params in layerNum and layerNum+1
 * @return Builder
 * @see org.deeplearning4j.nn.weights.WeightInit DISTRIBUTION
 */
public Builder nOutReplace(int layerNum, int nOut, Distribution dist) {
    return nOutReplace(layerNum, nOut, WeightInit.DISTRIBUTION, WeightInit.DISTRIBUTION, dist, dist);
}
/**
 * Modify the architecture of a layer by changing nOut.
 * Note that this will also affect the layer that follows the specified layer, unless it is the output layer.
 *
 * @param layerNum The index of the layer to change nOut of
 * @param nOut     Value of nOut to change to
 * @param scheme   Weight init scheme to use for params in layerNum and layerNum+1
 * @return Builder
 */
public Builder nOutReplace(int layerNum, int nOut, WeightInit scheme) {
    return nOutReplace(layerNum, nOut, scheme, scheme, null, null);
}
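// Hedged end-to-end sketch, not part of the original class: replacing the output
// width of a pretrained network for a new task via the single-scheme overload
// above. Assumes the output layer has index 3; `numNewClasses` is a hypothetical
// parameter.
private static MultiLayerNetwork exampleNewOutputWidth(MultiLayerNetwork pretrained, int numNewClasses) {
    return new TransferLearning.Builder(pretrained)
            .nOutReplace(3, numNewClasses, WeightInit.XAVIER) // no "next" layer: 3 is the output layer
            .build();
}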