/**
 * Computes the output {@link InputType} of this layer for the given input types.
 *
 * @param inputType array of InputTypes describing this layer's inputs
 * @return output type as InputType, as determined by the wrapped vertex
 * @throws InvalidKerasConfigurationException Invalid Keras configuration
 */
@Override
public InputType getOutputType(InputType... inputType) {
    // Delegate to the underlying vertex. The -1 index is passed through unchanged;
    // presumably it means "no specific layer index" — confirm against GraphVertex.getOutputType.
    InputType result = vertex.getOutputType(-1, inputType);
    return result;
}
}
// Fragment of a larger method — the enclosing definition is not visible in this chunk.
// Computes the output type of vertex 'gv' at index currLayerIdx from the accumulated input
// types, records an output under key 's', then builds a memory report from the same inputs.
// NOTE(review): the getOutputType() result is discarded here while 'outputFromVertex' (computed
// elsewhere) is what gets stored — confirm intent against the full method.
gv.getOutputType(currLayerIdx, inputTypeList.toArray(new InputType[inputTypeList.size()])); vertexOutputs.put(s, outputFromVertex); MemoryReport mr = gv.getMemoryReport(inputTypeList.toArray(new InputType[inputTypeList.size()]));
// Fragment of a larger method — the enclosing loop/definition is not visible in this chunk,
// and the statement order here looks garbled ('n' is used before it is declared below).
// First part: accumulate per-vertex parameter counts (numParams(true) presumably includes
// backprop params — TODO confirm). Second part: instantiate the runtime GraphVertex for
// this configuration node, handing it its slice of the params view array.
numParamsForVertex[i] = n.numParams(true); numParams += numParamsForVertex[i]; i++; org.deeplearning4j.nn.conf.graph.GraphVertex n = nodeEntry.getValue(); String name = nodeEntry.getKey(); GraphVertex gv = n.instantiate(this, name, vertexNumber, paramsViewForVertex[vertexNumber], initializeParams);
/**
 * Creates a deep copy of this configuration.
 * <p>
 * Vertices are cloned individually, vertex-input lists and the network input/output name
 * lists are copied into fresh collections, and the default layer configuration is cloned;
 * scalar settings (pretrain/backprop/TBPTT lengths, workspace and cache modes) are copied
 * by value.
 *
 * @return a deep copy of this ComputationGraphConfiguration
 */
@Override
public ComputationGraphConfiguration clone() {
    ComputationGraphConfiguration conf = new ComputationGraphConfiguration();

    // Deep-copy the vertex map: each GraphVertex is cloned so the copies are independent.
    conf.vertices = new LinkedHashMap<>();
    for (Map.Entry<String, GraphVertex> entry : this.vertices.entrySet()) {
        conf.vertices.put(entry.getKey(), entry.getValue().clone());
    }

    // Copy the input-name lists into new ArrayLists so edits to one config don't leak.
    conf.vertexInputs = new LinkedHashMap<>();
    for (Map.Entry<String, List<String>> entry : this.vertexInputs.entrySet()) {
        conf.vertexInputs.put(entry.getKey(), new ArrayList<>(entry.getValue()));
    }

    // Fix: previously networkInputs was first assigned an empty ArrayList that was
    // immediately overwritten by the copy on the next statement (dead store).
    conf.networkInputs = new ArrayList<>(this.networkInputs);
    conf.networkOutputs = new ArrayList<>(this.networkOutputs);

    conf.pretrain = pretrain;
    conf.backprop = backprop;
    conf.backpropType = backpropType;
    conf.tbpttFwdLength = tbpttFwdLength;
    conf.tbpttBackLength = tbpttBackLength;
    conf.defaultConfiguration = defaultConfiguration.clone();
    conf.trainingWorkspaceMode = trainingWorkspaceMode;
    conf.inferenceWorkspaceMode = inferenceWorkspaceMode;
    conf.cacheMode = this.cacheMode;
    // Keep the cloned default configuration's cache mode in sync with this config's.
    conf.defaultConfiguration.cacheMode = this.cacheMode;
    return conf;
}
/** * Add a {@link GraphVertex} to the network configuration. A GraphVertex defines forward and backward pass methods, * and can contain a {@link LayerVertex}, a {@link org.deeplearning4j.nn.conf.graph.ElementWiseVertex} to do element-wise * addition/subtraction, a {@link MergeVertex} to combine/concatenate the activations out of multiple layers or vertices, * a {@link org.deeplearning4j.nn.conf.graph.SubsetVertex} to select a subset of the activations out of another layer/GraphVertex.<br> * Custom GraphVertex objects (that extend the abstract {@link GraphVertex} class) may also be used. * * @param vertexName The name of the GraphVertex to add * @param vertex The GraphVertex to add * @param vertexInputs The inputs/activations to this GraphVertex */ public GraphBuilder addVertex(String vertexName, GraphVertex vertex, String... vertexInputs) { vertices.put(vertexName, vertex); //Automatically insert a MergeNode if this vertex can only take 1 input (layer vertices, etc) if (vertex.maxVertexInputs() == 1 && vertexInputs != null && vertexInputs.length > 1) { String mergeName = vertexName + "-merge"; addVertex(mergeName, new MergeVertex(), vertexInputs); this.vertexInputs.put(vertexName, Collections.singletonList(mergeName)); } else if (vertexInputs != null) { this.vertexInputs.put(vertexName, Arrays.asList(vertexInputs)); } return this; }
// Fragment of a larger method — this is the interior of a loop over a map's entrySet();
// the loop header and enclosing definition are not visible in this chunk.
// For each configuration vertex, count its parameters (numParams(true) presumably includes
// backprop params — TODO confirm), accumulate the running total, and advance the index.
.entrySet()) { org.deeplearning4j.nn.conf.graph.GraphVertex n = nodeEntry.getValue(); numParamsForVertex[i] = n.numParams(true); numParams += numParamsForVertex[i]; i++;
/**
 * Computes the output {@link InputType} of this layer for the given input types.
 *
 * @param inputType array of InputTypes describing this layer's inputs
 * @return output type as InputType, as determined by the wrapped vertex
 * @throws InvalidKerasConfigurationException Invalid Keras configuration
 */
@Override
public InputType getOutputType(InputType... inputType) {
    // Delegate to the underlying vertex. The -1 index is passed through unchanged;
    // presumably it means "no specific layer index" — confirm against GraphVertex.getOutputType.
    InputType result = vertex.getOutputType(-1, inputType);
    return result;
}
}
// Fragment of a larger method — the enclosing definition is not visible in this chunk.
// Computes the output type of vertex 'gv' at index currLayerIdx from the accumulated input
// types, then records an output under key 's'.
// NOTE(review): the getOutputType() result is discarded here while 'outputFromVertex'
// (computed elsewhere) is what gets stored — confirm intent against the full method.
gv.getOutputType(currLayerIdx, inputTypeList.toArray(new InputType[inputTypeList.size()])); vertexOutputs.put(s, outputFromVertex);