@Override
public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException {
    // Reconstructs an INDArray from the custom JSON layout written by the matching
    // serializer: a flat "dataBuffer" numeric array plus rank/shape/stride/offset/
    // ordering metadata fields.
    JsonNode root = jp.getCodec().readTree(jp);
    JsonNode bufferNode = root.get("dataBuffer");
    int rank = root.get("rankField").asInt();
    int length = root.get("numElements").asInt();
    int offset = root.get("offsetField").asInt();
    JsonNode shapeNode = root.get("shapeField");
    JsonNode strideNode = root.get("strideField");
    String arrayType = root.get("typeField").asText();

    // Copy the raw element values into an ND4J data buffer.
    DataBuffer data = Nd4j.createBuffer(length);
    for (int i = 0; i < length; i++) {
        data.put(i, bufferNode.get(i).asDouble());
    }

    char order = root.get("orderingField").asText().charAt(0);

    // Decode shape and stride element-wise into int arrays of the stored rank.
    int[] shape = new int[rank];
    int[] stride = new int[rank];
    for (int i = 0; i < rank; i++) {
        shape[i] = shapeNode.get(i).asInt();
        stride[i] = strideNode.get(i).asInt();
    }

    // "real" arrays become standard INDArrays; any other type tag is treated as complex.
    if (arrayType.equals("real")) {
        return Nd4j.create(data, shape, stride, offset, order);
    }
    return Nd4j.createComplex(data, shape, stride, offset, order);
}
}
@Override
public INDArray deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
    // Reads a plain JSON array of numbers and wraps it as a 1-d INDArray.
    JsonNode node = jsonParser.getCodec().readTree(jsonParser);
    if (node == null) {
        return null;
    }
    int n = node.size();
    double[] values = new double[n];
    for (int i = 0; i < n; i++) {
        values[i] = node.get(i).asDouble();
    }
    return Nd4j.create(values);
}
}
// NOTE(review): truncated chunk. The method signature preceding "throws" and the
// opening "switch (simpleName) {" statement are not visible here, and the first
// "return new BetaDistribution(...)" presumably belongs to a
// "case \"BetaDistribution\":" label that is missing above — confirm against the
// full file. The fragment also ends mid-switch at "UniformRealDistribution".
// Purpose (from what is visible): maps a serialized simple class name in the
// "distribution" field, plus per-distribution numeric parameter fields, back to
// the corresponding Apache Commons Math RealDistribution instance.
throws IOException, JsonProcessingException { JsonNode node = p.getCodec().readTree(p); String simpleName = node.get("distribution").asText(); return new BetaDistribution(node.get("alpha").asDouble(), node.get("beta").asDouble()); case "CauchyDistribution": return new CauchyDistribution(node.get("median").asDouble(), node.get("scale").asDouble()); case "ChiSquaredDistribution": return new ChiSquaredDistribution(node.get("dof").asDouble()); case "ExponentialDistribution": return new ExponentialDistribution(node.get("mean").asDouble()); case "FDistribution": return new FDistribution(node.get("numeratorDof").asDouble(), node.get("denominatorDof").asDouble()); case "GammaDistribution": return new GammaDistribution(node.get("shape").asDouble(), node.get("scale").asDouble()); case "LevyDistribution": return new LevyDistribution(node.get("mu").asDouble(), node.get("c").asDouble()); case "LogNormalDistribution": return new LogNormalDistribution(node.get("scale").asDouble(), node.get("shape").asDouble()); case "NormalDistribution": return new NormalDistribution(node.get("mean").asDouble(), node.get("stdev").asDouble()); case "ParetoDistribution": return new ParetoDistribution(node.get("scale").asDouble(), node.get("shape").asDouble()); case "TDistribution": return new TDistribution(node.get("dof").asDouble()); case "TriangularDistribution": return new TriangularDistribution(node.get("a").asDouble(), node.get("b").asDouble(), node.get("c").asDouble()); case "UniformRealDistribution":
// NOTE(review): truncated chunk. The enclosing loop over layers (supplying
// "layerName" and "layer") is not visible, and the closing braces for the two
// "continue;" guard branches are missing in this view — confirm against the full
// file before editing.
// Purpose (from what is visible): lazily parses the graph JSON, navigates
// vertices -> <layerName> -> "LayerVertex" -> "layerConf" -> "layer", skips
// vertices without a usable single-entry layer wrapper, then reads the legacy
// "activationFunction" string field and installs the corresponding IActivation
// on the layer — presumably a legacy-format compatibility fix-up.
if (vertices == null) { JsonNode jsonNode = mapper.readTree(json); vertices = jsonNode.get("vertices"); JsonNode vertexNode = vertices.get(layerName); JsonNode layerVertexNode = vertexNode.get("LayerVertex"); if (layerVertexNode == null || !layerVertexNode.has("layerConf") || !layerVertexNode.get("layerConf").has("layer")) { continue; JsonNode layerWrapperNode = layerVertexNode.get("layerConf").get("layer"); if (layerWrapperNode == null || layerWrapperNode.size() != 1) { continue; JsonNode layerNode = layerWrapperNode.elements().next(); JsonNode activationFunction = layerNode.get("activationFunction"); //Should only have 1 element: "dense", "output", etc IActivation ia = Activation.fromString(activationFunction.asText()).getActivationFunction(); ((BaseLayer) layer).setActivationFn(ia);
// NOTE(review): truncated chunk. The closing braces of each "throw new
// JsonParseException(...)" guard are missing in this view, and the fragment ends
// at the start of the "binomial" branch — confirm against the full file.
// Purpose (from what is visible): handles the legacy Distribution JSON format in
// which the distribution is a single-key wrapper object ("normal", "gaussian",
// "uniform", "binomial", ...). Each branch validates the expected parameter
// fields ("mean"/"std" or "lower"/"upper"), throwing a JsonParseException with
// the parser's current location when one is missing, then constructs the
// matching Distribution instance.
if (node.has("normal")) { JsonNode n = node.get("normal"); if (!n.has("mean") || !n.has("std")) { throw new JsonParseException("Cannot deserialize Distribution: legacy format 'normal' wrapper object " + " is missing 'mean' or 'std' field", jp.getCurrentLocation()); double m = n.get("mean").asDouble(); double s = n.get("std").asDouble(); return new NormalDistribution(m, s); } else if (node.has("gaussian")) { JsonNode n = node.get("gaussian"); if (!n.has("mean") || !n.has("std")) { throw new JsonParseException("Cannot deserialize Distribution: legacy format 'gaussian' wrapper object " + " is missing 'mean' or 'std' field", jp.getCurrentLocation()); double m = n.get("mean").asDouble(); double s = n.get("std").asDouble(); return new GaussianDistribution(m, s); } else if (node.has("uniform")) { JsonNode n = node.get("uniform"); if (!n.has("lower") || !n.has("upper")) { throw new JsonParseException("Cannot deserialize Distribution: legacy format 'uniform' wrapper object " + " is missing 'lower' or 'upper' field", jp.getCurrentLocation()); double l = n.get("lower").asDouble(); double u = n.get("upper").asDouble(); return new UniformDistribution(l, u); } else if (node.has("binomial")) {
@Override
public AtomicDouble deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
        throws IOException, JsonProcessingException {
    // An AtomicDouble is serialized as a bare numeric value; read it back directly.
    JsonNode node = jsonParser.getCodec().readTree(jsonParser);
    return new AtomicDouble(node.asDouble());
}
}
// NOTE(review): truncated chunk. The closing brace of the classes loop, the
// declarations of "innerMultiSetCount" and "iterKey", and the body consuming
// "predictedClass"/"count" are not visible here — confirm against the full file.
// Purpose (from what is visible): rebuilds a confusion-matrix-like structure
// from JSON — reads the class labels from the "classes" array, then walks the
// "matrix" object's fields, iterating a parallel pair of key/count iterators to
// recover (predictedClass, count) pairs.
ArrayNode classesNode = (ArrayNode) n.get("classes"); List<Integer> classes = new ArrayList<>(); for (JsonNode cn : classesNode) { classes.add(cn.asInt()); ObjectNode matrix = (ObjectNode) n.get("matrix"); Iterator<Map.Entry<String, JsonNode>> matrixIter = matrix.fields(); while (matrixIter.hasNext()) { Iterator<JsonNode> iterCnt = innerMultiSetCount.iterator(); while (iterKey.hasNext()) { int predictedClass = iterKey.next().asInt(); int count = iterCnt.next().asInt();
public void setGraphConfiguration(JsonNode conf) {
    // Rebuild and initialize the network only when a usable configuration is supplied;
    // guard clauses skip null nodes and the JSON "null" literal.
    if (conf == null) {
        return;
    }
    String json = conf.toString();
    if (json == null || json.equals("null")) {
        return;
    }
    net = new ComputationGraph(ComputationGraphConfiguration.fromJson(json));
    net.init();
}
// NOTE(review): truncated chunk (duplicate of an earlier fragment in this view).
// The method signature preceding "throws" and the opening
// "switch (simpleName) {" statement are not visible, and the first
// "return new BetaDistribution(...)" presumably belongs to a missing
// "case \"BetaDistribution\":" label — confirm against the full file. The
// fragment also ends mid-switch at "UniformRealDistribution".
// Purpose (from what is visible): maps a serialized simple class name in the
// "distribution" field, plus per-distribution numeric parameter fields, back to
// the corresponding Apache Commons Math RealDistribution instance.
throws IOException, JsonProcessingException { JsonNode node = p.getCodec().readTree(p); String simpleName = node.get("distribution").asText(); return new BetaDistribution(node.get("alpha").asDouble(), node.get("beta").asDouble()); case "CauchyDistribution": return new CauchyDistribution(node.get("median").asDouble(), node.get("scale").asDouble()); case "ChiSquaredDistribution": return new ChiSquaredDistribution(node.get("dof").asDouble()); case "ExponentialDistribution": return new ExponentialDistribution(node.get("mean").asDouble()); case "FDistribution": return new FDistribution(node.get("numeratorDof").asDouble(), node.get("denominatorDof").asDouble()); case "GammaDistribution": return new GammaDistribution(node.get("shape").asDouble(), node.get("scale").asDouble()); case "LevyDistribution": return new LevyDistribution(node.get("mu").asDouble(), node.get("c").asDouble()); case "LogNormalDistribution": return new LogNormalDistribution(node.get("scale").asDouble(), node.get("shape").asDouble()); case "NormalDistribution": return new NormalDistribution(node.get("mean").asDouble(), node.get("stdev").asDouble()); case "ParetoDistribution": return new ParetoDistribution(node.get("scale").asDouble(), node.get("shape").asDouble()); case "TDistribution": return new TDistribution(node.get("dof").asDouble()); case "TriangularDistribution": return new TriangularDistribution(node.get("a").asDouble(), node.get("b").asDouble(), node.get("c").asDouble()); case "UniformRealDistribution":
// NOTE(review): truncated chunk that appears to splice two similar passes
// together. The enclosing loop, the derivation of "outputLayerNNCNode", several
// closing braces, and the "try" body are not visible here — confirm against the
// full file before editing.
// Purpose (from what is visible): lazily parses the configuration JSON, caches
// the "confs" array, then navigates into the output layer's node ("output" or
// "rnnoutput" wrapper) to read the legacy "lossFunction" string and resolve it
// to a LossFunctions.LossFunction — presumably a legacy-format fix-up. The
// second half repeats the navigation but validates a single-entry "layer"
// wrapper before taking its sole element.
JsonNode jsonNode = mapper.readTree(json); if (confs == null) { confs = jsonNode.get("confs"); if (outputLayerNNCNode == null) return conf; //Should never happen... JsonNode outputLayerNode = outputLayerNNCNode.get("layer"); if (outputLayerNode.has("output")) { lossFunctionNode = outputLayerNode.get("output").get("lossFunction"); } else if (outputLayerNode.has("rnnoutput")) { lossFunctionNode = outputLayerNode.get("rnnoutput").get("lossFunction"); String lossFunctionEnumStr = lossFunctionNode.asText(); LossFunctions.LossFunction lossFunction = null; try { JsonNode jsonNode = mapper.readTree(json); if (confs == null) { confs = jsonNode.get("confs"); if (outputLayerNNCNode == null) return conf; //Should never happen... JsonNode layerWrapperNode = outputLayerNNCNode.get("layer"); if (layerWrapperNode == null || layerWrapperNode.size() != 1) { continue; JsonNode layerNode = layerWrapperNode.elements().next();
@Override
public AtomicDouble deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
        throws IOException, JsonProcessingException {
    // The serialized form is a bare numeric node; wrap its double value.
    JsonNode tree = jsonParser.getCodec().readTree(jsonParser);
    double value = tree.asDouble();
    return new AtomicDouble(value);
}
}
public void setLayerConfiguration(JsonNode conf) {
    // Rebuild and initialize the network only when a usable configuration is supplied;
    // guard clauses skip null nodes and the JSON "null" literal.
    if (conf == null) {
        return;
    }
    String json = conf.toString();
    if (json == null || json.equals("null")) {
        return;
    }
    net = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(json));
    net.init();
}
@Override
public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException {
    // Rebuilds an INDArray from the custom JSON layout: flat "dataBuffer" values
    // plus rank/shape/stride/offset/ordering metadata fields.
    JsonNode root = jp.getCodec().readTree(jp);
    JsonNode dataNode = root.get("dataBuffer");
    int rank = root.get("rankField").asInt();
    int elementCount = root.get("numElements").asInt();
    int offset = root.get("offsetField").asInt();
    JsonNode shapeNode = root.get("shapeField");
    JsonNode strideNode = root.get("strideField");
    String typeTag = root.get("typeField").asText();

    // Transfer raw element values into an ND4J buffer.
    DataBuffer buffer = Nd4j.createBuffer(elementCount);
    for (int idx = 0; idx < elementCount; idx++) {
        buffer.put(idx, dataNode.get(idx).asDouble());
    }

    char ordering = root.get("orderingField").asText().charAt(0);

    // Decode shape and stride arrays of the stored rank.
    int[] shape = new int[rank];
    int[] stride = new int[rank];
    for (int idx = 0; idx < rank; idx++) {
        shape[idx] = shapeNode.get(idx).asInt();
        stride[idx] = strideNode.get(idx).asInt();
    }

    // A "real" type tag yields a standard INDArray; anything else is complex.
    return typeTag.equals("real")
            ? Nd4j.create(buffer, shape, stride, offset, ordering)
            : Nd4j.createComplex(buffer, shape, stride, offset, ordering);
}
}
@Override
public INDArray deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
    // A plain JSON array of numbers becomes a 1-d INDArray; a missing tree yields null.
    JsonNode tree = jsonParser.getCodec().readTree(jsonParser);
    if (tree == null) {
        return null;
    }
    double[] data = new double[tree.size()];
    for (int idx = 0; idx < data.length; idx++) {
        data[idx] = tree.get(idx).asDouble();
    }
    return Nd4j.create(data);
}
}
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
    // Deserializes a {"class": <fully-qualified class name>, "value": <object>} wrapper:
    // loads the named class, then binds the "value" subtree to it.
    JsonNode node = p.getCodec().readTree(p);
    String className = node.get("class").asText();
    Class<?> c;
    try {
        // SECURITY NOTE(review): Class.forName on a class name taken from the input
        // allows callers of this deserializer to instantiate arbitrary classes.
        // Only use on trusted data; consider an allow-list if the input is external.
        c = Class.forName(className);
    } catch (ClassNotFoundException e) {
        // Narrowed from a broad catch (Exception); preserve the cause and name the class.
        throw new RuntimeException("Could not load class for deserialization: " + className, e);
    }
    JsonNode valueNode = node.get("value");
    // Reuse the parser's codec (the configured ObjectMapper) instead of allocating a
    // fresh ObjectMapper per call — cheaper, and honors registered modules/settings.
    return p.getCodec().treeToValue(valueNode, c);
}
}
@Override
public IntegerDistribution deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
    // Reads the {"distribution": <simple class name>, ...params} layout and rebuilds
    // the corresponding Apache Commons Math IntegerDistribution.
    JsonNode node = p.getCodec().readTree(p);
    String distributionName = node.get("distribution").asText();
    switch (distributionName) {
        case "BinomialDistribution":
            return new BinomialDistribution(node.get("trials").asInt(), node.get("p").asDouble());
        case "GeometricDistribution":
            return new GeometricDistribution(node.get("p").asDouble());
        case "HypergeometricDistribution":
            return new HypergeometricDistribution(
                    node.get("populationSize").asInt(),
                    node.get("numberOfSuccesses").asInt(),
                    node.get("sampleSize").asInt());
        case "PascalDistribution":
            return new PascalDistribution(node.get("r").asInt(), node.get("p").asDouble());
        case "PoissonDistribution":
            return new PoissonDistribution(node.get("p").asDouble());
        case "UniformIntegerDistribution":
            return new UniformIntegerDistribution(node.get("lower").asInt(), node.get("upper").asInt());
        case "ZipfDistribution":
            return new ZipfDistribution(node.get("numElements").asInt(), node.get("exponent").asDouble());
        default:
            // Unrecognized name: fail loudly rather than guess.
            throw new RuntimeException("Unknown or not supported distribution: " + distributionName);
    }
}
}
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
    // Deserializes a {"@class": <fully-qualified class name>, "value": <object>} wrapper:
    // loads the named class, then binds the "value" subtree to it.
    JsonNode node = p.getCodec().readTree(p);
    String className = node.get("@class").asText();
    Class<?> c;
    try {
        // SECURITY NOTE(review): Class.forName on a class name taken from the input
        // allows callers of this deserializer to instantiate arbitrary classes.
        // Only use on trusted data; consider an allow-list if the input is external.
        c = Class.forName(className);
    } catch (ClassNotFoundException e) {
        // Narrowed from a broad catch (Exception); preserve the cause and name the class.
        throw new RuntimeException("Could not load class for deserialization: " + className, e);
    }
    JsonNode valueNode = node.get("value");
    // Reuse the parser's codec (the configured ObjectMapper) instead of allocating a
    // fresh ObjectMapper per call — cheaper, and honors registered modules/settings.
    return p.getCodec().treeToValue(valueNode, c);
}
}