network.fit(trainIterator); ModelSerializer.writeModel(network, modelFile,true); long endTime = System.currentTimeMillis(); System.out.println("=============run time=====================" + (endTime - startTime));
W = Dropout.applyDropConnect(this, CDAEParamInitializer.WEIGHT_KEY);
/**
 * Loads (and optionally caches) the pretrained VGG16 ImageNet model.
 *
 * When {@code serialize} is true, the model is restored from {@code cacheDir}
 * if it exists; otherwise it is fetched via the DL4J model zoo and written
 * back to {@code cacheDir} so later startups can skip the download. When
 * {@code serialize} is false, the zoo model is loaded directly each time.
 *
 * @param params configuration parameters (not referenced in this method —
 *               TODO confirm whether they are consumed elsewhere)
 * @throws TikaConfigException if the model cannot be loaded; {@code available}
 *         is set to false before rethrowing so callers can probe availability
 */
@Override
public void initialize(Map<String, Param> params) throws TikaConfigException {
    try {
        if (serialize) {
            if (cacheDir.exists()) {
                // Fast path: reuse the previously serialized computation graph.
                model = ModelSerializer.restoreComputationGraph(cacheDir);
                LOG.info("Preprocessed Model Loaded from {}", cacheDir);
            } else {
                LOG.warn("Preprocessed Model doesn't exist at {}", cacheDir);
                cacheDir.getParentFile().mkdirs();
                // NOTE(review): raw ZooModel type used here, as in the original.
                ZooModel zooModel = VGG16.builder().build();
                model = (ComputationGraph) zooModel.initPretrained(PretrainedType.IMAGENET);
                LOG.info("Saving the Loaded model for future use. Saved models are more optimised to consume less resources.");
                // Persist the freshly downloaded model for subsequent runs.
                ModelSerializer.writeModel(model, cacheDir, true);
            }
        } else {
            // Caching disabled: always load via the DL4J model zoo.
            LOG.info("Weight graph model loaded via dl4j Helper functions");
            ZooModel zooModel = VGG16.builder().build();
            model = (ComputationGraph) zooModel.initPretrained(PretrainedType.IMAGENET);
        }
        imageNetLabels = new ImageNetLabels();
        available = true;
    } catch (Exception e) {
        // Mark unavailable before propagating so availability checks see the failure.
        available = false;
        LOG.warn(e.getMessage(), e);
        throw new TikaConfigException(e.getMessage(), e);
    }
}
/**
 * Returns the moving window matrices in their original, non-flattened form.
 *
 * @return the list of non-flattened window matrices
 */
public List<INDArray> windows() {
    // Delegate to the parameterized overload with flattening disabled.
    final boolean flattened = false;
    return windows(flattened);
}
/**
 * Adds every element of {@code c} via {@code addAndSave}, persisting each
 * addition as it happens.
 *
 * Fix: the previous implementation returned {@code true} unconditionally,
 * violating the {@link java.util.Collection#addAll} contract, which requires
 * {@code false} when the collection did not change (e.g. for an empty input).
 *
 * @param c the elements to add; may be empty
 * @return {@code true} if this collection changed as a result of the call
 */
@Override
public boolean addAll(Collection<? extends E> c) {
    boolean changed = false;
    for (E e : c) {
        addAndSave(e);
        changed = true;
    }
    return changed;
}
/**
 * Total sum of squares in the target attribute: the regression sum of
 * squares plus the error sum of squares.
 *
 * @param residuals error values
 * @param targetAttribute data for the target attribute
 * @return total variance in the target attribute
 */
public static double ssTotal(double[] residuals, double[] targetAttribute) {
    double regression = ssReg(residuals, targetAttribute);
    double error = ssError(residuals, targetAttribute);
    return regression + error;
}
/**
 * Builds a copy of this grid with duplicate rows removed; this grid is
 * left unmodified.
 *
 * @return a new StringGrid containing only the unique rows of this grid
 */
public StringGrid getUniqueRows() {
    StringGrid deduplicated = new StringGrid(this);
    deduplicated.stripDuplicateRows();
    return deduplicated;
}
/**
 * Rewinds iteration to the beginning. When shuffling is enabled, the
 * traversal order is re-randomized for the next pass.
 */
@Override
public void reset() {
    // Re-randomize the visit order first if requested; this is independent
    // of clearing the cursor state below.
    if (shuffle) {
        MathUtils.shuffleArray(order, rng);
    }
    cursor = 0;
    curr = null;
}
/**
 * Loads the model from the location that it saves to
 *
 * @param clazz the expected type of the stored object; NOTE(review): not
 *              actually inspected here — presumably used only for call-site
 *              type inference, so a mismatch surfaces later as a
 *              ClassCastException. Confirm with callers.
 * @return the deserialized object read from {@code file}
 */
@Override
public <E> E load(Class<E> clazz) {
    // Delegates deserialization entirely to SerializationUtils.
    return SerializationUtils.readObject(file);
}
}
/**
 * Escape commas in the string using the default escape char
 *
 * @param str a string
 * @return an escaped string
 */
public static String escapeString(String str) {
    // Call the char[]-based overload directly; the single-char overload is
    // itself just a wrapper that builds this one-element array.
    return escapeString(str, ESCAPE_CHAR, new char[] {COMMA});
}
/**
 * Decodes the given labels, assuming it is a binary label matrix.
 *
 * @param labels the labels as a binary label matrix
 * @return the decoded labels and the most likely outcome of the sequence
 */
public Pair<Double, INDArray> decode(INDArray labels) {
    // Second argument presumably toggles binary-label handling — confirm
    // against the two-argument overload.
    final boolean binaryLabelMatrix = true;
    return decode(labels, binaryLabelMatrix);
}
@Override public void update(INDArray gradient, String paramType) { if (!logUpdate) { OneTimeLogger.info(log, "Frozen layers will not be updated. Warning will be issued only once per instance"); logUpdate = true; } //no op }
/**
 * Serializes {@code masterResults} to the given stream.
 *
 * @param dos the stream to write to; NOTE(review): not flushed or closed
 *            here — presumably the caller owns the stream lifecycle.
 */
@Override
public void write(DataOutputStream dos) {
    // Binary serialization is delegated entirely to SerializationUtils.
    SerializationUtils.writeObject(masterResults, dos);
}
}
f.getParentFile().mkdirs(); ModelSerializer.writeModel(net, f, true); log.info("Saved network to {}", outpath);
/**
 * Returns a list of non flattened moving window matrices
 *
 * @return the list of matrices, produced by delegating to the
 *         parameterized overload with flattening disabled
 */
public List<INDArray> windows() {
    // Convenience overload: false means "do not flatten the windows".
    return windows(false);
}
/**
 * Adds every element of {@code c} via {@code addAndSave}, persisting each
 * addition as it happens.
 *
 * Fix: the previous implementation returned {@code true} unconditionally,
 * violating the {@link java.util.Collection#addAll} contract, which requires
 * {@code false} when the collection did not change (e.g. for an empty input).
 *
 * @param c the elements to add; may be empty
 * @return {@code true} if this collection changed as a result of the call
 */
@Override
public boolean addAll(Collection<? extends E> c) {
    boolean changed = false;
    for (E e : c) {
        addAndSave(e);
        changed = true;
    }
    return changed;
}
/**
 * Escape <code>charToEscape</code> in the string
 * with the escape char <code>escapeChar</code>
 *
 * @param str string
 * @param escapeChar escape char
 * @param charToEscape the char to be escaped
 * @return an escaped string
 */
public static String escapeString(String str, char escapeChar, char charToEscape) {
    // Wrap the single character and hand off to the char[]-based overload.
    char[] charsToEscape = {charToEscape};
    return escapeString(str, escapeChar, charsToEscape);
}
f.getParentFile().mkdirs(); ModelSerializer.writeModel(sparkNet.getNetwork(), f, true); log.info("Saved network checkpoint to {}", outpath);
FileSystem fileSystem = FileSystem.get(sc.hadoopConfiguration()); try (BufferedOutputStream os = new BufferedOutputStream(fileSystem.create(new Path(networkPath)))) { ModelSerializer.writeModel(sparkNet.getNetwork(), os, true);