/**
 * Fetch the next batch, starting from the beginning of the example stream.
 *
 * @param batchSize number of examples to return in the batch
 * @return the next {@link DataSet} of at most {@code batchSize} examples
 */
public DataSet next(int batchSize) {
    // Delegate to the two-argument overload with a starting example index of 0.
    final int startExampleIndex = 0;
    return next(batchSize, startExampleIndex);
}
/**
 * Create a CIFAR-specific dataset iterator.
 *
 * @param batchSize the batch size of the examples
 * @param numExamples the overall number of examples (capped at the dataset size)
 * @param imgDim an array of height, width and channels
 * @param numPossibleLables the number of possible label classes
 * @param imageTransform the transformation to apply to the images
 * @param useSpecialPreProcessCifar use Zagoruyko's preprocess for Cifar
 * @param train true if use training set and false for test
 */
public CifarDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numPossibleLables,
                ImageTransform imageTransform, boolean useSpecialPreProcessCifar, boolean train) {
    super(null, batchSize, 1, numExamples);
    this.loader = new CifarLoader(imgDim[0], imgDim[1], imgDim[2], imageTransform, train, useSpecialPreProcessCifar);
    // Clamp the requested example count to the size of the chosen split.
    int totalExamples = train ? CifarLoader.NUM_TRAIN_IMAGES : CifarLoader.NUM_TEST_IMAGES;
    this.numExamples = Math.min(numExamples, totalExamples);
    this.numPossibleLabels = numPossibleLables;
    this.imageTransform = imageTransform;
    this.useSpecialPreProcessCifar = useSpecialPreProcessCifar;
    this.train = train;
}
/**
 * Returns the label names known to the underlying CIFAR loader.
 *
 * @return list of label names, as provided by the loader
 */
@Override
public List<String> getLabels() {
    // Pure delegation: the loader owns the label vocabulary.
    List<String> labelNames = loader.getLabels();
    return labelNames;
}
protected void load() { if (!cifarRawFilesExist() && !fullDir.exists()) { generateMaps(); fullDir.mkdir(); downloadAndUntar(cifarDataMap, new File(BASE_DIR, localDir)); defineLabels(); if (useSpecialPreProcessCifar && train && !cifarProcessedFilesExists()) { for (int i = fileNum + 1; i <= (TRAINFILENAMES.length); i++) { inputStream = trainInputStream; DataSet result = convertDataSet(numToConvertDS); result.save(new File(trainFilesSerialized + i + ".ser")); DataSet result = convertDataSet(numToConvertDS); result.save(new File(testFilesSerialized)); setInputStream();
/**
 * Fetch the next batch of CIFAR examples.
 * <p>
 * Fixes from review: the original guard compared {@code result == new DataSet()},
 * which is reference equality against a freshly allocated object and therefore
 * always false (dead check) — it has been removed. The deprecated
 * {@code getFeatureMatrix()} call was replaced with {@code getFeatures()} for
 * consistency with the sibling implementation of this method.
 *
 * @param batchSize number of examples to return
 * @return the next {@link DataSet}, or the previously returned batch when the
 *         iterator has overshot the available data / batch limit
 */
@Override
public DataSet next(int batchSize) {
    // A prior hasNext()-style probe may have cached a batch; hand it out once.
    if (useCurrent) {
        useCurrent = false;
        return last;
    }
    DataSet result;
    if (useSpecialPreProcessCifar) {
        // Special preprocessing path needs the running example offset.
        result = loader.next(batchSize, exampleCount);
    } else {
        result = loader.next(batchSize);
    }
    exampleCount += batchSize;
    batchNum++;
    // Stop when the loader returned no features or the batch cap is reached.
    if (result.getFeatures() == null || (maxNumBatches > -1 && batchNum >= maxNumBatches)) {
        overshot = true;
        return last;
    }
    if (preProcessor != null) {
        preProcessor.preProcess(result);
    }
    last = result;
    if (loader.getLabels() != null) {
        result.setLabelNames(loader.getLabels());
    }
    return result;
}
matConversion = convertMat(byteFeature); try { dataSets.add(new DataSet(asMatrix(matConversion.getSecond()), matConversion.getFirst())); batchNumCount++; } catch (Exception e) { uTempMean = uChannel.meanNumber().doubleValue(); uStd += varManual(uChannel, uTempMean); uMean += uTempMean; vTempMean = vChannel.meanNumber().doubleValue(); vStd += varManual(vChannel, vTempMean); vMean += vTempMean; data.setFeatures(data.getFeatureMatrix().div(255));
/**
 * Rewind the iterator to the beginning of the dataset.
 * Clears the per-epoch counters and resets the underlying loader.
 */
@Override
public void reset() {
    // Clear iteration bookkeeping before restarting the loader.
    exampleCount = 0;
    overshot = false;
    batchNum = 0;
    loader.reset();
}
public DataSet next(int batchSize, int exampleNum) { List<DataSet> temp = new ArrayList<>(); DataSet result; if (cifarProcessedFilesExists() && useSpecialPreProcessCifar) { if (exampleNum == 0 || ((exampleNum / fileNum) == numToConvertDS && train)) { fileNum++; result = temp.get(0); } else { result = convertDataSet(batchSize);
/**
 * Fetch the next batch of CIFAR examples.
 * <p>
 * Fix from review: the original guard compared {@code result == new DataSet()},
 * which is reference equality against a freshly allocated object and therefore
 * always false (dead check) — it has been removed.
 *
 * @param batchSize number of examples to return
 * @return the next {@link DataSet}, or the previously returned batch when the
 *         iterator has overshot the available data / batch limit
 */
@Override
public DataSet next(int batchSize) {
    // A prior hasNext()-style probe may have cached a batch; hand it out once.
    if (useCurrent) {
        useCurrent = false;
        return last;
    }
    DataSet result;
    if (useSpecialPreProcessCifar) {
        // Special preprocessing path needs the running example offset.
        result = loader.next(batchSize, exampleCount);
    } else {
        result = loader.next(batchSize);
    }
    exampleCount += batchSize;
    batchNum++;
    // Stop when the loader returned no features or the batch cap is reached.
    if (result.getFeatures() == null || (maxNumBatches > -1 && batchNum >= maxNumBatches)) {
        overshot = true;
        return last;
    }
    if (preProcessor != null) {
        preProcessor.preProcess(result);
    }
    last = result;
    if (loader.getLabels() != null) {
        result.setLabelNames(loader.getLabels());
    }
    return result;
}
/**
 * Rewind the iterator to the start of the dataset, zeroing all
 * per-epoch counters and resetting the underlying loader.
 */
@Override
public void reset() {
    // Reset bookkeeping state, then restart the loader's cursor.
    exampleCount = 0;
    overshot = false;
    batchNum = 0;
    loader.reset();
}
/**
 * Returns the label names known to the underlying CIFAR loader.
 *
 * @return list of label names, as provided by the loader
 */
@Override
public List<String> getLabels() {
    // Pure delegation: the loader owns the label vocabulary.
    List<String> labelNames = loader.getLabels();
    return labelNames;
}
/**
 * Create a CIFAR-specific dataset iterator with reproducible, optionally
 * randomized iteration order.
 *
 * @param batchSize the batch size of the examples
 * @param numExamples the overall number of examples (capped at the dataset size)
 * @param imgDim an array of height, width and channels
 * @param numPossibleLables the number of possible label classes
 * @param imageTransform the transformation to apply to the images
 * @param useSpecialPreProcessCifar use Zagoruyko's preprocess for Cifar
 * @param train true if use training set and false for test
 * @param rngSeed Seed for RNG repeatability
 * @param randomize If true: randomize the iteration order of the images
 */
public CifarDataSetIterator(int batchSize, int numExamples, int[] imgDim, int numPossibleLables,
                ImageTransform imageTransform, boolean useSpecialPreProcessCifar, boolean train,
                long rngSeed, boolean randomize) {
    super(null, batchSize, 1, numExamples);
    this.loader = new CifarLoader(imgDim[0], imgDim[1], imgDim[2], imageTransform, train,
                    useSpecialPreProcessCifar, null, rngSeed, randomize);
    // Clamp the requested example count to the size of the chosen split.
    int totalExamples = train ? CifarLoader.NUM_TRAIN_IMAGES : CifarLoader.NUM_TEST_IMAGES;
    this.numExamples = Math.min(numExamples, totalExamples);
    this.numPossibleLabels = numPossibleLables;
    this.imageTransform = imageTransform;
    this.useSpecialPreProcessCifar = useSpecialPreProcessCifar;
    this.train = train;
}