/** Builds a CBOW training worker that processes one batch of sentences. */
@Override Worker createWorker(int randomSeed, int iter, Iterable<List<String>> batch) {
  CBOWWorker worker = new CBOWWorker(randomSeed, iter, batch);
  return worker;
} }
/** Builds a skip-gram training worker that processes one batch of sentences. */
@Override Worker createWorker(int randomSeed, int iter, Iterable<List<String>> batch) {
  SkipGramWorker worker = new SkipGramWorker(randomSeed, iter, batch);
  return worker;
} }
/** Creates a trainer that learns CBOW word vectors for the given vocabulary. */
@Override NeuralNetworkTrainer createTrainer(NeuralNetworkConfig config, Multiset<String> counts, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  CBOWModelTrainer trainer = new CBOWModelTrainer(config, counts, huffmanNodes, listener);
  return trainer;
}
// NOTE(review): mid-method fragment of the training loop — zeroes every entry
// of neu1e (presumably the hidden-layer error accumulator; confirm against the
// full method), advances the PRNG state, then runs negative sampling for the
// current word's Huffman node.
for (int c = 0; c < layer1_size; c++) neu1e[c] = 0; nextRandom = incrementRandom(nextRandom); handleNegativeSampling(huffmanNode);
/**
 * Initializes shared training state: copies the configuration, sizes the
 * network layers from the vocabulary, and builds the weight and sampling tables.
 */
NeuralNetworkTrainer(NeuralNetworkConfig config, Multiset<String> vocab, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  this.config = config;
  this.huffmanNodes = huffmanNodes;
  this.listener = listener;
  // One weight row per vocabulary entry; Multiset#size() counts tokens with multiplicity.
  this.vocabSize = huffmanNodes.size();
  this.numTrainedTokens = vocab.size();
  this.layer1_size = config.layerSize;
  this.window = config.windowSize;
  this.actualWordCount = new AtomicInteger();
  this.alpha = config.initialLearningRate;
  // Input vectors, hierarchical-softmax weights, and negative-sampling weights.
  this.syn0 = new double[vocabSize][layer1_size];
  this.syn1 = new double[vocabSize][layer1_size];
  this.syn1neg = new double[vocabSize][layer1_size];
  this.table = new int[TABLE_SIZE];
  initializeSyn0();
  initializeUnigramTable();
}
// NOTE(review): mid-method fragment — clears one error entry, advances the PRNG,
// then draws b in [0, window) via a sign-safe double modulo (nextRandom can be
// negative, so a single % could yield a negative index).
neu1e[c] = 0; nextRandom = incrementRandom(nextRandom); int b = (int)((nextRandom % window) + window) % window; handleNegativeSampling(huffmanNode);
// NOTE(review): this line appears to splice two unrelated fragments — builder
// defaulting (learning rate falls back to the model type's default, thread count
// to the available processors) followed by part of an argument list constructing
// a NeuralNetworkConfig. TODO: confirm against the original builder source.
this.initialLearningRate = MoreObjects.firstNonNull(initialLearningRate, type.getDefaultInitialLearningRate()); if (this.numThreads == null) this.numThreads = Runtime.getRuntime().availableProcessors(); minFrequency, vocab, new NeuralNetworkConfig( type, numThreads,
/**
 * @return a {@link NeuralNetworkTrainer} of the configured {@code type}, wired
 *         to the given vocabulary, Huffman coding, and progress listener
 */
public NeuralNetworkTrainer createTrainer(ImmutableMultiset<String> vocab, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  return type.createTrainer(this, vocab, huffmanNodes, listener);
}
/** Creates a trainer that learns skip-gram word vectors for the given vocabulary. */
@Override NeuralNetworkTrainer createTrainer(NeuralNetworkConfig config, Multiset<String> counts, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  SkipGramModelTrainer trainer = new SkipGramModelTrainer(config, counts, huffmanNodes, listener);
  return trainer;
}
/** Human-readable summary of this training configuration. */
@Override public String toString() {
  String softmax = useHierarchicalSoftmax ? "using" : "not using";
  return String.format(
      "%s with %s threads, %s iterations[%s layer size, %s window, %s hierarchical softmax, %s negative samples, %s initial learning rate, %s down sample rate]",
      type.name(), numThreads, iterations, layerSize, windowSize, softmax, negativeSamples, initialLearningRate, downSampleRate);
} }
// NOTE(review): mid-method fragment of negative sampling — the positive example
// gets label 1; otherwise a negative target is drawn from the unigram table with
// a sign-safe index ((x % N) + N) % N, and target == 0 is handled specially
// (the continuation is not visible here).
label = 1; } else { nextRandom = incrementRandom(nextRandom); target = table[(int) (((nextRandom >> 16) % TABLE_SIZE) + TABLE_SIZE) % TABLE_SIZE]; if (target == 0)
// NOTE(review): mid-method fragment of the training loop — zeroes every entry
// of neu1e (presumably the hidden-layer error accumulator; confirm against the
// full method), advances the PRNG state, then runs negative sampling for the
// current word's Huffman node.
for (int c = 0; c < layer1_size; c++) neu1e[c] = 0; nextRandom = incrementRandom(nextRandom); handleNegativeSampling(huffmanNode);
/**
 * Initializes shared training state: copies the configuration, sizes the
 * network layers from the vocabulary, and builds the weight and sampling tables.
 */
NeuralNetworkTrainer(NeuralNetworkConfig config, Multiset<String> vocab, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  this.config = config;
  this.huffmanNodes = huffmanNodes;
  this.listener = listener;
  // One weight row per vocabulary entry; Multiset#size() counts tokens with multiplicity.
  this.vocabSize = huffmanNodes.size();
  this.numTrainedTokens = vocab.size();
  this.layer1_size = config.layerSize;
  this.window = config.windowSize;
  this.actualWordCount = new AtomicInteger();
  this.alpha = config.initialLearningRate;
  // Input vectors, hierarchical-softmax weights, and negative-sampling weights.
  this.syn0 = new double[vocabSize][layer1_size];
  this.syn1 = new double[vocabSize][layer1_size];
  this.syn1neg = new double[vocabSize][layer1_size];
  this.table = new int[TABLE_SIZE];
  initializeSyn0();
  initializeUnigramTable();
}
/** Builds a CBOW training worker that processes one batch of sentences. */
@Override Worker createWorker(int randomSeed, int iter, Iterable<List<String>> batch) {
  CBOWWorker worker = new CBOWWorker(randomSeed, iter, batch);
  return worker;
} }
/** Builds a skip-gram training worker that processes one batch of sentences. */
@Override Worker createWorker(int randomSeed, int iter, Iterable<List<String>> batch) {
  SkipGramWorker worker = new SkipGramWorker(randomSeed, iter, batch);
  return worker;
} }
/** Creates a trainer that learns CBOW word vectors for the given vocabulary. */
@Override NeuralNetworkTrainer createTrainer(NeuralNetworkConfig config, Multiset<String> counts, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  CBOWModelTrainer trainer = new CBOWModelTrainer(config, counts, huffmanNodes, listener);
  return trainer;
}
/**
 * @return a {@link NeuralNetworkTrainer} of the configured {@code type}, wired
 *         to the given vocabulary, Huffman coding, and progress listener
 */
public NeuralNetworkTrainer createTrainer(ImmutableMultiset<String> vocab, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  return type.createTrainer(this, vocab, huffmanNodes, listener);
}
/** Creates a trainer that learns skip-gram word vectors for the given vocabulary. */
@Override NeuralNetworkTrainer createTrainer(NeuralNetworkConfig config, Multiset<String> counts, Map<String, HuffmanNode> huffmanNodes, TrainingProgressListener listener) {
  SkipGramModelTrainer trainer = new SkipGramModelTrainer(config, counts, huffmanNodes, listener);
  return trainer;
}
/** Human-readable summary of this training configuration. */
@Override public String toString() {
  String softmax = useHierarchicalSoftmax ? "using" : "not using";
  return String.format(
      "%s with %s threads, %s iterations[%s layer size, %s window, %s hierarchical softmax, %s negative samples, %s initial learning rate, %s down sample rate]",
      type.name(), numThreads, iterations, layerSize, windowSize, softmax, negativeSamples, initialLearningRate, downSampleRate);
} }
// NOTE(review): mid-method fragment of negative sampling — the positive example
// gets label 1; otherwise a negative target is drawn from the unigram table with
// a sign-safe index ((x % N) + N) % N, and target == 0 is handled specially
// (the continuation is not visible here).
label = 1; } else { nextRandom = incrementRandom(nextRandom); target = table[(int) (((nextRandom >> 16) % TABLE_SIZE) + TABLE_SIZE) % TABLE_SIZE]; if (target == 0)