@SuppressWarnings("deprecation") private void updateProgressLog(final MasterContext<NNParams, NNParams> context) { int currentIteration = context.getCurrentIteration(); if(context.isFirstIteration()) { // first iteration is used for training preparation return; } String progress = new StringBuilder(200).append(" Trainer ").append(this.trainerId).append(" Epoch #") .append(currentIteration - 1).append(" Training Error:") .append(String.format("%.10f", context.getMasterResult().getTrainError())).append(" Validation Error:") .append(String.format("%.10f", context.getMasterResult().getTestError())).append("\n").toString(); try { LOG.debug("Writing progress results to {} {}", context.getCurrentIteration(), progress.toString()); this.progressOutput.write(progress.getBytes("UTF-8")); this.progressOutput.flush(); this.progressOutput.sync(); } catch (IOException e) { LOG.error("Error in write progress log:", e); } }
/**
 * Template entry point for one master iteration: runs one-time setup on the first
 * iteration (and registers cleanup as a completion callback), then delegates the
 * actual computation to {@link #doCompute}.
 *
 * @param context the master context for the current iteration
 * @return the master result produced by {@code doCompute}
 */
@Override
public MASTER_RESULT compute(MasterContext<MASTER_RESULT, WORKER_RESULT> context) {
    if(context.isFirstIteration()) {
        setup(context);
        // Ensure cleanup fires exactly once, when the whole master job completes.
        context.addCompletionCallBack(new MasterCompletionCallBack<MASTER_RESULT, WORKER_RESULT>() {
            @Override
            public void callback(MasterContext<MASTER_RESULT, WORKER_RESULT> ctx) {
                cleanup(ctx);
            }
        });
    }
    return doCompute(context);
}
@SuppressWarnings("deprecation") private void updateProgressLog(final MasterContext<DTMasterParams, DTWorkerParams> context) { int currentIteration = context.getCurrentIteration(); if(context.isFirstIteration()) {
/**
 * One master iteration of distributed logistic regression.
 *
 * <p>First iteration: initializes state and seeds every weight with a random value in
 * [0, 1). Later iterations: sums the workers' gradient and error contributions, then
 * applies a plain gradient-descent step {@code w -= learnRate * gradient}.
 *
 * @param context master context carrying the per-worker results
 * @return the updated weights to broadcast back to workers
 */
@Override
public LogisticRegressionParams compute(MasterContext<LogisticRegressionParams, LogisticRegressionParams> context) {
    if(context.isFirstIteration()) {
        init(context);
        weights = new double[this.inputNum + 1];
        for(int w = 0; w < weights.length; w++) {
            weights[w] = RANDOM.nextDouble();
        }
    } else {
        double[] gradSum = new double[this.inputNum + 1];
        double totalError = 0.0d;
        int workerCount = 0;
        for(LogisticRegressionParams workerResult: context.getWorkerResults()) {
            if(workerResult != null) {
                for(int j = 0; j < gradSum.length; j++) {
                    gradSum[j] += workerResult.getParameters()[j];
                }
                totalError += workerResult.getError();
            }
            // NOTE(review): null worker results still count toward the average-error
            // denominator below — preserved from the original; confirm if intended.
            workerCount++;
        }
        for(int j = 0; j < weights.length; j++) {
            weights[j] -= learnRate * gradSum[j];
        }
        LOG.debug("DEBUG: Weights: {}", Arrays.toString(this.weights));
        LOG.info("Iteration {} with error {}", context.getCurrentIteration(), totalError / workerCount);
    }
    return new LogisticRegressionParams(weights);
}
/**
 * One master iteration of distributed linear regression.
 *
 * <p>First iteration: initializes state and seeds every weight with a random value in
 * [0, 1). Later iterations: sums the workers' gradient and error contributions, then
 * applies a plain gradient-descent step {@code w -= learnRate * gradient}.
 *
 * <p>Fix: the weight dump is a debug-level detail and is now logged via
 * {@code LOG.debug} for consistency with the sibling regression masters.
 *
 * @param context master context carrying the per-worker results
 * @return the updated weights to broadcast back to workers
 */
@Override
public LinearRegressionParams compute(MasterContext<LinearRegressionParams, LinearRegressionParams> context) {
    if(context.isFirstIteration()) {
        init(context);
        weights = new double[this.inputNum + 1];
        for(int i = 0; i < weights.length; i++) {
            weights[i] = RANDOM.nextDouble();
        }
    } else {
        double[] gradients = new double[this.inputNum + 1];
        double sumError = 0.0d;
        int size = 0;
        for(LinearRegressionParams param: context.getWorkerResults()) {
            if(param != null) {
                for(int i = 0; i < gradients.length; i++) {
                    gradients[i] += param.getParameters()[i];
                }
                sumError += param.getError();
            }
            // NOTE(review): size is incremented even for null results — confirm intended.
            size++;
        }
        for(int i = 0; i < weights.length; i++) {
            weights[i] -= learnRate * gradients[i];
        }
        LOG.debug("DEBUG: Weights: {}", Arrays.toString(this.weights));
        LOG.info("Iteration {} with error {}", context.getCurrentIteration(), sumError / size);
    }
    return new LinearRegressionParams(weights);
}
/**
 * One master iteration of distributed linear regression (template-method variant).
 *
 * <p>First iteration: initializes weights. Later iterations: sums the workers'
 * gradient and error contributions, then applies a plain gradient-descent step
 * {@code w -= learnRate * gradient}.
 *
 * <p>Fix: the weight dump is a debug-level detail and is now logged via
 * {@code LOG.debug} for consistency with the logistic-regression sibling.
 *
 * @param context master context carrying the per-worker results
 * @return the updated weights to broadcast back to workers
 */
@Override
public LinearRegressionParams doCompute(MasterContext<LinearRegressionParams, LinearRegressionParams> context) {
    if(context.isFirstIteration()) {
        initWeights();
    } else {
        double[] gradients = new double[this.inputNum + 1];
        double sumError = 0.0d;
        int size = 0;
        for(LinearRegressionParams param: context.getWorkerResults()) {
            if(param != null) {
                for(int i = 0; i < gradients.length; i++) {
                    gradients[i] += param.getParameters()[i];
                }
                sumError += param.getError();
            }
            // NOTE(review): size is incremented even for null results — confirm intended.
            size++;
        }
        for(int i = 0; i < weights.length; i++) {
            weights[i] -= learnRate * gradients[i];
        }
        LOG.debug("DEBUG: Weights: {}", Arrays.toString(this.weights));
        LOG.info("Iteration {} with error {}", context.getCurrentIteration(), sumError / size);
    }
    return new LinearRegressionParams(weights);
}
/**
 * One master iteration of distributed logistic regression (template-method variant).
 *
 * <p>First iteration: initializes weights. Later iterations: sums the workers'
 * gradient and error contributions, then applies a plain gradient-descent step
 * {@code w -= learnRate * gradient}.
 *
 * @param context master context carrying the per-worker results
 * @return the updated weights to broadcast back to workers
 */
@Override
public LogisticRegressionParams doCompute(MasterContext<LogisticRegressionParams, LogisticRegressionParams> context) {
    if(context.isFirstIteration()) {
        initWeights();
        return new LogisticRegressionParams(weights);
    }
    double[] gradSum = new double[this.inputNum + 1];
    double totalError = 0.0d;
    int workerCount = 0;
    for(LogisticRegressionParams workerResult: context.getWorkerResults()) {
        if(workerResult != null) {
            for(int j = 0; j < gradSum.length; j++) {
                gradSum[j] += workerResult.getParameters()[j];
            }
            totalError += workerResult.getError();
        }
        // NOTE(review): null worker results still count toward the average-error
        // denominator below — preserved from the original; confirm if intended.
        workerCount++;
    }
    for(int j = 0; j < weights.length; j++) {
        weights[j] -= learnRate * gradSum[j];
    }
    LOG.debug("DEBUG: Weights: {}", Arrays.toString(this.weights));
    LOG.info("Iteration {} with error {}", context.getCurrentIteration(), totalError / workerCount);
    return new LogisticRegressionParams(weights);
}
if(!context.isFirstIteration()) { NNParams params = context.getMasterResult(); if(params != null && params.getWeights() != null) {
if(!context.isFirstIteration()) { LogisticRegressionParams lastMasterResult = context.getMasterResult(); if(lastMasterResult != null && lastMasterResult.getParameters() != null) {
if(context.isFirstIteration()) { if(this.isRF) {
if(!context.isFirstIteration()) { LogisticRegressionParams lastMasterResult = context.getMasterResult(); if(lastMasterResult != null && lastMasterResult.getParameters() != null) { if(context.isFirstIteration()) { if(this.isContinuousEnabled) { return initOrRecoverParams(context);
@Override public void init(MasterContext<LogisticRegressionParams, LogisticRegressionParams> context) { this.inputNum = NumberFormatUtils.getInt(LogisticRegressionContants.LR_INPUT_NUM, LogisticRegressionContants.LR_INPUT_DEFAULT_NUM); this.learnRate = NumberFormatUtils.getDouble(LogisticRegressionContants.LR_LEARNING_RATE, LogisticRegressionContants.LR_LEARNING_DEFAULT_RATE); // if not first iteration, means this is fail-over and should be recovered for state in master. if(!context.isFirstIteration()) { LogisticRegressionParams masterResult = context.getMasterResult(); if(masterResult != null && masterResult.getParameters() != null) { this.weights = masterResult.getParameters(); } else { initWeights(); } } }
@Override public void init(MasterContext<LinearRegressionParams, LinearRegressionParams> context) { this.inputNum = NumberFormatUtils.getInt(LinearRegressionContants.LR_INPUT_NUM, LinearRegressionContants.LR_INPUT_DEFAULT_NUM); this.learnRate = NumberFormatUtils.getDouble(LinearRegressionContants.LR_LEARNING_RATE, LinearRegressionContants.LR_LEARNING_DEFAULT_RATE); // not initialized and not first iteration, should be fault tolerence, recover state in LogisticRegressionMaster if(!context.isFirstIteration()) { LinearRegressionParams lastMasterResult = context.getMasterResult(); if(lastMasterResult != null && lastMasterResult.getParameters() != null) { // recover state in current master computable and return to workers this.weights = lastMasterResult.getParameters(); } else { // no weights, restarted from the very beginning, this may not happen initWeights(); } } }
@Override public DTMasterParams doCompute(MasterContext<DTMasterParams, DTWorkerParams> context) { if(context.isFirstIteration()) { return buildInitialMasterParams();
@Override public NNParams doCompute(MasterContext<NNParams, NNParams> context) { if(context.isFirstIteration()) {
if(context.isFirstIteration() || context.getCurrentIteration() == context.getTotalIteration()) {