/**
 * Calculates the change in each of the node's weights (delta rule with
 * momentum) and applies it in place.
 *
 * @param node The node to update the weights for.
 * @param learn The learning rate to use.
 * @param momentum The momentum to use.
 */
public void updateWeights(NeuralNode node, double learn, double momentum) {
  NeuralConnection[] inputs = node.getInputs();
  double[] cWeights = node.getChangeInWeights();
  double[] weights = node.getWeights();
  // learn * error is the same for every weight of this node, so hoist it.
  // (Was previously initialized to 0 and immediately reassigned.)
  double learnTimesError = learn * node.errorValue(false);
  // Index 0 is the threshold (bias) weight; its input is implicitly 1.
  double c = learnTimesError + momentum * cWeights[0];
  weights[0] += c;
  cWeights[0] = c;
  // Remaining weight slots pair with the actual inputs, offset by one
  // because of the bias slot at index 0.
  int stopValue = node.getNumInputs() + 1;
  for (int noa = 1; noa < stopValue; noa++) {
    c = learnTimesError * inputs[noa - 1].outputValue(false);
    c += momentum * cWeights[noa];
    weights[noa] += c;
    cWeights[noa] = c;
  }
}
/** * This function calculates what the error value should be. * @param node The node to calculate the error for. * @return The error. */ public double errorValue(NeuralNode node) { //then calculate the error. NeuralConnection[] outputs = node.getOutputs(); int[] oNums = node.getOutputNums(); double error = 0; for (int noa = 0; noa < node.getNumOutputs(); noa++) { error += outputs[noa].errorValue(true) * outputs[noa].weightValue(oNums[noa]); } double value = node.outputValue(false); error *= value * (1 - value); return error; }
/**
 * Calculates the output value for the given node: the weighted sum of
 * its inputs plus the threshold (bias) weight, with no squashing applied.
 *
 * @param node The node to calculate the value for.
 * @return The value.
 */
public double outputValue(NeuralNode node) {
  double[] w = node.getWeights();
  NeuralConnection[] sources = node.getInputs();
  int count = node.getNumInputs();
  // w[0] is the bias; input weights start at index 1.
  double sum = w[0];
  for (int i = 0; i < count; i++) {
    sum += sources[i].outputValue(true) * w[i + 1];
  }
  return sum;
}
NeuralNode temp = new NeuralNode(String.valueOf(m_nextId), m_random, m_sigmoidUnit); m_nextId++; temp.setX(.5 / (num) * noa + .25); temp.setY((nob + 1.0) / (val + 1)); addNode(temp); if (noa > 0) {
weights = con.getWeights(); inputs = con.getInputs(); if (con.getMethod() instanceof SigmoidUnit) { model.append("Sigmoid "); } else if (con.getMethod() instanceof LinearUnit) { model.append("Linear "); model.append("Node " + con.getId() + "\n Inputs Weights\n"); model.append(" Threshold " + weights[0] + "\n"); for (int nob = 1; nob < con.getNumInputs() + 1; nob++) { if ((inputs[nob - 1].getType() & NeuralConnection.PURE_INPUT) == NeuralConnection.PURE_INPUT) { model.append(" Attrib "
/** * This function calculates what the error value should be. * @param node The node to calculate the error for. * @return The error. */ public double errorValue(NeuralNode node) { //then calculate the error. NeuralConnection[] outputs = node.getOutputs(); int[] oNums = node.getOutputNums(); double error = 0; for (int noa = 0; noa < node.getNumOutputs(); noa++) { error += outputs[noa].errorValue(true) * outputs[noa].weightValue(oNums[noa]); } return error; }
NeuralNode temp = new NeuralNode(String.valueOf(m_nextId), m_random, m_sigmoidUnit); m_nextId++; temp.setX(.5 / (num) * noa + .25); temp.setY((nob + 1.0) / (val + 1)); addNode(temp); if (noa > 0) {
weights = con.getWeights(); inputs = con.getInputs(); if (con.getMethod() instanceof SigmoidUnit) { model.append("Sigmoid "); } else if (con.getMethod() instanceof LinearUnit) { model.append("Linear "); model.append("Node " + con.getId() + "\n Inputs Weights\n"); model.append(" Threshold " + weights[0] + "\n"); for (int nob = 1; nob < con.getNumInputs() + 1; nob++) { if ((inputs[nob - 1].getType() & NeuralConnection.PURE_INPUT) == NeuralConnection.PURE_INPUT) { model.append(" Attrib "
/**
 * Calculates the output value for the given node as a plain linear
 * combination: bias weight plus the dot product of input activations
 * and their weights.
 *
 * @param node The node to calculate the value for.
 * @return The value.
 */
public double outputValue(NeuralNode node) {
  double[] nodeWeights = node.getWeights();
  NeuralConnection[] conns = node.getInputs();
  int n = node.getNumInputs();
  double result = nodeWeights[0]; // bias term
  int idx = 0;
  while (idx < n) {
    result += conns[idx].outputValue(true) * nodeWeights[idx + 1];
    idx++;
  }
  return result;
}
/** * This function calculates what the error value should be. * @param node The node to calculate the error for. * @return The error. */ public double errorValue(NeuralNode node) { //then calculate the error. NeuralConnection[] outputs = node.getOutputs(); int[] oNums = node.getOutputNums(); double error = 0; for (int noa = 0; noa < node.getNumOutputs(); noa++) { error += outputs[noa].errorValue(true) * outputs[noa].weightValue(oNums[noa]); } return error; }
/**
 * Calculates the change in each of the node's weights (delta rule with
 * momentum) and applies it in place.
 *
 * @param node The node to update the weights for.
 * @param learn The learning rate to use.
 * @param momentum The momentum to use.
 */
public void updateWeights(NeuralNode node, double learn, double momentum) {
  NeuralConnection[] inputs = node.getInputs();
  double[] cWeights = node.getChangeInWeights();
  double[] weights = node.getWeights();
  // learn * error is loop-invariant, so compute it once.
  // (Removed a redundant "= 0" initialization that was immediately overwritten.)
  double learnTimesError = learn * node.errorValue(false);
  // Slot 0 holds the threshold (bias) weight; its input is implicitly 1.
  double c = learnTimesError + momentum * cWeights[0];
  weights[0] += c;
  cWeights[0] = c;
  // Input weights occupy slots 1..numInputs, hence the noa-1 indexing.
  int stopValue = node.getNumInputs() + 1;
  for (int noa = 1; noa < stopValue; noa++) {
    c = learnTimesError * inputs[noa - 1].outputValue(false);
    c += momentum * cWeights[noa];
    weights[noa] += c;
    cWeights[noa] = c;
  }
}
NeuralNode temp = new NeuralNode(String.valueOf(m_nextId), m_random, m_sigmoidUnit); m_nextId++; temp.setX((double) e.getX() / w); temp.setY((double) e.getY() / h); tmp.add(temp); addNode(temp);
/** * This function calculates what the output value should be. * @param node The node to calculate the value for. * @return The value. */ public double outputValue(NeuralNode node) { double[] weights = node.getWeights(); NeuralConnection[] inputs = node.getInputs(); double value = weights[0]; for (int noa = 0; noa < node.getNumInputs(); noa++) { value += inputs[noa].outputValue(true) * weights[noa+1]; } //this I got from the Neural Network faq to combat overflow //pretty simple solution really :) if (value < -45) { value = 0; } else if (value > 45) { value = 1; } else { value = 1 / (1 + Math.exp(-value)); } return value; }
/** * This function calculates what the error value should be. * @param node The node to calculate the error for. * @return The error. */ public double errorValue(NeuralNode node) { //then calculate the error. NeuralConnection[] outputs = node.getOutputs(); int[] oNums = node.getOutputNums(); double error = 0; for (int noa = 0; noa < node.getNumOutputs(); noa++) { error += outputs[noa].errorValue(true) * outputs[noa].weightValue(oNums[noa]); } double value = node.outputValue(false); error *= value * (1 - value); return error; }
/**
 * Calculates the change in each of the node's weights (delta rule with
 * momentum) and applies it in place.
 *
 * @param node The node to update the weights for.
 * @param learn The learning rate to use.
 * @param momentum The momentum to use.
 */
public void updateWeights(NeuralNode node, double learn, double momentum) {
  NeuralConnection[] inputs = node.getInputs();
  double[] cWeights = node.getChangeInWeights();
  double[] weights = node.getWeights();
  // Hoisted: learn * error is identical for every weight of this node.
  // (The original declared learnTimesError = 0 and then reassigned it.)
  double learnTimesError = learn * node.errorValue(false);
  // weights[0] is the bias; its delta has no input-activation factor.
  double c = learnTimesError + momentum * cWeights[0];
  weights[0] += c;
  cWeights[0] = c;
  // Input i drives weight slot i+1 (slot 0 is taken by the bias).
  int stopValue = node.getNumInputs() + 1;
  for (int noa = 1; noa < stopValue; noa++) {
    c = learnTimesError * inputs[noa - 1].outputValue(false);
    c += momentum * cWeights[noa];
    weights[noa] += c;
    cWeights[noa] = c;
  }
}
NeuralNode temp = new NeuralNode(String.valueOf(m_nextId), m_random, m_sigmoidUnit); m_nextId++; temp.setX((double) e.getX() / w); temp.setY((double) e.getY() / h); tmp.add(temp); addNode(temp);
/** * This function calculates what the output value should be. * @param node The node to calculate the value for. * @return The value. */ public double outputValue(NeuralNode node) { double[] weights = node.getWeights(); NeuralConnection[] inputs = node.getInputs(); double value = weights[0]; for (int noa = 0; noa < node.getNumInputs(); noa++) { value += inputs[noa].outputValue(true) * weights[noa+1]; } //this I got from the Neural Network faq to combat overflow //pretty simple solution really :) if (value < -45) { value = 0; } else if (value > 45) { value = 1; } else { value = 1 / (1 + Math.exp(-value)); } return value; }
/**
 * Calculates the change in each of the node's weights (delta rule with
 * momentum) and applies it in place.
 *
 * @param node The node to update the weights for.
 * @param learn The learning rate to use.
 * @param momentum The momentum to use.
 */
public void updateWeights(NeuralNode node, double learn, double momentum) {
  NeuralConnection[] inputs = node.getInputs();
  double[] cWeights = node.getChangeInWeights();
  double[] weights = node.getWeights();
  // Compute learn * error once; it is shared by every weight delta.
  // (Dropped the dead "= 0" initialization that was overwritten at once.)
  double learnTimesError = learn * node.errorValue(false);
  // The bias weight lives at index 0 and has an implicit input of 1.
  double c = learnTimesError + momentum * cWeights[0];
  weights[0] += c;
  cWeights[0] = c;
  // Each real input feeds the weight one slot ahead of it.
  int stopValue = node.getNumInputs() + 1;
  for (int noa = 1; noa < stopValue; noa++) {
    c = learnTimesError * inputs[noa - 1].outputValue(false);
    c += momentum * cWeights[noa];
    weights[noa] += c;
    cWeights[noa] = c;
  }
}
/**
 * This creates the required output units: one NeuralEnd per class
 * (named after the class attribute itself when the class is numeric,
 * otherwise after each class value), each fed by a freshly created
 * sigmoid node. Units are spread evenly down the display vertically.
 *
 * @throws Exception if a connection cannot be made.
 */
private void setupOutputs() throws Exception {
  m_outputs = new NeuralEnd[m_numClasses];
  for (int index = 0; index < m_numClasses; index++) {
    // Numeric class: a single end named after the attribute.
    // Nominal class: one end per class label.
    String label = m_numeric
      ? m_instances.classAttribute().name()
      : m_instances.classAttribute().value(index);
    m_outputs[index] = new NeuralEnd(label);
    double yPos = (index + 1.0) / (m_numClasses + 1);
    m_outputs[index].setX(.9);
    m_outputs[index].setY(yPos);
    m_outputs[index].setLink(false, index);
    NeuralNode feeder = new NeuralNode(String.valueOf(m_nextId), m_random, m_sigmoidUnit);
    m_nextId++;
    feeder.setX(.75);
    feeder.setY(yPos);
    addNode(feeder);
    NeuralConnection.connect(feeder, m_outputs[index]);
  }
}
/**
 * This creates the required output units. For each class an output end
 * is built (using the class attribute's name for a numeric class, or
 * the class value for a nominal one), positioned on the right of the
 * display, and wired to a new sigmoid node placed just to its left.
 *
 * @throws Exception if a connection cannot be made.
 */
private void setupOutputs() throws Exception {
  m_outputs = new NeuralEnd[m_numClasses];
  for (int slot = 0; slot < m_numClasses; slot++) {
    if (m_numeric) {
      // Single numeric target: label the end with the attribute name.
      m_outputs[slot] = new NeuralEnd(m_instances.classAttribute().name());
    } else {
      // One end per nominal class value.
      m_outputs[slot] = new NeuralEnd(m_instances.classAttribute().value(slot));
    }
    double verticalPos = (slot + 1.0) / (m_numClasses + 1);
    m_outputs[slot].setX(.9);
    m_outputs[slot].setY(verticalPos);
    m_outputs[slot].setLink(false, slot);
    NeuralNode hidden = new NeuralNode(String.valueOf(m_nextId), m_random, m_sigmoidUnit);
    m_nextId++;
    hidden.setX(.75);
    hidden.setY(verticalPos);
    addNode(hidden);
    NeuralConnection.connect(hidden, m_outputs[slot]);
  }
}