/**
 * Sigmoid function, applied element-wise.
 *
 * @param ndArray the input array; left unmodified (a copy is transformed)
 * @return a new array holding sigmoid(ndArray)
 */
public static INDArray sigmoid(INDArray ndArray) {
    // second argument true = operate on a copy, preserving the input
    return sigmoid(ndArray, true);
}
/**
 * Derivative of the sigmoid function, applied element-wise.
 *
 * @param ndArray the input array; left unmodified (a copy is transformed)
 * @return a new array holding the sigmoid derivative of {@code ndArray}
 */
public static INDArray sigmoidDerivative(INDArray ndArray) {
    // BUG FIX: previously delegated to sigmoid(ndArray, true), returning the
    // sigmoid itself instead of its derivative. Delegate to the two-argument
    // sigmoidDerivative overload (true = operate on a copy), mirroring sigmoid's
    // own delegate pattern.
    return sigmoidDerivative(ndArray, true);
}
public static void main(String[] args) { INDArray nd = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, new int[]{2, 6}); INDArray nd2 = Nd4j.create(new float[]{15,16,17,18,19,20,21,22,23,24,25,26,27,28}, new int[]{2, 7}); INDArray ndv; // a placeholder variable to print out and leave the original data unchanged //this normalizes data and helps activate artificial neurons in deep-learning nets and assigns it to var ndv ndv = sigmoid(nd); System.out.println(ndv); //this gives you absolute value ndv = abs(nd); System.out.println(ndv); //a hyperbolic function to transform data much like sigmoid. ndv = tanh(nd); System.out.println(ndv); // ndv = hardTanh(nd); // System.out.println(ndv); //exponentiation ndv = exp(nd); System.out.println(ndv); //square root ndv = sqrt(nd); System.out.println(ndv); } }
/**
 * Applies the sigmoid function to this tensor in place.
 *
 * @return this tensor, after mutation (fluent style)
 */
@Override
public DoubleTensor sigmoidInPlace() {
    // copy=false: Transforms.sigmoid mutates the wrapped tensor directly
    Transforms.sigmoid(tensor, false);
    return this;
}
/**
 * Sigmoid function, applied element-wise.
 *
 * @param ndArray the input array
 * @return the sigmoid of {@code ndArray}; whether a copy or the input itself is
 *         returned is governed by the global {@code Nd4j.copyOnOps} flag
 */
public static INDArray sigmoid(INDArray ndArray) {
    // Nd4j.copyOnOps decides copy-vs-in-place globally for transform ops
    return sigmoid(ndArray, Nd4j.copyOnOps);
}
/**
 * Derivative of the sigmoid function, applied element-wise.
 *
 * @param ndArray the input array
 * @return the sigmoid derivative of {@code ndArray}; whether a copy or the input
 *         itself is returned is governed by the global {@code Nd4j.copyOnOps} flag
 */
public static INDArray sigmoidDerivative(INDArray ndArray) {
    // BUG FIX: previously delegated to sigmoid(ndArray, Nd4j.copyOnOps), returning
    // the sigmoid itself instead of its derivative. Delegate to the two-argument
    // sigmoidDerivative overload, mirroring sigmoid's own delegate pattern.
    return sigmoidDerivative(ndArray, Nd4j.copyOnOps);
}
/**
 * Propagates the visible layer up to the hidden layer:
 * activation(v * W + hbias), where the activation depends on the configured
 * hidden unit type.
 *
 * @param v the visible layer input
 * @param training whether this pass is part of training (forwarded to preOutput)
 * @return the approximated activations of the hidden layer
 * @throws IllegalStateException if the configured hidden unit type is unsupported
 */
public INDArray propUp(INDArray v, boolean training) {
    INDArray preSig = preOutput(v, training);
    switch (layerConf().getHiddenUnit()) {
        case IDENTITY:
            // linear units: the pre-activation is the activation
            return preSig;
        case BINARY:
            // classic RBM binary units: sigmoid of the pre-activation
            return sigmoid(preSig);
        case GAUSSIAN:
            // stochastic units: sample from N(preSig, 1) instead of a
            // deterministic activation
            Distribution dist = Nd4j.getDistributions().createNormal(preSig, 1);
            preSig = dist.sample(preSig.shape());
            return preSig;
        case RECTIFIED:
            preSig = max(preSig, 0.0);
            return preSig;
        case SOFTMAX:
            return Nd4j.getExecutioner()
                    .execAndReturn(Nd4j.getOpFactory().createTransform("softmax", preSig));
        default:
            // Message fixed to list every unit type this switch actually supports
            // (previously omitted identity and softmax).
            throw new IllegalStateException(
                    "Hidden unit type should be identity, binary, gaussian, rectified linear or softmax "
                            + layerId());
    }
}
/**
 * Propagates the hidden layer down to the visible layer:
 * activation(h * W^T + vbias), where the activation depends on the configured
 * visible unit type.
 *
 * @param h the hidden layer activations
 * @return the approximated activations of the visible layer
 * @throws IllegalStateException if the configured visible unit type is unsupported
 */
public INDArray propDown(INDArray h) {
    // reuse the same weight matrix as propUp, transposed for the downward pass
    INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY).transpose();
    INDArray vBias = getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY);
    INDArray vMean = h.mmul(W).addiRowVector(vBias);
    switch (layerConf().getVisibleUnit()) {
        case IDENTITY:
            return vMean;
        case BINARY:
            // classic RBM binary units: sigmoid of the pre-activation
            return sigmoid(vMean);
        case GAUSSIAN:
            // stochastic units: sample from N(vMean, 1) instead of a
            // deterministic activation
            Distribution dist = Nd4j.getDistributions().createNormal(vMean, 1);
            vMean = dist.sample(vMean.shape());
            return vMean;
        case LINEAR:
            return vMean;
        case SOFTMAX:
            return Nd4j.getExecutioner()
                    .execAndReturn(Nd4j.getOpFactory().createTransform("softmax", vMean));
        default:
            // Message fixed to list every unit type this switch actually supports
            // (previously omitted identity, linear and softmax).
            throw new IllegalStateException(
                    "Visible unit type should be identity, binary, gaussian, linear or softmax "
                            + layerId());
    }
}
// Element-wise sigmoid of hProb; the name suggests this is the mean activation of
// hidden layer h1 — presumably hProb holds the hidden pre-activations, TODO confirm
// against the surrounding (not visible here) code.
INDArray sigH1Mean = sigmoid(hProb);