/** Projects a mention embedding into the anaphor representation space. */
public SimpleMatrix getAnaphorEmbedding(SimpleMatrix mentionEmbedding) {
    return anaphorMatrix.mult(mentionEmbedding);
}

/** Projects a mention embedding into the antecedent representation space. */
public SimpleMatrix getAntecedentEmbedding(SimpleMatrix mentionEmbedding) {
    return antecedentMatrix.mult(mentionEmbedding);
}
/**
 * Scores a feature vector by feeding it through a stack of affine layers.
 * The weights list alternates weight matrices (even indices) and bias
 * vectors (odd indices); ReLU is applied after every layer except the
 * final one, which has a single row and produces a scalar score.
 */
private static double score(SimpleMatrix features, List<SimpleMatrix> weights) {
    for (int i = 0; i < weights.size(); i += 2) {
        features = weights.get(i).mult(features).plus(weights.get(i + 1));
        if (weights.get(i).numRows() > 1) {
            features = NeuralUtils.elementwiseApplyReLU(features);
        }
    }
    return features.elementSum();
}
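// A minimal usage sketch for score() with hypothetical dimensions: a 3->2
// hidden layer followed by a 2->1 output layer, so the weights list alternates
// weight matrix and bias. None of these names come from the original model;
// java.util.List/ArrayList/Random are assumed imported.
List<SimpleMatrix> weights = new ArrayList<>();
weights.add(SimpleMatrix.random(2, 3, -1, 1, new Random(42))); // hidden weights (2x3)
weights.add(new SimpleMatrix(2, 1));                           // hidden bias (zeros)
weights.add(SimpleMatrix.random(1, 2, -1, 1, new Random(43))); // output weights (1x2)
weights.add(new SimpleMatrix(1, 1));                           // output bias (zeros)
double s = score(new SimpleMatrix(3, 1), weights); // zero input -> score 0.0 here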
/**
 * Returns a column vector where each entry is the nth bilinear
 * product of the nth slices of the two tensors.
 */
public SimpleMatrix bilinearProducts(SimpleMatrix in) {
    if (in.numCols() != 1) {
        throw new AssertionError("Expected a column vector");
    }
    if (in.numRows() != numCols) {
        throw new AssertionError("Number of rows in the input does not match number of columns in tensor");
    }
    if (numRows != numCols) {
        throw new AssertionError("Can only perform this operation on a SimpleTensor with square slices");
    }
    SimpleMatrix inT = in.transpose();
    SimpleMatrix out = new SimpleMatrix(numSlices, 1);
    for (int slice = 0; slice < numSlices; ++slice) {
        double result = inT.mult(slices[slice]).mult(in).get(0);
        out.set(slice, result);
    }
    return out;
}
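// Worked example of the per-slice computation in bilinearProducts(): for
// x = (1, 2)^T and a single 2x2 slice A, each output entry is x^T A x.
// This uses plain EJML, independent of the SimpleTensor class above.
SimpleMatrix x = new SimpleMatrix(2, 1, true, new double[] {1, 2});
SimpleMatrix A = new SimpleMatrix(2, 2, true, new double[] {1, 0, 0, 3});
double xAx = x.transpose().mult(A).mult(x).get(0); // 1*1*1 + 3*2*2 = 13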
/**
 * Compute dot product between two vectors.
 */
public static double dot(SimpleMatrix vector1, SimpleMatrix vector2) {
    if (vector1.numRows() == 1) {
        // vector1 is a row vector; assume vector2 is a row vector too
        return vector1.mult(vector2.transpose()).get(0);
    } else if (vector1.numCols() == 1) {
        // vector1 is a column vector; assume vector2 is a column vector too
        return vector1.transpose().mult(vector2).get(0);
    } else {
        throw new AssertionError("Error in neural.Utils.dot: vector1 is a matrix " +
                vector1.numRows() + " x " + vector1.numCols());
    }
}
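// Minimal usage sketch for dot(): both column vectors, so the second branch runs.
SimpleMatrix colA = new SimpleMatrix(3, 1, true, new double[] {1, 2, 3});
SimpleMatrix colB = new SimpleMatrix(3, 1, true, new double[] {4, 5, 6});
double d = dot(colA, colB); // 1*4 + 2*5 + 3*6 = 32.0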
/**
 * Recursively accumulates the world matrix of the bone at the given index:
 * the chain is stored root-last, so each bone's world transform is its
 * parent's world transform multiplied by its own local transform.
 */
public Matrix getWorldMatrix(int index) {
    if (index == this.size() - 1) {
        return new Matrix(localBonesMatrices.get(this.size() - 1));
    }
    SimpleMatrix result = this.getWorldMatrix(index + 1);
    result = result.mult(localBonesMatrices.get(index));
    return new Matrix(result);
}
public double getPairwiseScore(SimpleMatrix antecedentEmbedding, SimpleMatrix anaphorEmbedding,
        SimpleMatrix pairFeatures) {
    SimpleMatrix firstLayerOutput = NeuralUtils.elementwiseApplyReLU(
            antecedentEmbedding
                    .plus(anaphorEmbedding)
                    .plus(pairFeaturesMatrix.mult(pairFeatures))
                    .plus(pairwiseFirstLayerBias));
    return score(firstLayerOutput, pairwiseModel);
}
public void setWorldTransform(BoneContext bone, DTransform transform) {
    int index = this.indexOf(bone);
    Matrix boneMatrix = transform.toMatrix();
    if (index < this.size() - 1) {
        // computing the current bone's local transform from its parent's world transform
        Matrix parentWorldMatrix = this.getWorldMatrix(index + 1);
        SimpleMatrix m = parentWorldMatrix.invert().mult(boneMatrix);
        boneMatrix = new Matrix(m);
    }
    localBonesMatrices.set(index, boneMatrix);
}
public BonesChain(Bone bone, boolean useTail, int bonesAffected, Collection<Long> alteredOmas,
        BlenderContext blenderContext) {
    if (bone != null) {
        ConstraintHelper constraintHelper = blenderContext.getHelper(ConstraintHelper.class);
        if (!useTail) {
            bone = bone.getParent();
        }
        while (bone != null && (bonesAffected <= 0 || this.size() < bonesAffected)) {
            BoneContext boneContext = blenderContext.getBoneContext(bone);
            this.add(boneContext);
            alteredOmas.add(boneContext.getBoneOma());
            Transform transform = constraintHelper.getTransform(boneContext.getArmatureObjectOMA(),
                    boneContext.getBone().getName(), Space.CONSTRAINT_SPACE_WORLD);
            localBonesMatrices.add(new DTransform(transform).toMatrix());
            bone = bone.getParent();
        }
        if (localBonesMatrices.size() > 0) {
            // making the matrices describe the local transformation
            Matrix parentWorldMatrix = localBonesMatrices.get(localBonesMatrices.size() - 1);
            for (int i = localBonesMatrices.size() - 2; i >= 0; --i) {
                SimpleMatrix m = parentWorldMatrix.invert().mult(localBonesMatrices.get(i));
                parentWorldMatrix = localBonesMatrices.get(i);
                localBonesMatrices.set(i, new Matrix(m));
            }
        }
    }
}
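// The loop above implements local = parentWorld^{-1} * world for each bone.
// A standalone sketch of that identity with plain 4x4 transforms (all values
// here are illustrative):
SimpleMatrix parentWorld = SimpleMatrix.identity(4);
parentWorld.set(0, 3, 2.0); // parent translated 2 units along x
SimpleMatrix world = SimpleMatrix.identity(4);
world.set(0, 3, 5.0);       // bone at x = 5 in world space
SimpleMatrix local = parentWorld.invert().mult(world); // translation becomes x = 3, relative to the parent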
private static SimpleTensor getTensorGradient(SimpleMatrix deltaFull, SimpleMatrix leftVector,
        SimpleMatrix rightVector) {
    int size = deltaFull.getNumElements();
    SimpleTensor Wt_df = new SimpleTensor(size * 2, size * 2, size);
    // TODO: combine this concatenation with computeTensorDeltaDown?
    SimpleMatrix fullVector = NeuralUtils.concatenate(leftVector, rightVector);
    for (int slice = 0; slice < size; ++slice) {
        Wt_df.setSlice(slice, fullVector.scale(deltaFull.get(slice)).mult(fullVector.transpose()));
    }
    return Wt_df;
}
private static SimpleMatrix computeTensorDeltaDown(SimpleMatrix deltaFull, SimpleMatrix leftVector,
        SimpleMatrix rightVector, SimpleMatrix W, SimpleTensor Wt) {
    SimpleMatrix WTDelta = W.transpose().mult(deltaFull);
    SimpleMatrix WTDeltaNoBias = WTDelta.extractMatrix(0, deltaFull.numRows() * 2, 0, 1);
    int size = deltaFull.getNumElements();
    SimpleMatrix deltaTensor = new SimpleMatrix(size * 2, 1);
    SimpleMatrix fullVector = NeuralUtils.concatenate(leftVector, rightVector);
    for (int slice = 0; slice < size; ++slice) {
        SimpleMatrix scaledFullVector = fullVector.scale(deltaFull.get(slice));
        deltaTensor = deltaTensor.plus(Wt.getSlice(slice).plus(Wt.getSlice(slice).transpose()).mult(scaledFullVector));
    }
    return deltaTensor.plus(WTDeltaNoBias);
}
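// The two methods above follow from differentiating the bilinear form: with x
// the concatenated child vector and d_k the incoming delta for slice k of the
// output x^T W_k x,
//   dE/dW_k = d_k * x * x^T                      (getTensorGradient)
//   dE/dx   = sum_k d_k * (W_k + W_k^T) * x      (tensor term in computeTensorDeltaDown)
// plus the usual W^T * delta contribution from the affine part, with the bias
// row dropped (WTDeltaNoBias).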
@SuppressWarnings("unchecked") public Matrix pseudoinverse(double lambda) { SimpleSVD<SimpleMatrix> simpleSVD = this.svd(); SimpleMatrix U = simpleSVD.getU(); SimpleMatrix S = simpleSVD.getW(); SimpleMatrix V = simpleSVD.getV(); int N = Math.min(this.numRows(),this.numCols()); double maxSingular = 0; for( int i = 0; i < N; ++i ) { if( S.get(i, i) > maxSingular ) { maxSingular = S.get(i, i); } } double tolerance = FastMath.DBL_EPSILON * Math.max(this.numRows(),this.numCols()) * maxSingular; for(int i=0;i<Math.min(S.numRows(), S.numCols());++i) { double a = S.get(i, i); if(a <= tolerance) { a = 0; } else { a = a/(a * a + lambda * lambda); } S.set(i, i, a); } return new Matrix(V.mult(S.transpose()).mult(U.transpose())); }
// forward step: affine transform of the child vector followed by a tanh nonlinearity
SimpleMatrix currentVector = W.mult(childVec);
currentVector = NeuralUtils.elementwiseApplyTanh(currentVector);
nodeVectors.put(tree, currentVector);
// fragment: composing a parent node vector (the opening "if" of the tensor
// branch is not shown in this excerpt)
    SimpleMatrix tensorIn = NeuralUtils.concatenate(leftVector, rightVector);
    SimpleMatrix tensorOut = tensor.bilinearProducts(tensorIn);
    nodeVector = NeuralUtils.elementwiseApplyTanh(W.mult(childrenVector).plus(tensorOut));
} else {
    nodeVector = NeuralUtils.elementwiseApplyTanh(W.mult(childrenVector));
}
SimpleMatrix predictions = NeuralUtils.softmax(classification.mult(NeuralUtils.concatenateWithBias(nodeVector)));
SimpleMatrix WTdelta = W.transpose().mult(deltaCurrent);

// binary (two-child) branch: accumulate the transform-matrix gradient
childrenVector = concatenateContextWords(childrenVector, tree.getSpan(), words);
SimpleMatrix W_df = deltaCurrent.mult(childrenVector.transpose());
binaryW_dfs.put(leftLabel, rightLabel, binaryW_dfs.get(leftLabel, rightLabel).plus(W_df));

// unary branch: the same outer product against the single child vector
childVectorWithBias = concatenateContextWords(childVectorWithBias, tree.getSpan(), words);
SimpleMatrix W_df = deltaCurrent.mult(childVectorWithBias.transpose());
SimpleMatrix localCD = deltaClass.mult(NeuralUtils.concatenateWithBias(currentVector).transpose());

// unary branch: backprop the classification error through the unary classifier
SimpleMatrix deltaFromClass = model.getUnaryClassification(category).transpose().mult(deltaClass);
deltaFromClass = deltaFromClass.extractMatrix(0, model.op.numHid, 0, 1).elementMult(currentVectorDerivative);
SimpleMatrix deltaFull = deltaFromClass.plus(deltaUp);

// binary branch: same pattern with the binary classifier, followed by the
// gradient for the binary transform and the delta passed down to the children
SimpleMatrix deltaFromClass = model.getBinaryClassification(leftCategory, rightCategory).transpose().mult(deltaClass);
deltaFromClass = deltaFromClass.extractMatrix(0, model.op.numHid, 0, 1).elementMult(currentVectorDerivative);
SimpleMatrix deltaFull = deltaFromClass.plus(deltaUp);
SimpleMatrix rightVector = RNNCoreAnnotations.getNodeVector(tree.children()[1]);
SimpleMatrix childrenVector = NeuralUtils.concatenateWithBias(leftVector, rightVector);
SimpleMatrix W_df = deltaFull.mult(childrenVector.transpose());
binaryTD.put(leftCategory, rightCategory, binaryTD.get(leftCategory, rightCategory).plus(W_df));
SimpleMatrix deltaDown;
// (the opening "if" of the tensor branch is not shown in this excerpt)
    deltaDown = computeTensorDeltaDown(deltaFull, leftVector, rightVector,
            model.getBinaryTransform(leftCategory, rightCategory),
            model.getBinaryTensor(leftCategory, rightCategory));
} else {
    deltaDown = model.getBinaryTransform(leftCategory, rightCategory).transpose().mult(deltaFull);
}
/**
 * Computes the local (ENU) coordinates of the given target with respect to
 * this position and stores them in the enu field.
 *
 * @param target the coordinates to express in the local frame
 */
public void computeLocal(Coordinates target) {
    if (this.geod == null)
        computeGeodetic();
    SimpleMatrix R = rotationMatrix(this);
    enu = R.mult(target.minusXYZ(this));
}
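// For reference, the standard ECEF-to-ENU rotation for geodetic latitude phi
// and longitude lam is (assuming rotationMatrix builds the usual form):
//
//   R = [      -sin(lam)           cos(lam)        0     ]
//       [ -sin(phi)cos(lam)  -sin(phi)sin(lam)  cos(phi) ]
//       [  cos(phi)cos(lam)   cos(phi)sin(lam)  sin(phi) ]
//
// so enu = R * (target - origin) in Cartesian XYZ.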
private static void addTask(DenseMatrix64F taskJacobian, DenseMatrix64F taskObjective,
        DenseMatrix64F taskWeight, DenseMatrix64F hToModify, DenseMatrix64F fToModify) {
    SimpleMatrix J = new SimpleMatrix(taskJacobian);
    SimpleMatrix W = new SimpleMatrix(taskWeight);
    SimpleMatrix b = new SimpleMatrix(taskObjective);
    SimpleMatrix H = J.transpose().mult(W).mult(J);
    SimpleMatrix f = J.transpose().mult(W).mult(b);
    CommonOps.add(hToModify, H.getMatrix(), hToModify);
    CommonOps.add(fToModify, f.getMatrix(), fToModify);
}
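// addTask() accumulates a weighted least-squares task ||J x - b||^2_W into a
// quadratic program: expanding the cost gives x^T (J^T W J) x - 2 x^T (J^T W b)
// plus a constant, so each task contributes H += J^T W J and f += J^T W b to
// the stacked objective (how H and f enter the final QP depends on the
// solver's sign conventions).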
public static double quality(DenseMatrix64F orig, DenseMatrix64F U, DenseMatrix64F W, DenseMatrix64F Vt) {
    SimpleMatrix _U = SimpleMatrix.wrap(U);
    SimpleMatrix _W = SimpleMatrix.wrap(W);
    SimpleMatrix _Vt = SimpleMatrix.wrap(Vt);
    SimpleMatrix foundA = _U.mult(_W).mult(_Vt);
    return SpecializedOps.diffNormF(orig, foundA.getMatrix()) / foundA.normF();
}
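// Hedged usage sketch: decompose a matrix with EJML's SVD and check the
// relative Frobenius-norm reconstruction error; values near zero indicate a
// faithful U*W*Vt. Assumes the DenseMatrix64F-era EJML API used throughout
// these snippets (RandomMatrices, DecompositionFactory).
DenseMatrix64F A = RandomMatrices.createRandom(4, 3, new Random(7));
SingularValueDecomposition<DenseMatrix64F> svd =
        DecompositionFactory.svd(A.numRows, A.numCols, true, true, false);
svd.decompose(A);
double q = quality(A, svd.getU(null, false), svd.getW(null), svd.getV(null, true));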