/**
 * Allocates a double array of the requested size and fills it, in parallel,
 * so that each slot holds its own index (result[i] == i).
 *
 * @param size number of elements to allocate (0 yields an empty array)
 * @return the freshly allocated, index-initialized array
 */
public static double[] parallelInitialize(int size) {
    double[] result = new double[size];
    Arrays.parallelSetAll(result, index -> index);
    return result;
} // END parallelInitialize
/**
 * Demo of parallel array utilities: fills 20000 longs with random values,
 * prints the first ten, sorts in parallel, prints the first ten again.
 */
public static void main(String[] args) {
    long[] data = new long[20000];
    // Each slot gets an independent pseudo-random int in [0, 1000000).
    Arrays.parallelSetAll(data, i -> ThreadLocalRandom.current().nextInt(1000000));
    printFirstTen(data);
    Arrays.parallelSort(data);
    printFirstTen(data);
}

/** Prints the first ten elements, space-separated, followed by a newline. */
private static void printFirstTen(long[] values) {
    Arrays.stream(values).limit(10).forEach(v -> System.out.print(v + " "));
    System.out.println();
} }
/**
 * Removes every element: nulls the backing array so the garbage collector
 * can reclaim the stored references, then resets the logical size to zero.
 */
@Override
public void clear() {
    // Arrays.fill replaces the previous Arrays.parallelSetAll(elementData, i -> null):
    // dispatching a fork-join pass just to write a constant costs more than it
    // saves, and the end state (an all-null array) is identical.
    Arrays.fill(elementData, null);
    size = 0;
}
/**
 * Removes every element: nulls the backing array so the garbage collector
 * can reclaim the stored references, then resets the logical size to zero.
 */
@Override
public void clear() {
    // Arrays.fill replaces the previous Arrays.parallelSetAll(elementData, i -> null):
    // dispatching a fork-join pass just to write a constant costs more than it
    // saves, and the end state (an all-null array) is identical.
    Arrays.fill(elementData, null);
    size = 0;
}
Arrays.parallelSetAll(array, idx -> { try { return someFunc(idx/(N*N), (idx/N)%N, idx % N); } catch(MyCheckedException ex) { throw new RuntimeException(ex); } });
/**
 * Times a parallel stream reduction over boxed {@code Long} values and checks
 * it against the precomputed {@code Summing3.CHECK} sum.
 */
public static void main(String[] args) {
    System.out.println(Summing3.CHECK);
    // Boxed Long[] is deliberate here — presumably this variant measures the
    // autoboxing cost of the reduction; confirm against the sibling benchmarks.
    final Long[] boxed = new Long[Summing3.SZ + 1];
    Arrays.parallelSetAll(boxed, i -> (long) i);
    Summing.timeTest("Long Parallel", Summing3.CHECK,
        () -> Arrays.stream(boxed).parallel().reduce(0L, Long::sum));
} }
/** Benchmark: identity fill of the shared array {@code la} via the parallel variant. */
@Benchmark
public void parallelSetAll() {
    Arrays.parallelSetAll(la, index -> index);
} }
/** Benchmark: identity fill of the shared array {@code la} via the parallel variant. */
@Benchmark
public void parallelSetAll() {
    Arrays.parallelSetAll(la, index -> index);
} }
/**
 * Overwrites every element of this tensor, in parallel, with the value the
 * generator produces for that element's flat index.
 *
 * @param f generator mapping a flat element index to its new value
 * @return this tensor, for call chaining
 */
@Nonnull
public Tensor set(@Nonnull final IntToDoubleFunction f) {
    final double[] data = getData();
    Arrays.parallelSetAll(data, f);
    return this;
}
// Taylor series for e^x: seed array[i] with x/i (1 at i == 0), then a running
// product turns array[i] into x^i / i!.
double[] array = new double[n + 1];
Arrays.parallelSetAll(array, index -> {
    if (index == 0) {
        return 1; // x^0 / 0! — seed of the running product
    }
    return x / index;
});
Arrays.parallelPrefix(array, (lhs, rhs) -> lhs * rhs);
// The final summation could also be a parallelPrefix with Double::sum, but a
// straight parallel sum of the terms is simpler:
double exp = Arrays.stream(array).parallel().sum();
/**
 * Fills every weight with a value drawn from the supplier.
 *
 * <p>Uses the sequential {@link Arrays#setAll} rather than {@code parallelSetAll}:
 * the generator ignores the index and only calls {@code f.getAsDouble()}, so a
 * parallel fill would invoke the supplier concurrently — a data race for any
 * stateful, non-thread-safe supplier — with nothing gained in return.
 *
 * @param f supplier invoked once per weight element
 * @return this layer, for call chaining
 */
@Nonnull
public FullyConnectedReferenceLayer set(@Nonnull final DoubleSupplier f) {
    Arrays.setAll(weights.getData(), i -> f.getAsDouble());
    return this;
}
/**
 * Fills every weight with a value drawn from the supplier.
 *
 * <p>Uses the sequential {@link Arrays#setAll} rather than {@code parallelSetAll}:
 * the generator ignores the index and only calls {@code f.getAsDouble()}, so a
 * parallel fill would invoke the supplier concurrently — a data race for any
 * stateful, non-thread-safe supplier — with nothing gained in return.
 *
 * @param f supplier invoked once per weight element
 * @return this layer, for call chaining
 */
@Nonnull
public FullyConnectedReferenceLayer set(@Nonnull final DoubleSupplier f) {
    Arrays.setAll(weights.getData(), i -> f.getAsDouble());
    return this;
}
/**
 * Fills every weight with a value drawn from the supplier.
 *
 * <p>Uses the sequential {@link Arrays#setAll} rather than {@code parallelSetAll}:
 * the generator ignores the index and only calls {@code f.getAsDouble()}, so a
 * parallel fill would invoke the supplier concurrently — a data race for any
 * stateful, non-thread-safe supplier — with nothing gained in return.
 *
 * @param f supplier invoked once per weight element
 * @return this layer, for call chaining
 */
@Nonnull
public ReLuActivationLayer setWeights(@Nonnull final DoubleSupplier f) {
    Arrays.setAll(weights.getData(), i -> f.getAsDouble());
    return this;
}
/**
 * Fills every weight with a value drawn from the supplier.
 *
 * <p>Uses the sequential {@link Arrays#setAll} rather than {@code parallelSetAll}:
 * the generator ignores the index and only calls {@code f.getAsDouble()}, so a
 * parallel fill would invoke the supplier concurrently — a data race for any
 * stateful, non-thread-safe supplier — with nothing gained in return.
 *
 * @param f supplier invoked once per weight element
 * @return this layer, for call chaining
 */
@Nonnull
public ReLuActivationLayer setWeights(@Nonnull final DoubleSupplier f) {
    Arrays.setAll(weights.getData(), i -> f.getAsDouble());
    return this;
}
/** Benchmark: parallel fill of the shared array {@code la}, each slot receiving {@code f(index)}. */
@Benchmark
public void parallelSetAll() {
    Arrays.parallelSetAll(la, index -> f(index));
} }
/**
 * Fills every weight with a value drawn from the supplier.
 *
 * <p>Uses the sequential {@link Arrays#setAll} rather than {@code parallelSetAll}:
 * the generator ignores the index and only calls {@code f.getAsDouble()}, so a
 * parallel fill would invoke the supplier concurrently — a data race for any
 * stateful, non-thread-safe supplier — with nothing gained in return.
 *
 * @param f supplier invoked once per weight element
 * @return this layer, for call chaining
 */
@Nonnull
public FullyConnectedLayer setWeights(@Nonnull final DoubleSupplier f) {
    Arrays.setAll(getWeights().getData(), i -> f.getAsDouble());
    return this;
}
/**
 * Fills every weight with a value drawn from the supplier.
 *
 * <p>Uses the sequential {@link Arrays#setAll} rather than {@code parallelSetAll}:
 * the generator ignores the index and only calls {@code f.getAsDouble()}, so a
 * parallel fill would invoke the supplier concurrently — a data race for any
 * stateful, non-thread-safe supplier — with nothing gained in return.
 *
 * @param f supplier invoked once per weight element
 * @return this layer, for call chaining
 */
@Nonnull
public FullyConnectedLayer setWeights(@Nonnull final DoubleSupplier f) {
    Arrays.setAll(getWeights().getData(), i -> f.getAsDouble());
    return this;
}
// Fills an int[] twice from fresh Rand.Pint generators: first sequentially
// (Arrays.setAll), then in parallel (Arrays.parallelSetAll).
// NOTE(review): the parallel pass overwrites the sequential results using a
// separate generator instance — presumably only timing matters here; confirm.
static void intArray() { int[] ia = new int[SIZE]; Arrays.setAll(ia, new Rand.Pint()::get); Arrays.parallelSetAll(ia, new Rand.Pint()::get); } static void longArray() {
/**
 * Computes, in parallel, the centrality score of every node from its farness
 * and component data, applying the Wasserman-Faust correction when configured.
 *
 * @return a nodeCount-sized array of per-node centrality scores
 */
@Override
public double[] getCentrality() {
    final double[] scores = new double[nodeCount];
    Arrays.parallelSetAll(scores,
        node -> centrality(farness.get(node), component.get(node), nodeCount, wassermanFaust));
    return scores;
}
// Same exercise for long[]: sequential fill, then parallel fill, each from a
// fresh Rand.Plong generator (the second pass overwrites the first).
static void longArray() { long[] la = new long[SIZE]; Arrays.setAll(la, new Rand.Plong()::get); Arrays.parallelSetAll(la, new Rand.Plong()::get); } public static void main(String[] args) {