/**
 * Writes an {@link IgniteBiTuple} as a JSON object with exactly two fields,
 * {@code "key"} and {@code "value"}.
 *
 * {@inheritDoc}
 */
@Override public void serialize(IgniteBiTuple t, JsonGenerator gen, SerializerProvider ser) throws IOException {
    gen.writeStartObject();

    // Delegate key/value serialization to Jackson so nested types are handled by their own serializers.
    gen.writeObjectField("key", t.getKey());
    gen.writeObjectField("value", t.getValue());

    gen.writeEndObject();
} }; // Closes the enclosing anonymous serializer (declared outside this view).
/**
 * Forwards a single key-value pair to the data streamer and bumps the
 * local count of submitted updates.
 *
 * {@inheritDoc}
 */
@Override public void apply(IgniteBiTuple<?, ?> entry) {
    // Streamer batches the update internally; updateCnt only tracks submissions, not completions.
    streamer.addData(entry.getKey(), entry.getValue());

    updateCnt++;
}
/**
 * Converts a field name-value pair to a {@code name = [value]} string.
 *
 * @param kv Field name and value.
 * @param pretty If {@code true}, produces a multi-line, tab-indented representation;
 *      otherwise a compact single-line one.
 * @return String representation of the pair.
 */
@NotNull private String fieldToString(IgniteBiTuple<String, Object> kv, boolean pretty) {
    StringBuilder builder = new StringBuilder(pretty ? "\t" : "")
        .append(kv.getKey()).append(" = [");

    if (kv.getValue() instanceof List) {
        // Wildcard instead of the raw List type: avoids an unchecked-usage warning.
        List<?> list = (List<?>)kv.getValue();

        builder
            .append(pretty ? "\n" : "")
            .append(list.stream()
                .map(x -> (pretty ? "\t\t" : "") + x)
                .collect(Collectors.joining(pretty ? ",\n" : ", ")))
            .append(pretty ? "\n\t]" : "]");
    }
    else {
        // append(Object) renders null as "null"; the previous explicit
        // toString() call threw NPE for a null field value.
        builder.append(kv.getValue())
            .append("]");
    }

    return builder.toString();
} }
/**
 * Merges left and right per-partition statistics (error sum and element count).
 *
 * @param left Left partial result, possibly {@code null}.
 * @param right Right partial result, possibly {@code null}.
 * @return Merged value.
 */
private IgniteBiTuple<Double, Long> reduce(IgniteBiTuple<Double, Long> left, IgniteBiTuple<Double, Long> right) {
    // A null side means "no data": fall back to the other side, or to a
    // zero accumulator when both sides are absent.
    if (left == null)
        return right != null ? right : new IgniteBiTuple<>(0.0, 0L);

    if (right == null)
        return left;

    double errSum = left.getKey() + right.getKey();
    long totalCnt = left.getValue() + right.getValue();

    return new IgniteBiTuple<>(errSum, totalCnt);
} }
/**
 * Sets IGFS modes for particular paths.
 *
 * @param modes Path-to-mode pairs.
 */
@SafeVarargs final void pathModes(IgniteBiTuple<String, IgfsMode>... modes) {
    assert modes != null;

    // LinkedHashMap keeps the declaration order of the pairs; sized exactly to avoid rehashing.
    pathModes = new LinkedHashMap<>(modes.length, 1.0f);

    for (IgniteBiTuple<String, IgfsMode> pair : modes)
        pathModes.put(pair.getKey(), pair.getValue());
}
/**
 * Computes the model's mean error over the dataset by aggregating
 * per-partition (error sum, element count) pairs.
 *
 * {@inheritDoc}
 */
@Override public Double computeMeanErrorOnDataset(
    Dataset<EmptyContext, ? extends FeatureMatrixWithLabelsOnHeapData> dataset,
    ModelsComposition mdl) {
    IgniteBiTuple<Double, Long> stats = dataset.compute(
        part -> computeStatisticOnPartition(mdl, part),
        this::reduce
    );

    // No data in any partition: the mean is undefined.
    if (stats == null || stats.getValue() == 0)
        return Double.NaN;

    return stats.getKey() / stats.getValue();
}
/**
 * Writes generated test data into the given meta storage.
 *
 * @param metaStorage Meta storage.
 * @param size Number of entries to generate.
 * @param from Starting index for generated data.
 * @return Map of all written key-value pairs.
 */
private Map<String, byte[]> putDataToMetaStorage(MetaStorage metaStorage, int size, int from) throws IgniteCheckedException {
    Map<String, byte[]> written = new HashMap<>();

    Iterator<IgniteBiTuple<String, byte[]>> it = generateTestData(size, from).iterator();

    // Mirror every raw write into the result map so callers can verify storage contents.
    while (it.hasNext()) {
        IgniteBiTuple<String, byte[]> entry = it.next();

        metaStorage.writeRaw(entry.getKey(), entry.getValue());

        written.put(entry.getKey(), entry.getValue());
    }

    return written;
}
/**
 * Reads a map.
 *
 * @param reader Reader.
 * @param readClo Optional reader closure; when {@code null}, each key and value
 *      is read as a detached object.
 * @return Map.
 */
public static <K, V> Map<K, V> readMap(BinaryRawReaderEx reader, @Nullable PlatformReaderBiClosure<K, V> readClo) {
    int size = reader.readInt();

    Map<K, V> res = U.newHashMap(size);

    if (readClo == null) {
        for (int i = 0; i < size; i++)
            res.put((K)reader.readObjectDetached(), (V)reader.readObjectDetached());
    }
    else {
        for (int i = 0; i < size; i++) {
            IgniteBiTuple<K, V> pair = readClo.read(reader);

            res.put(pair.getKey(), pair.getValue());
        }
    }

    return res;
}
/**
 * Reads a linked (insertion-ordered) map.
 *
 * @param reader Reader.
 * @param readClo Optional reader closure; when {@code null}, each key and value
 *      is read as a detached object.
 * @return Map.
 */
public static <K, V> Map<K, V> readLinkedMap(BinaryRawReaderEx reader, @Nullable PlatformReaderBiClosure<K, V> readClo) {
    int size = reader.readInt();

    Map<K, V> res = U.newLinkedHashMap(size);

    if (readClo == null) {
        for (int i = 0; i < size; i++)
            res.put((K)reader.readObjectDetached(), (V)reader.readObjectDetached());
    }
    else {
        for (int i = 0; i < size; i++) {
            IgniteBiTuple<K, V> pair = readClo.read(reader);

            res.put(pair.getKey(), pair.getValue());
        }
    }

    return res;
}
/** Extracts the cache key object for the current row, resolving its partition if not yet set. */
private KeyCacheObject toKey(EnlistOperation op, Object cur) {
    // For delete/lock operations the row itself is the key; otherwise the row
    // is a (key, value) tuple and only the key part is used.
    Object rawKey = op.isDeleteOrLock() ? cur : ((IgniteBiTuple)cur).getKey();

    KeyCacheObject key = cctx.toCacheKeyObject(rawKey);

    // Partition value -1 means "not resolved yet".
    if (key.partition() == -1)
        key.partition(cctx.affinity().partition(key));

    return key;
}
@Override public void run() {
    // BFS traversal state: each element is a (depth, directory path) pair.
    Deque<IgniteBiTuple<Integer, Path>> queue = new ArrayDeque<>();

    queue.add(F.t(0, dir));

    // Line up all worker threads before starting concurrent directory creation.
    U.awaitQuiet(barrier);

    while (!queue.isEmpty()) {
        IgniteBiTuple<Integer, Path> t = queue.pollFirst();

        int curDepth = t.getKey();
        Path curPath = t.getValue();

        if (curDepth <= depth) {
            int newDepth = curDepth + 1;

            // Create directories.
            for (int i = 0; i < entryCnt; i++) {
                Path subDir = new Path(curPath, "dir-" + newDepth + "-" + i);

                try {
                    // Descend only into directories this thread actually created.
                    if (fs.mkdirs(subDir))
                        queue.addLast(F.t(newDepth, subDir));
                }
                catch (IOException e) {
                    // Keep only the first error observed across all threads.
                    err.compareAndSet(null, e);
                }
            }
        }
    }
} }, THREAD_CNT); // Tail closes the enclosing anonymous Runnable / multithreaded call (outside this view).
/**
 * Convenient method for index creation.
 *
 * @param name Index name.
 * @param fields Pairs of field name and sort direction flag.
 * @return Index.
 */
@SafeVarargs // Safe: the varargs array is only read, never stored or exposed.
protected static QueryIndex index(String name, IgniteBiTuple<String, Boolean>... fields) {
    QueryIndex idx = new QueryIndex();

    idx.setName(name);

    // LinkedHashMap preserves the declared field order, which is significant for an index.
    LinkedHashMap<String, Boolean> fields0 = new LinkedHashMap<>();

    for (IgniteBiTuple<String, Boolean> field : fields)
        fields0.put(field.getKey(), field.getValue());

    idx.setFields(fields0);

    return idx;
}
@Override public void run() {
    // BFS traversal state: each element is a (depth, directory path) pair.
    Deque<IgniteBiTuple<Integer, Path>> queue = new ArrayDeque<>();

    queue.add(F.t(0, dir));

    // Line up all worker threads before starting concurrent directory creation.
    U.awaitQuiet(barrier);

    while (!queue.isEmpty()) {
        IgniteBiTuple<Integer, Path> t = queue.pollFirst();

        int curDepth = t.getKey();
        Path curPath = t.getValue();

        if (curDepth <= depth) {
            int newDepth = curDepth + 1;

            // Create directories.
            for (int i = 0; i < entryCnt; i++) {
                Path subDir = new Path(curPath, "dir-" + newDepth + "-" + i);

                try {
                    fs.mkdir(subDir, FsPermission.getDefault(), true);
                }
                catch (IOException ignore) {
                    // Flag that some creation failed; traversal continues regardless.
                    err.set(true);
                }

                // NOTE(review): the sub-directory is enqueued even when mkdir threw —
                // presumably intentional so every thread walks the same tree shape; confirm.
                queue.addLast(F.t(newDepth, subDir));
            }
        }
    }
} }, THREAD_CNT); // Tail closes the enclosing anonymous Runnable / multithreaded call (outside this view).
/**
 * Folds per-job (total length, argument count) pairs into a single total.
 * Jobs that produced no data return {@code null} and are skipped.
 *
 * {@inheritDoc}
 */
@SuppressWarnings("ConstantConditions")
@Override public IgniteBiTuple<Long, Integer> reduce(List<ComputeJobResult> ress) {
    long lenSum = 0;
    int cnt = 0;

    for (ComputeJobResult res : ress) {
        IgniteBiTuple<Long, Integer> data = res.getData();

        if (data != null) {
            lenSum += data.getKey();
            cnt += data.getValue();
        }
    }

    return F.t(lenSum, cnt);
} }
/**
 * Retrieves all MVCC versions of all keys from cache.
 *
 * @param cache Cache.
 * @return {@link Map} of keys to their version rows.
 * @throws IgniteCheckedException If failed.
 */
private Map<KeyCacheObject, List<CacheDataRow>> allVersions(IgniteCache cache) throws IgniteCheckedException {
    GridCacheContext cctx = ((IgniteCacheProxy)cache).context();

    assert cctx.mvccEnabled();

    Map<KeyCacheObject, List<CacheDataRow>> res = new HashMap<>();

    // Iterating the cache yields (key, value) tuples.
    for (Object e : cache) {
        KeyCacheObject key = cctx.toCacheKeyObject(((IgniteBiTuple)e).getKey());

        GridCursor<CacheDataRow> cursor = cctx.offheap().mvccAllVersionsCursor(cctx, key, null);

        List<CacheDataRow> rows = new ArrayList<>();

        while (cursor.next())
            rows.add(cursor.get());

        res.put(key, rows);
    }

    return res;
}
/**
 * Tests synchronous IGFS task execution: the task result must match the
 * generated file length and the total word count.
 *
 * @throws Exception If failed.
 */
@SuppressWarnings("ConstantConditions")
@Test public void testTask() throws Exception {
    // Random dictionary word used as the task argument.
    String word = DICTIONARY[new Random(System.currentTimeMillis()).nextInt(DICTIONARY.length)];

    generateFile(TOTAL_WORDS);

    Long expLen = igfs.info(FILE).length();

    IgniteBiTuple<Long, Integer> res = igfs.execute(new Task(), new IgfsStringDelimiterRecordResolver(" "),
        Collections.singleton(FILE), word);

    assert F.eq(expLen, res.getKey());
    assert F.eq(TOTAL_WORDS, res.getValue());
}
/**
 * Tests asynchronous IGFS task execution: the awaited result must match the
 * generated file length and the total word count.
 *
 * @throws Exception If failed.
 */
@SuppressWarnings("ConstantConditions")
@Test public void testTaskAsync() throws Exception {
    // Random dictionary word used as the task argument.
    String word = DICTIONARY[new Random(System.currentTimeMillis()).nextInt(DICTIONARY.length)];

    generateFile(TOTAL_WORDS);

    Long expLen = igfs.info(FILE).length();

    IgniteBiTuple<Long, Integer> res = igfs.executeAsync(new Task(), new IgfsStringDelimiterRecordResolver(" "),
        Collections.singleton(FILE), word).get();

    assert F.eq(expLen, res.getKey());
    assert F.eq(TOTAL_WORDS, res.getValue());
}
/** Asserts that every MVCC version row of every cache key carries a non-zero tx-state value. */
private void checkAllVersionsHints(IgniteCache cache) throws IgniteCheckedException {
    GridCacheContext cctx = ((IgniteCacheProxy)cache).context();

    assert cctx.mvccEnabled();

    // Iterating the cache yields (key, value) tuples.
    for (Object e : cache) {
        KeyCacheObject key = cctx.toCacheKeyObject(((IgniteBiTuple)e).getKey());

        GridCursor<CacheDataRow> cursor =
            cctx.offheap().mvccAllVersionsCursor(cctx, key, CacheDataRowAdapter.RowData.LINK_WITH_HEADER);

        while (cursor.next())
            assertTrue(cursor.get().mvccTxState() != 0);
    }
}
/**
 * Scan (with explicit {@code setLocal(true)}) should perform on the local node.
 *
 * @throws Exception If failed.
 */
@Test public void testScanLocalExplicit() throws Exception {
    cacheMode = CacheMode.PARTITIONED;
    backups = 0;
    commSpiFactory = new TestLocalCommunicationSpiFactory();

    try {
        Ignite ignite = startGrids(GRID_CNT);

        IgniteCacheProxy<Integer, Integer> cache = fillCache(ignite);

        // A partition owned by the local node: a local scan over it must succeed.
        int part = anyLocalPartition(cache.context());

        QueryCursor<Cache.Entry<Integer, Integer>> qry =
            cache.query(new ScanQuery<Integer, Integer>().setPartition(part).setLocal(true));

        doTestScanQuery(qry, part);

        // A remote partition combined with setLocal(true) must be rejected.
        GridTestUtils.assertThrows(log, (Callable<Void>)() -> {
            int remPart = remotePartition(cache.context()).getKey();

            cache.query(new ScanQuery<Integer, Integer>().setPartition(remPart).setLocal(true));

            return null;
        }, IgniteCheckedException.class, null);
    }
    finally {
        // Always tear the grids down, even when assertions above fail.
        stopAllGrids();
    }
}