/**
 * Deserializes and returns the complete record held in {@code byteRecord}.
 *
 * <p>Avoid calling it to fetch top level record fields: every invocation pays the
 * full deserialization cost of {@link SparkUtil#deserialize}, so repeated field
 * access through this method is expensive.
 *
 * @return the deserialized {@link GenericRecord}
 */
public GenericRecord getData() {
    return SparkUtil.deserialize(this.byteRecord, recordClassTag);
}
/**
 * Refreshes the pending-file queue and reports whether another file is available.
 *
 * @return true when at least one file remains queued after the refresh
 */
@Override
public boolean hasNext() {
    computeNext();
    final boolean queueDrained = this.newFilesQ.isEmpty();
    return !queueDrained;
}
/**
 * Static factory for a {@link TimestampInfo} that carries no timestamp:
 * an absent optional value and the boolean flag set to false.
 *
 * @return an empty {@link TimestampInfo}
 */
public static TimestampInfo generateEmptyTimestampInfo() {
    return new TimestampInfo(Optional.absent(), false);
}
}
/**
 * Acquires the shared {@link FileSystem} handle, backed by a freshly created
 * default configuration, before each test runs.
 */
@Before
public void initClass() throws IOException {
    this.fileSystem = FSUtils.getFs(new Configuration());
}
/**
 * Worker task in a multi-threaded lock test: waits for the coordinator to reach
 * step 2, asserts that {@code key2} can be locked, then releases the lock (via
 * try-with-resources closing the {@link LockManager}) and advances the coordinator.
 */
@Override
public Void call() throws Exception {
    try (final LockManager lockManager = new LockManager(conf)) {
        coordinator.waitUntilStep(2);
        // key2 is expected to be lockable at this point in the choreography.
        assertTrue(lockManager.lock(key2, ""));
    }
    // Advance only after the lock manager is closed so other workers see the release.
    coordinator.nextStep();
    return null;
}
};
/**
 * Ensure that hoodie dataset is present.
 *
 * <p>Wraps any {@link IOException} from filesystem access or dataset creation in a
 * {@link JobRuntimeException} so callers deal with a single unchecked failure type.
 *
 * @throws JobRuntimeException if the hoodie dataset could not be initialized
 */
protected void initDataset() {
    try {
        HoodieUtil.initHoodieDataset(FSUtils.getFs(this.hoodieConf.getConf()), this.hoodieConf);
    } catch (IOException e) {
        // Log with the original stack trace before rethrowing; the cause is preserved.
        log.error("Error initializing hoodie dataset.", e);
        throw new JobRuntimeException("Could not initialize hoodie dataset", e);
    }
}
/**
 * Worker task in a multi-threaded batch-lock test: at step 1 the batch lock is
 * expected to fail (another worker holds a conflicting key); after advancing to
 * step 4 the locks should have been released and the batch lock must succeed.
 */
@Override
public Void call() throws Exception {
    try (final LockManager lockManager = new LockManager(conf)) {
        coordinator.waitUntilStep(1);
        // Conflicting locks are still held by another worker here.
        assertFalse(lockManager.batchLock(lockKeyList, ""));
        coordinator.nextStep();
        coordinator.waitUntilStep(4);
        // By step 4 the other worker has released its locks.
        assertTrue(lockManager.batchLock(lockKeyList, ""));
        coordinator.nextStep();
    }
    return null;
}
};
/**
 * Serializes a {@link SinkStat} into its string form by serializing the
 * underlying stats map via {@link MapUtil#serializeMap}.
 *
 * @param sinkStat the stat holder to serialize; must not be null
 * @return the serialized stats map
 */
public static String serialize(@NonNull final SinkStat sinkStat) {
    return MapUtil.serializeMap(sinkStat.stats);
}
/**
 * Convenience constructor for converters that carry no timestamp information;
 * delegates to the full constructor with an empty {@link TimestampInfo}.
 *
 * @param keySpace       Cassandra keyspace name; must be non-empty
 * @param tableName      Cassandra table name; must be non-empty
 * @param filteredFields optional set of field names to filter
 */
public CassandraSchemaConverter(@NotEmpty final String keySpace,
    @NotEmpty final String tableName,
    @NonNull final Optional<Set<String>> filteredFields) {
    this(keySpace, tableName, TimestampInfo.generateEmptyTimestampInfo(), filteredFields);
}
/**
 * Test stub converter: passes a fresh default {@link Configuration} and a plain
 * {@link ErrorExtractor} to the parent, with no additional state.
 */
public DummyHoodieSinkDataConverter() {
    super(new Configuration(), new ErrorExtractor());
}
/**
 * Verifies that lock key parts are joined into a single slash-delimited path.
 */
@Test
public void testLockKeyGen() throws Exception {
    final String expected = "/part1/part2/part3";
    final String actual = LockManager.getLockKey("part1", "part2", "part3");
    assertEquals(expected, actual);
}
/**
 * Builds the command-line options accepted by this tool: a single required
 * "-m" option naming the HDFS metadata file.
 *
 * @return the populated {@link Options} instance
 */
private static Options getCLIOptions() {
    final Options cliOptions = new Options();
    cliOptions.addOption(
        CommandLineUtil.generateOption("m", METADATA_FILE_OPTION, true, "HDFS metadata file", true));
    return cliOptions;
}
}
/**
 * Creates a converter configured with the record fields used for the hoodie
 * record key and partition path, and the {@link TimeUnit} of the timestamp values.
 * NOTE(review): field semantics inferred from parameter names — confirm against
 * the conversion logic.
 *
 * @param conf                  configuration passed through to the parent converter
 * @param recordKeyFieldName    name of the record field holding the record key; non-empty
 * @param partitionFieldName    name of the record field holding the partition value; non-empty
 * @param timeUnit              time unit applied to timestamp values; non-null
 */
public TSBasedHoodieSinkDataConverter(@NonNull final Configuration conf,
    @NotEmpty final String recordKeyFieldName,
    @NotEmpty final String partitionFieldName,
    @NonNull final TimeUnit timeUnit) {
    super(conf, new HoodieSinkConverterErrorExtractor());
    this.timeUnit = timeUnit;
    this.recordKeyFieldName = recordKeyFieldName;
    this.partitionFieldName = partitionFieldName;
}
/**
 * Wraps the given configuration, validating up front that every mandatory
 * property is present.
 *
 * @param conf configuration to wrap; must contain all mandatory properties
 */
public FileSourceConfiguration(@NonNull final Configuration conf) {
    // Validate before assigning so a half-configured instance never escapes.
    ConfigUtil.checkMandatoryProperties(conf, getMandatoryProperties());
    this.conf = conf;
}
/**
 * Acquires the shared {@link FileSystem} handle for each test.
 */
@Before
public void setupTest() throws IOException {
    // Deliberately no matching tearDownTest()/close(): Hadoop caches FileSystem
    // instances, so closing the handle here could break other threads that are
    // still using the same cached object.
    final com.uber.marmaray.common.configuration.Configuration conf =
        new com.uber.marmaray.common.configuration.Configuration();
    this.fileSystem = FSUtils.getFs(conf);
}
/**
 * Worker task in a multi-threaded lock test: locks {@code key1} at step 0, holds
 * it until the coordinator reaches step 3, then releases it (try-with-resources
 * closes the {@link LockManager}) and advances the coordinator.
 */
@Override
public Void call() throws Exception {
    try (final LockManager lockManager = new LockManager(conf)) {
        coordinator.waitUntilStep(0);
        assertTrue(lockManager.lock(key1, ""));
        coordinator.nextStep();
        // Hold the lock across steps so other workers can observe contention.
        coordinator.waitUntilStep(3);
    }
    // Signal only after close() so the release is visible to other workers.
    coordinator.nextStep();
    return null;
}
};
/**
 * Returns the hoodie metadata to be saved into the commit file; also used by
 * HoodieSink to retrieve and store metadata information.
 *
 * <p>The declared return type is {@link HashMap} rather than {@link Map} because
 * hoodie requires the concrete type — see
 * {@link HoodieWriteClient#commit(String, JavaRDD, java.util.Optional)}.
 *
 * @return a single-entry map keyed by {@code HOODIE_METADATA_KEY} holding the
 *         serialized metadata map
 */
public HashMap<String, String> getMetadataInfo() {
    final String serializedMetadata = MapUtil.serializeMap(this.metadataMap);
    final HashMap<String, String> metadataInfo = new HashMap<>();
    metadataInfo.put(HOODIE_METADATA_KEY, serializedMetadata);
    return metadataInfo;
}
/**
 * Refreshes the pending-file queue and returns the next file, or null when empty.
 */
@Override
public FileStatus next() {
    computeNext();
    if (this.newFilesQ.isEmpty()) {
        // NOTE(review): returning null here deviates from the Iterator contract,
        // which specifies throwing NoSuchElementException; callers of this
        // iterator must null-check. Confirm intent before changing.
        return null;
    } else {
        return this.newFilesQ.pollFirst();
    }
}
};
/**
 * Obtains the filesystem handle for each test, backed by a default configuration.
 */
@Before
public void setupTest() throws IOException {
    final Configuration defaultConf = new Configuration();
    this.fs = FSUtils.getFs(defaultConf);
}
/**
 * Obtains the shared filesystem handle for each test from a fresh, empty configuration.
 */
@Before
public void setupTest() throws IOException {
    final Configuration emptyConf = new Configuration();
    this.fileSystem = FSUtils.getFs(emptyConf);
}