/**
 * Resolves the table type for this table.
 *
 * <p>Reads {@code HOODIE_TABLE_TYPE_PROP_NAME} from the table properties; when the
 * property is not present, falls back to {@link #DEFAULT_TABLE_TYPE}.
 *
 * @return the configured {@link HoodieTableType}, or the default when unset
 */
public HoodieTableType getTableType() {
  // Guard clause: no explicit type configured -> use the default.
  // NOTE: containsKey (not getProperty == null) is kept deliberately so that
  // Properties defaults do not influence the presence check.
  if (!props.containsKey(HOODIE_TABLE_TYPE_PROP_NAME)) {
    return DEFAULT_TABLE_TYPE;
  }
  return HoodieTableType.valueOf(props.getProperty(HOODIE_TABLE_TYPE_PROP_NAME));
}
/**
 * Initializes a dataset at the given base path with the supplied table settings.
 *
 * @param hadoopConf       Hadoop configuration used to access the file system
 * @param basePath         base path under which the dataset is created
 * @param tableType        table type name; must match a {@link HoodieTableType} constant
 * @param tableName        name recorded for the table
 * @param archiveLogFolder folder used for the archived timeline
 * @return the {@link HoodieTableMetaClient} for the newly initialized dataset
 * @throws IOException              if the dataset cannot be initialized on storage
 * @throws IllegalArgumentException if {@code tableType} is not a valid table type name
 */
public static HoodieTableMetaClient initTableType(Configuration hadoopConf, String basePath,
    String tableType, String tableName, String archiveLogFolder) throws IOException {
  Properties tableProps = new Properties();
  tableProps.setProperty(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, tableName);
  // valueOf(...).name() validates the incoming string and normalizes it to the enum name.
  tableProps.setProperty(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME,
      HoodieTableType.valueOf(tableType).name());
  tableProps.setProperty(HoodieTableConfig.HOODIE_ARCHIVELOG_FOLDER_PROP_NAME, archiveLogFolder);
  return HoodieTableMetaClient.initializePathAsHoodieDataset(hadoopConf, basePath, tableProps);
}
final HoodieTableType tableType = HoodieTableType.valueOf(tableTypeStr); HoodieTableMetaClient.initTableType(HoodieCLI.conf, path, tableType, name, payloadClass);
if (!fs.exists(path)) { HoodieTableMetaClient .initTableType(jsc.hadoopConfiguration(), tablePath, HoodieTableType.valueOf(tableType), tableName, HoodieAvroPayload.class.getName()); if (HoodieTableType.valueOf(tableType) == HoodieTableType.MERGE_ON_READ) { Optional<String> instant = client.scheduleCompaction(Optional.empty()); JavaRDD<WriteStatus> writeStatues = client.compact(instant.get());
.initTableType(jsc.hadoopConfiguration(), dfsBasePath, HoodieTableType.valueOf(tableType), tableName, HoodieAvroPayload.class.getName()); .initTableType(jsc.hadoopConfiguration(), tablePath, HoodieTableType.valueOf(tableType), tableName, HoodieAvroPayload.class.getName()); HoodieWriteConfig localConfig = HoodieWriteConfig.newBuilder().withPath(tablePath)