/**
 * Deletes the YARN application files, e.g., Flink binaries, libraries, etc., from the remote
 * filesystem.
 *
 * @param env The environment variables.
 */
public static void deleteApplicationFiles(final Map<String, String> env) {
    final String applicationFilesDir = env.get(YarnConfigKeys.FLINK_YARN_FILES);
    if (!StringUtils.isNullOrWhitespaceOnly(applicationFilesDir)) {
        final org.apache.flink.core.fs.Path path =
            new org.apache.flink.core.fs.Path(applicationFilesDir);
        try {
            final org.apache.flink.core.fs.FileSystem fileSystem = path.getFileSystem();
            // Recursive delete; returns false if the directory could not be removed.
            if (!fileSystem.delete(path, true)) {
                LOG.error("Deleting YARN application files under {} was unsuccessful.",
                    applicationFilesDir);
            }
        } catch (final IOException e) {
            LOG.error("Could not properly delete YARN application files directory {}.",
                applicationFilesDir, e);
        }
    } else {
        LOG.debug("No YARN application files directory set. Therefore, cannot clean up the data.");
    }
}
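// A minimal usage sketch (not from the source): deleteApplicationFiles reads the
// staging-directory path from the environment variable named by
// YarnConfigKeys.FLINK_YARN_FILES, so callers typically pass the container's
// environment map.
static void cleanUpApplicationFiles() {
    Map<String, String> env = new HashMap<>(System.getenv());
    deleteApplicationFiles(env);
    // If the variable is unset or blank, the call is a no-op apart from a debug log.
}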
@Override
public void setDefaultDatabaseName(String databaseName) {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(databaseName),
        "databaseName cannot be null or empty");

    defaultDatabaseName = databaseName;
}
public static <T> T fromJson(String json, Class<T> c, boolean ignoreUnknownProperties) {
    if (StringUtils.isNullOrWhitespaceOnly(json)) {
        return null;
    }
    try {
        ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
        // Fail on unknown properties unless the caller explicitly opts out.
        objectMapper.configure(
            DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, !ignoreUnknownProperties);
        return objectMapper.readValue(json, c);
    } catch (IOException e) {
        // Don't add the JSON string to the error message to avoid spamming the logs;
        // e already contains the JSON string.
        throw new IllegalArgumentException("Error converting from JSON string.", e);
    }
}
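// A minimal usage sketch (not from the source), assuming fromJson is statically
// imported from its defining utility class; Config is a hypothetical
// JSON-bindable POJO.
public class FromJsonExample {
    public static class Config {
        public String name;
        public int parallelism;
    }

    public static void main(String[] args) {
        // The unknown "extra" field is tolerated because ignoreUnknownProperties = true.
        Config config = fromJson("{\"name\": \"job\", \"parallelism\": 4, \"extra\": 1}", Config.class, true);
        System.out.println(config.name + " -> " + config.parallelism);
    }
}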
public FlinkInMemoryCatalog(String name) {
    Preconditions.checkArgument(!StringUtils.isNullOrWhitespaceOnly(name),
        "name cannot be null or empty");

    this.catalogName = name;
    this.databases = new LinkedHashMap<>();
    this.databases.put(DEFAULT_DB, new CatalogDatabase());
    this.tables = new LinkedHashMap<>();
    this.partitions = new LinkedHashMap<>();
}
public void setDefaultCatalog(String catalogName) {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(catalogName),
        "catalogName cannot be null or empty");
    checkArgument(catalogs.containsKey(catalogName),
        String.format("Cannot find registered catalog %s", catalogName));

    if (!defaultCatalogName.equals(catalogName)) {
        defaultCatalogName = catalogName;
        defaultDbName = catalogs.get(catalogName).getDefaultDatabaseName();

        LOG.info("Set default catalog as '{}' and default database as '{}'",
            defaultCatalogName, defaultDbName);
    }
}
public void setDefaultDatabase(String catalogName, String dbName) {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(catalogName),
        "catalogName cannot be null or empty");
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(dbName),
        "dbName cannot be null or empty");
    checkArgument(catalogs.containsKey(catalogName),
        String.format("Cannot find registered catalog %s", catalogName));
    checkArgument(catalogs.get(catalogName).listDatabases().contains(dbName),
        String.format("Cannot find registered database %s in catalog %s", dbName, catalogName));

    defaultCatalogName = catalogName;
    defaultDbName = dbName;

    LOG.info("Set default catalog as '{}' and default database as '{}'",
        defaultCatalogName, defaultDbName);
}
public HiveCatalog(String catalogName, HiveConf hiveConf) {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(catalogName),
        "catalogName cannot be null or empty");

    this.catalogName = catalogName;
    this.hiveConf = checkNotNull(hiveConf, "hiveConf cannot be null");

    LOG.info("Created HiveCatalog '{}'", catalogName);
}
@Override
public List<ObjectPath> listTables(String dbName) throws DatabaseNotExistException {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(dbName),
        "dbName cannot be null or empty");

    if (!dbExists(dbName)) {
        throw new DatabaseNotExistException(catalogName, dbName);
    }

    return tables.keySet().stream()
        .filter(k -> k.getDbName().equals(dbName))
        .collect(Collectors.toList());
}
public Builder(String tableType, TableSchema tableSchema, boolean isStreaming) {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(tableType),
        "tableType cannot be null or empty");
    checkNotNull(tableSchema, "tableSchema cannot be null");

    this.tableType = tableType;
    this.tableSchema = tableSchema;
    this.isStreaming = isStreaming;
}
@Override
public FlinkKafkaConsumerBase createKafkaConsumer() {
    FlinkKafkaConsumerBase consumer;
    KafkaMessageDeserialization kafkaMessageDeserialization =
        new KafkaMessageDeserialization(baseRowTypeInfo);

    if (!StringUtils.isNullOrWhitespaceOnly(topicPattern)) {
        // Subscribe to all topics matching the pattern.
        Pattern pattern = Pattern.compile(topicPattern);
        consumer = new FlinkKafkaConsumer09(pattern, kafkaMessageDeserialization, properties);
    } else {
        consumer = new FlinkKafkaConsumer09(topic, kafkaMessageDeserialization, properties);
    }

    return consumer;
}
public void registerCatalog(String catalogName, ReadableCatalog catalog)
        throws CatalogAlreadyExistException {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(catalogName),
        "catalogName cannot be null or empty");
    checkNotNull(catalog, "catalog cannot be null");

    if (catalogs.containsKey(catalogName)) {
        throw new CatalogAlreadyExistException(catalogName);
    }

    catalogs.put(catalogName, catalog);
    catalog.open();
    CatalogCalciteSchema.registerCatalog(rootSchema, catalogName, catalog);
}
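// A minimal usage sketch (not from the source), tying the catalog methods above
// together. "manager" stands for whatever object hosts registerCatalog /
// setDefaultCatalog / setDefaultDatabase; "inmemory" and "sales" are
// hypothetical names.
ReadableCatalog catalog = new FlinkInMemoryCatalog("inmemory");
manager.registerCatalog("inmemory", catalog);    // opens the catalog and registers it with Calcite
manager.setDefaultCatalog("inmemory");           // default database falls back to the catalog's own default
manager.setDefaultDatabase("inmemory", "sales"); // or pin catalog and database explicitly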
@Override
public FlinkKafkaConsumerBase createKafkaConsumer() {
    FlinkKafkaConsumerBase consumer;
    KafkaMessageDeserialization kafkaMessageDeserialization =
        new KafkaMessageDeserialization(baseRowTypeInfo);

    if (!StringUtils.isNullOrWhitespaceOnly(topicPattern)) {
        // Subscribe to all topics matching the pattern.
        Pattern pattern = Pattern.compile(topicPattern);
        consumer = new FlinkKafkaConsumer011(pattern, kafkaMessageDeserialization, properties);
    } else {
        consumer = new FlinkKafkaConsumer011(topic, kafkaMessageDeserialization, properties);
    }

    // setStartFromTimestamp rejects negative timestamps, so only seed the start
    // position when a valid (non-negative) timestamp was configured.
    if (startupMode == StartupMode.TIMESTAMP && startTimeStamp >= 0) {
        ((FlinkKafkaConsumer011) consumer).setStartFromTimestamp(startTimeStamp);
    }

    return consumer;
}
@Override
public FlinkKafkaConsumerBase createKafkaConsumer() {
    FlinkKafkaConsumerBase consumer;
    KafkaMessageDeserialization kafkaMessageDeserialization =
        new KafkaMessageDeserialization(baseRowTypeInfo);

    if (!StringUtils.isNullOrWhitespaceOnly(topicPattern)) {
        // Subscribe to all topics matching the pattern.
        Pattern pattern = Pattern.compile(topicPattern);
        consumer = new FlinkKafkaConsumer010(pattern, kafkaMessageDeserialization, properties);
    } else {
        consumer = new FlinkKafkaConsumer010(topic, kafkaMessageDeserialization, properties);
    }

    // setStartFromTimestamp rejects negative timestamps, so only seed the start
    // position when a valid (non-negative) timestamp was configured.
    if (startupMode == StartupMode.TIMESTAMP && startTimeStamp >= 0) {
        ((FlinkKafkaConsumer010) consumer).setStartFromTimestamp(startTimeStamp);
    }

    return consumer;
}
public static ReadableCatalog loadCatalogFromConfig(
        ClassLoader cl,
        String catalogType,
        String catalogName,
        Map<String, String> properties) throws DynamicCodeLoadingException {
    checkNotNull(cl, "class loader cannot be null");
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(catalogType),
        "catalogType cannot be null or empty");
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(catalogName),
        "catalogName cannot be null or empty");
    checkNotNull(properties, "properties cannot be null");

    switch (catalogType.toLowerCase()) {
        case FLINK_IN_MEMORY_CATALOG_NAME:
            return new FlinkInMemoryCatalogFactory().createCatalog(catalogName, properties);
        case HIVE_CATALOG_NAME:
            return loadCatalog(
                HIVE_CATALOG_FACTORY_CLASS_NAME, cl, catalogType, catalogName, properties);
        default:
            // To use a self-defined catalog, users have to put the catalog's full
            // class name as the catalog type in the config file.
            return loadCatalog(catalogType, cl, catalogType, catalogName, properties);
    }
}
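// A minimal usage sketch (not from the source). The built-in type strings are
// assumptions standing in for FLINK_IN_MEMORY_CATALOG_NAME and HIVE_CATALOG_NAME;
// any other value is treated as a fully qualified factory class name.
Map<String, String> catalogProps = new HashMap<>();
ReadableCatalog builtIn = loadCatalogFromConfig(
    Thread.currentThread().getContextClassLoader(),
    "flink-in-memory",              // hypothetical value of FLINK_IN_MEMORY_CATALOG_NAME
    "myCatalog",
    catalogProps);
ReadableCatalog custom = loadCatalogFromConfig(
    Thread.currentThread().getContextClassLoader(),
    "com.example.MyCatalogFactory", // custom catalogs: the type is the factory class name
    "myCustomCatalog",
    catalogProps);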
@Override
public BatchTableSink<BaseRow> createBatchTableSink(Map<String, String> props) {
    TableProperties tableProperties = new TableProperties();
    tableProperties.putProperties(props);

    String filePath = tableProperties.getString(ParquetOptions.FILE_PATH);
    if (StringUtils.isNullOrWhitespaceOnly(filePath)) {
        throw new RuntimeException(ParquetOptions.PARAMS_HELP_MSG);
    }

    Option<WriteMode> writeModeOption = null;
    String writeMode = tableProperties.getString(ParquetOptions.WRITE_MODE);
    if (!DEFAULT_WRITE_MODE.equals(writeMode)) {
        writeModeOption = new Some<>(WriteMode.valueOf(writeMode));
    }

    CompressionCodecName compressionCodecName = CompressionCodecName.valueOf(
        tableProperties.getString(ParquetOptions.COMPRESSION_CODEC_NAME));

    return new ParquetTableSink(filePath, writeModeOption, compressionCodecName);
}
@Override
public BatchTableSink<BaseRow> createBatchTableSink(Map<String, String> props) {
    TableProperties properties = new TableProperties();
    properties.putProperties(props);

    String filePath = properties.getString(ORCOptions.FILE_PATH);
    if (StringUtils.isNullOrWhitespaceOnly(filePath)) {
        throw new RuntimeException(ORCOptions.PARAMS_HELP_MSG);
    }

    Option<FileSystem.WriteMode> writeModeOption = null;
    String writeMode = properties.getString(ORCOptions.WRITE_MODE);
    if (!DEFAULT_WRITE_MODE.equals(writeMode)) {
        writeModeOption = new Some<>(FileSystem.WriteMode.valueOf(writeMode));
    }

    CompressionKind compressionKind = CompressionKind.valueOf(
        properties.getString(ORCOptions.COMPRESSION_CODEC_NAME));

    return new OrcTableSink(filePath, writeModeOption, compressionKind);
}
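// A minimal usage sketch (not from the source). The key strings are hypothetical
// stand-ins for the constants in ORCOptions, and OrcTableSinkFactory is an
// assumed name for the factory defining createBatchTableSink above.
Map<String, String> props = new HashMap<>();
props.put("filePath", "/data/out/orc");       // ORCOptions.FILE_PATH
props.put("writeMode", "OVERWRITE");          // parsed via FileSystem.WriteMode.valueOf
props.put("compressionCodecName", "SNAPPY");  // parsed via CompressionKind.valueOf
BatchTableSink<BaseRow> sink = new OrcTableSinkFactory().createBatchTableSink(props);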
private OrcVectorizedColumnRowTableSource createSource(Map<String, String> props) {
    TableProperties properties = new TableProperties();
    properties.putProperties(props);
    RichTableSchema schema = properties.readSchemaFromProperties(null);

    String filePath = properties.getString(ORCOptions.FILE_PATH);
    if (StringUtils.isNullOrWhitespaceOnly(filePath)) {
        throw new RuntimeException(ORCOptions.PARAMS_HELP_MSG);
    }
    boolean enumerateNestedFiles = properties.getBoolean(ORCOptions.ENUMERATE_NESTED_FILES);

    InternalType[] dataTypes = schema.getColumnTypes();
    OrcVectorizedColumnRowTableSource t = new OrcVectorizedColumnRowTableSource(
        new Path(filePath),
        dataTypes,
        schema.getColumnNames(),
        enumerateNestedFiles);
    t.setSchemaFields(schema.getColumnNames());
    return t;
}
private ParquetVectorizedColumnRowTableSource getSource(Map<String, String> props) {
    TableProperties tableProperties = new TableProperties();
    tableProperties.putProperties(props);
    RichTableSchema richTableSchema = tableProperties.readSchemaFromProperties(null);

    String filePath = tableProperties.getString(ParquetOptions.FILE_PATH);
    if (StringUtils.isNullOrWhitespaceOnly(filePath)) {
        throw new RuntimeException(ParquetOptions.PARAMS_HELP_MSG);
    }
    boolean enumerateNestedFiles =
        tableProperties.getBoolean(ParquetOptions.ENUMERATE_NESTED_FILES);

    return new ParquetVectorizedColumnRowTableSource(
        new Path(filePath),
        richTableSchema.getColumnTypes(),
        richTableSchema.getColumnNames(),
        enumerateNestedFiles);
}
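// A minimal usage sketch (not from the source). getSource is private in the
// snippet, so this assumes a call from inside the factory; key strings are
// hypothetical stand-ins for the ParquetOptions constants, and the schema
// entries expected by readSchemaFromProperties must also be present.
Map<String, String> props = new HashMap<>();
props.put("filePath", "/data/in/parquet");    // ParquetOptions.FILE_PATH
props.put("enumerateNestedFiles", "true");    // recurse into sub-directories
ParquetVectorizedColumnRowTableSource source = getSource(props);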