/**
 * {@inheritDoc}
 */
@Override
public ConnectorFactory create(@Nonnull @NonNull final ConnectorContext connectorContext) {
    return new SnowflakeConnectorFactory(
        connectorContext.getCatalogName(),
        connectorContext.getCatalogShardName(),
        connectorContext.getConfiguration()
    );
}
/**
 * Constructor.
 *
 * @param connectorContext     server context
 * @param threadServiceManager thread service manager
 * @param jdbcTemplate         JDBC template
 * @param fastServiceMetric    fast service metric
 */
public DirectSqlGetPartition(
    final ConnectorContext connectorContext,
    final ThreadServiceManager threadServiceManager,
    @Qualifier("hiveReadJdbcTemplate") final JdbcTemplate jdbcTemplate,
    final HiveConnectorFastServiceMetric fastServiceMetric
) {
    this.catalogName = connectorContext.getCatalogName();
    this.threadServiceManager = threadServiceManager;
    this.registry = connectorContext.getRegistry();
    this.config = connectorContext.getConfig();
    this.jdbcTemplate = jdbcTemplate;
    this.fastServiceMetric = fastServiceMetric;
    this.isAuditProcessingEnabled = Boolean.valueOf(connectorContext.getConfiguration()
        .getOrDefault(HiveConfigConstants.ENABLE_AUDIT_PROCESSING, "true"));
}
/**
 * Constructor.
 *
 * @param connectorContext       server context
 * @param jdbcTemplate           JDBC template
 * @param fastServiceMetric      fast service metric
 * @param directSqlSavePartition direct sql partition service
 */
public DirectSqlTable(
    final ConnectorContext connectorContext,
    final JdbcTemplate jdbcTemplate,
    final HiveConnectorFastServiceMetric fastServiceMetric,
    final DirectSqlSavePartition directSqlSavePartition
) {
    this.catalogName = connectorContext.getCatalogName();
    this.registry = connectorContext.getRegistry();
    this.jdbcTemplate = jdbcTemplate;
    this.fastServiceMetric = fastServiceMetric;
    this.directSqlSavePartition = directSqlSavePartition;
}
/**
 * Constructor.
 *
 * @param connectorContext   connector context
 * @param jdbcTemplate       JDBC template
 * @param sequenceGeneration sequence generator
 * @param fastServiceMetric  fast service metric
 */
public DirectSqlSavePartition(final ConnectorContext connectorContext,
                              final JdbcTemplate jdbcTemplate,
                              final SequenceGeneration sequenceGeneration,
                              final HiveConnectorFastServiceMetric fastServiceMetric) {
    this.registry = connectorContext.getRegistry();
    this.catalogName = connectorContext.getCatalogName();
    this.batchSize = connectorContext.getConfig().getHiveMetastoreBatchSize();
    this.jdbcTemplate = jdbcTemplate;
    this.sequenceGeneration = sequenceGeneration;
    this.fastServiceMetric = fastServiceMetric;
}
try {
    final HiveConf conf = this.getDefaultConf(connectorContext);
    connectorContext.getConfiguration().forEach(conf::set);
    DataSourceManager.get().load(
        connectorContext.getCatalogShardName(),
        connectorContext.getConfiguration()
    );
    return new EmbeddedHiveClient(
        connectorContext.getCatalogName(),
        HMSHandlerProxy.getProxy(conf, connectorContext.getRegistry()),
        connectorContext.getRegistry()
    );
} catch (Exception e) {
    // completed to mirror the identical catch block in the warehouse bean below
    throw new IllegalArgumentException(
        String.format(
            "Failed creating the hive metastore client for catalog: %s",
            connectorContext.getCatalogName()
        ),
        e
    );
}
final String connectorType = connectorContext.getConnectorType();
final String catalogName = connectorContext.getCatalogName();
final String catalogShardName = connectorContext.getCatalogShardName();
final ConnectorPlugin connectorPlugin = plugins.get(connectorType);
if (connectorPlugin != null) {
    final MetacatCatalogConfig catalogConfig =
        MetacatCatalogConfig.createFromMapAndRemoveProperties(
            connectorType, catalogName, connectorContext.getConfiguration());
    final List<String> databaseNames = catalogConfig.getSchemaWhitelist();
    if (databaseNames.isEmpty()) {
private int getDataStoreReadTimeout(final ConnectorContext connectorContext) {
    int result = DEFAULT_DATASTORE_READ_TIMEOUT;
    try {
        result = Integer.parseInt(
            connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT));
    } catch (final Exception ignored) {
    }
    return result;
}
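// A minimal, self-contained sketch (not from the source) of the parse-with-fallback
// pattern used by getDataStoreReadTimeout and getDataStoreTimeout: a missing or
// non-numeric value silently yields the default. The key and values are illustrative.
import java.util.Map;

final class TimeoutFallbackSketch {
    static int intOrDefault(final Map<String, String> configuration, final String key, final int fallback) {
        try {
            // Integer.parseInt throws on null as well as on non-numeric input
            return Integer.parseInt(configuration.get(key));
        } catch (final Exception ignored) {
            return fallback;
        }
    }

    public static void main(final String[] args) {
        System.out.println(intOrDefault(Map.of("datastore.read.timeout", "60000"),
            "datastore.read.timeout", 300000)); // -> 60000
        System.out.println(intOrDefault(Map.of(), "datastore.read.timeout", 300000)); // -> 300000
    }
}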
/**
 * Constructor.
 *
 * @param connectorInfoConverter connector info converter
 * @param connectorContext       connector related config
 */
public SpringConnectorFactory(final ConnectorInfoConverter connectorInfoConverter,
                              final ConnectorContext connectorContext) {
    this.catalogName = connectorContext.getCatalogName();
    this.catalogShardName = connectorContext.getCatalogShardName();
    this.ctx = new AnnotationConfigApplicationContext();
    this.ctx.setEnvironment(new StandardEnvironment());
    this.ctx.getBeanFactory().registerSingleton("ConnectorContext", connectorContext);
    this.ctx.getBeanFactory().registerSingleton("ConnectorInfoConverter", connectorInfoConverter);
}
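// A minimal, runnable sketch of the registration pattern above, using hypothetical
// Greeting/GreetingConfig types (not from the source). It shows how an externally
// constructed object registered as a singleton becomes injectable by type into
// @Bean factory methods, which is how the beans below receive the ConnectorContext.
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

final class SingletonRegistrationSketch {
    static final class Greeting {
        final String text;
        Greeting(final String text) { this.text = text; }
    }

    @Configuration
    static class GreetingConfig {
        @Bean
        String message(final Greeting greeting) { // the registered singleton is injected by type
            return "bean says: " + greeting.text;
        }
    }

    public static void main(final String[] args) {
        final AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
        // register a pre-built object, as SpringConnectorFactory does with ConnectorContext
        ctx.getBeanFactory().registerSingleton("Greeting", new Greeting("hello"));
        ctx.register(GreetingConfig.class);
        ctx.refresh();
        System.out.println(ctx.getBean(String.class)); // -> bean says: hello
        ctx.close();
    }
}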
/**
 * create warehouse for file system calls.
 *
 * @param connectorContext connector config context
 * @return WareHouse
 */
@Bean
public Warehouse warehouse(final ConnectorContext connectorContext) {
    try {
        final HiveConf conf = this.getDefaultConf(connectorContext);
        connectorContext.getConfiguration().forEach(conf::set);
        return new Warehouse(conf);
    } catch (Exception e) {
        throw new IllegalArgumentException(
            String.format(
                "Failed creating the hive warehouse for catalog: %s",
                connectorContext.getCatalogName()
            ),
            e
        );
    }
}
/**
 * Constructor.
 *
 * @param context               connector context
 * @param metacatHiveClient     hive client
 * @param hiveMetacatConverters hive converter
 */
public HiveConnectorPartitionService(
    final ConnectorContext context,
    final IMetacatHiveClient metacatHiveClient,
    final HiveConnectorInfoConverter hiveMetacatConverters
) {
    this.metacatHiveClient = metacatHiveClient;
    this.hiveMetacatConverters = hiveMetacatConverters;
    this.catalogName = context.getCatalogName();
    this.context = context;
}
/**
 * hive DataSource.
 *
 * @param connectorContext connector config.
 * @return data source
 */
@Bean
public DataSource hiveDataSource(final ConnectorContext connectorContext) {
    final HiveConf conf = this.getDefaultConf(connectorContext);
    connectorContext.getConfiguration().forEach(conf::set);
    DataSourceManager.get().load(
        connectorContext.getCatalogShardName(),
        connectorContext.getConfiguration()
    );
    return DataSourceManager.get().get(connectorContext.getCatalogShardName());
}
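// A hedged sketch (an assumption, not from the source): wrapping the DataSource above
// into the JdbcTemplate that DirectSqlGetPartition receives via
// @Qualifier("hiveReadJdbcTemplate"). The bean name matches that qualifier; the
// query timeout value is illustrative only.
@Bean("hiveReadJdbcTemplate")
public JdbcTemplate hiveReadJdbcTemplate(final DataSource hiveDataSource) {
    final JdbcTemplate template = new JdbcTemplate(hiveDataSource);
    template.setQueryTimeout(60); // seconds; hypothetical default
    return template;
}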
/**
 * Constructor.
 *
 * @param connectorContext connector context
 */
public IcebergTableHandler(final ConnectorContext connectorContext) {
    this.conf = new Configuration();
    this.connectorContext = connectorContext;
    this.registry = connectorContext.getRegistry();
    connectorContext.getConfiguration().keySet()
        .forEach(key -> conf.set(key, connectorContext.getConfiguration().get(key)));
    this.icebergTableCriteria = new IcebergTableCriteriaImpl(connectorContext);
    this.icebergTableOpWrapper = new IcebergTableOpWrapper(connectorContext);
}
/**
 * {@inheritDoc}
 */
@Override
public void deletePartitions(
    final ConnectorRequestContext requestContext,
    final QualifiedName tableName,
    final List<String> partitionNames,
    final TableInfo tableInfo
) {
    // TODO: to be implemented as a next step
    if (context.getConfig().isIcebergEnabled() && HiveTableUtil.isIcebergTable(tableInfo)) {
        throw new MetacatNotSupportedException("IcebergTable Unsupported Operation!");
    }
    // The direct sql based deletion doesn't check whether the partition is valid
    if (Boolean.parseBoolean(getContext().getConfiguration()
        .getOrDefault(HiveConfigConstants.USE_FAST_DELETION, "false"))) {
        directSqlSavePartition.delete(tableName, partitionNames);
    } else {
        // will throw an exception if the partitions are invalid
        super.deletePartitions(requestContext, tableName, partitionNames, tableInfo);
    }
}
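// A hedged sketch (hypothetical map, not from the source) of toggling the fast-deletion
// path above through the catalog configuration:
final Map<String, String> configuration = new HashMap<>();
configuration.put(HiveConfigConstants.USE_FAST_DELETION, "true"); // deletes go through directSqlSavePartition
// an absent key (or "false") keeps the validating super.deletePartitions(...) path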
private String getIcebergPartitionURI(final String databaseName,
                                      final String tableName,
                                      final String partitionName,
                                      @Nullable final Long dataTimestampMillis) {
    return String.format("%s://%s.%s/%s/snapshot_time=%s",
        context.getConfig().getIcebergPartitionUriScheme(),
        databaseName,
        tableName,
        partitionName,
        (dataTimestampMillis == null) ? partitionName.hashCode()
            : Instant.ofEpochMilli(dataTimestampMillis).getEpochSecond());
}
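// A minimal, runnable sketch (all values hypothetical) reproducing the URI format
// above, assuming the configured partition URI scheme is "s3":
import java.time.Instant;

final class IcebergPartitionUriSketch {
    public static void main(final String[] args) {
        final String uri = String.format("%s://%s.%s/%s/snapshot_time=%s",
            "s3", "logs_db", "events", "dateint=20240101",
            Instant.ofEpochMilli(1704153600000L).getEpochSecond());
        System.out.println(uri); // -> s3://logs_db.events/dateint=20240101/snapshot_time=1704153600
        // with a null dataTimestampMillis, getIcebergPartitionURI substitutes partitionName.hashCode()
    }
}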
/**
 * thread Service Manager.
 *
 * @param connectorContext connector config
 * @return threadServiceManager
 */
@Bean
public ThreadServiceManager threadServiceManager(final ConnectorContext connectorContext) {
    return new ThreadServiceManager(connectorContext.getRegistry(),
        connectorContext.getConfig().getServiceMaxNumberOfThreads(),
        1000,
        "hive");
}
/**
 * create hive connector fast service metric.
 *
 * @param connectorContext connector config
 * @return HiveConnectorFastServiceMetric
 */
@Bean
public HiveConnectorFastServiceMetric hiveConnectorFastServiceMetric(
    final ConnectorContext connectorContext
) {
    return new HiveConnectorFastServiceMetric(
        connectorContext.getRegistry()
    );
}
private int getDataStoreTimeout(final ConnectorContext connectorContext) {
    int result = DEFAULT_DATASTORE_TIMEOUT;
    try {
        result = Integer.parseInt(
            connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT));
    } catch (final Exception ignored) {
    }
    return result;
}