/**
 * Creates the pending-segments table, but only when table auto-creation is
 * enabled in the supplied config; otherwise this is a no-op.
 */
@Override
public void createPendingSegmentsTable()
{
  final boolean shouldCreateTables = config.get().isCreateTables();
  if (!shouldCreateTables) {
    return;
  }
  createPendingSegmentsTable(tablesConfigSupplier.get().getPendingSegmentsTable());
}
/**
 * Deletes pending-segment rows for the given datasource whose {@code created_date}
 * lies in {@code deleteInterval} (start inclusive, end exclusive).
 *
 * NOTE(review): created_date is compared lexicographically against the interval
 * endpoints rendered as ISO-8601 strings — presumably all stored timestamps share
 * the same format/zone (UTC); confirm against the writer side.
 *
 * @return the number of rows deleted
 */
@Override
public int deletePendingSegments(String dataSource, Interval deleteInterval)
{
  final String deleteSql = StringUtils.format(
      "delete from %s where datasource = :dataSource and created_date >= :start and created_date < :end",
      dbTables.getPendingSegmentsTable()
  );
  return connector.getDBI().inTransaction(
      (handle, status) ->
          handle.createStatement(deleteSql)
                .bind("dataSource", dataSource)
                .bind("start", deleteInterval.getStart().toString())
                .bind("end", deleteInterval.getEnd().toString())
                .execute()
  );
}
+ "sequence_name = :sequence_name AND " + "sequence_prev_id = :sequence_prev_id", dbTables.getPendingSegmentsTable()
/**
 * Fetches the identifiers of all pending segments of {@code dataSource} that
 * overlap {@code interval}, using the given handle.
 *
 * The SQL WHERE clause is a coarse prefilter on the string-encoded start/end
 * columns; the exact {@link Interval#overlaps} check below is authoritative.
 * "end" is quoted via the connector because it is a reserved word on some databases.
 *
 * Fix: the ResultIterator was previously closed only on the success path; if
 * {@code jsonMapper.readValue} threw, the iterator (and its underlying statement)
 * leaked. It is now closed in a finally block.
 *
 * @param handle     open jdbi handle to run the query on
 * @param dataSource datasource whose pending segments are fetched
 * @param interval   interval to check for overlap
 * @return identifiers of pending segments overlapping the interval
 * @throws IOException if a stored payload cannot be deserialized
 */
private List<SegmentIdWithShardSpec> getPendingSegmentsForIntervalWithHandle(
    final Handle handle,
    final String dataSource,
    final Interval interval
) throws IOException
{
  final List<SegmentIdWithShardSpec> identifiers = new ArrayList<>();
  final ResultIterator<byte[]> dbSegments =
      handle.createQuery(
          StringUtils.format(
              "SELECT payload FROM %1$s WHERE dataSource = :dataSource AND start <= :end and %2$send%2$s >= :start",
              dbTables.getPendingSegmentsTable(),
              connector.getQuoteString()
          )
      )
            .bind("dataSource", dataSource)
            .bind("start", interval.getStart().toString())
            .bind("end", interval.getEnd().toString())
            .map(ByteArrayMapper.FIRST)
            .iterator();
  try {
    while (dbSegments.hasNext()) {
      final byte[] payload = dbSegments.next();
      final SegmentIdWithShardSpec identifier = jsonMapper.readValue(payload, SegmentIdWithShardSpec.class);
      // Exact overlap check; the SQL range filter alone may over-match.
      if (interval.overlaps(identifier.getInterval())) {
        identifiers.add(identifier);
      }
    }
  }
  finally {
    dbSegments.close();
  }
  return identifiers;
}
/**
 * Deletes every record from each known metadata storage table.
 * Logged loudly since this is destructive.
 */
private void resetMetadataStore(Injector injector)
{
  final String banner = "===========================================================================";
  log.info(banner);
  log.info("Deleting all Records from Metadata Storage.");
  log.info(banner);

  final MetadataStorageConnector connector = injector.getInstance(MetadataStorageConnector.class);
  final MetadataStorageTablesConfig tablesConfig = injector.getInstance(MetadataStorageTablesConfig.class);

  // Wipe each table in turn; order follows the tables-config accessors.
  for (String table : new String[]{
      tablesConfig.getDataSourceTable(),
      tablesConfig.getPendingSegmentsTable(),
      tablesConfig.getSegmentsTable(),
      tablesConfig.getRulesTable(),
      tablesConfig.getConfigTable(),
      tablesConfig.getTasksTable(),
      tablesConfig.getTaskLockTable(),
      tablesConfig.getTaskLogTable(),
      tablesConfig.getAuditTable(),
      tablesConfig.getSupervisorTable()
  }) {
    connector.deleteAllRecords(table);
  }
}
+ "start = :start AND " + "%2$send%2$s = :end", dbTables.getPendingSegmentsTable(), connector.getQuoteString()
/**
 * Inserts a row for the new pending segment into the pending-segments table.
 *
 * @param handle                 open jdbi handle to execute the insert on
 * @param newIdentifier          identifier being recorded; also serialized into the payload column
 * @param dataSource             owning datasource
 * @param interval               segment interval (start/end stored as strings)
 * @param previousSegmentId      id of the preceding segment in the sequence
 * @param sequenceName           name of the sequence this segment belongs to
 * @param sequenceNamePrevIdSha1 sha1 over (sequence name, previous id) used for lookups
 * @throws JsonProcessingException if the identifier cannot be serialized
 */
private void insertToMetastore(
    Handle handle,
    SegmentIdWithShardSpec newIdentifier,
    String dataSource,
    Interval interval,
    String previousSegmentId,
    String sequenceName,
    String sequenceNamePrevIdSha1
) throws JsonProcessingException
{
  // "end" is a reserved word on some databases, hence the connector-specific quoting.
  final String insertSql = StringUtils.format(
      "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, sequence_name, sequence_prev_id, sequence_name_prev_id_sha1, payload) "
      + "VALUES (:id, :dataSource, :created_date, :start, :end, :sequence_name, :sequence_prev_id, :sequence_name_prev_id_sha1, :payload)",
      dbTables.getPendingSegmentsTable(),
      connector.getQuoteString()
  );
  final byte[] payload = jsonMapper.writeValueAsBytes(newIdentifier);
  handle.createStatement(insertSql)
        .bind("id", newIdentifier.toString())
        .bind("dataSource", dataSource)
        .bind("created_date", DateTimes.nowUtc().toString())
        .bind("start", interval.getStart().toString())
        .bind("end", interval.getEnd().toString())
        .bind("sequence_name", sequenceName)
        .bind("sequence_prev_id", previousSegmentId)
        .bind("sequence_name_prev_id_sha1", sequenceNamePrevIdSha1)
        .bind("payload", payload)
        .execute();
}
/**
 * Conditionally creates the pending-segments table: does nothing unless
 * {@code isCreateTables()} is set on the config.
 */
@Override
public void createPendingSegmentsTable()
{
  if (config.get().isCreateTables()) {
    final String pendingSegmentsTable = tablesConfigSupplier.get().getPendingSegmentsTable();
    createPendingSegmentsTable(pendingSegmentsTable);
  }
}
/**
 * Removes all pending-segment rows of {@code dataSource} created within
 * {@code deleteInterval} (inclusive start, exclusive end), in a single transaction.
 *
 * NOTE(review): the created_date column is compared as a string against the
 * interval endpoints' string form — this assumes uniformly formatted (likely UTC
 * ISO-8601) timestamps; verify against the insert path.
 *
 * @return count of deleted rows
 */
@Override
public int deletePendingSegments(String dataSource, Interval deleteInterval)
{
  return connector.getDBI().inTransaction(
      (handle, status) -> {
        final String sql = StringUtils.format(
            "delete from %s where datasource = :dataSource and created_date >= :start and created_date < :end",
            dbTables.getPendingSegmentsTable()
        );
        return handle.createStatement(sql)
                     .bind("dataSource", dataSource)
                     .bind("start", deleteInterval.getStart().toString())
                     .bind("end", deleteInterval.getEnd().toString())
                     .execute();
      }
  );
}
/**
 * Fetches identifiers of all pending segments of {@code dataSource} overlapping
 * {@code interval}, using the given handle.
 *
 * The SQL range condition on the string-encoded start/end columns is only a
 * prefilter; the exact {@link Interval#overlaps} check below decides inclusion.
 * "end" is quoted via the connector because it is reserved on some databases.
 *
 * Fix: the ResultIterator was previously closed only on the success path; a
 * deserialization failure in {@code jsonMapper.readValue} leaked the iterator
 * and its underlying statement. It is now closed in a finally block.
 *
 * @param handle     open jdbi handle to run the query on
 * @param dataSource datasource whose pending segments are fetched
 * @param interval   interval to check for overlap
 * @return identifiers of pending segments overlapping the interval
 * @throws IOException if a stored payload cannot be deserialized
 */
private List<SegmentIdentifier> getPendingSegmentsForIntervalWithHandle(
    final Handle handle,
    final String dataSource,
    final Interval interval
) throws IOException
{
  final List<SegmentIdentifier> identifiers = Lists.newArrayList();
  final ResultIterator<byte[]> dbSegments =
      handle.createQuery(
          StringUtils.format(
              "SELECT payload FROM %1$s WHERE dataSource = :dataSource AND start <= :end and %2$send%2$s >= :start",
              dbTables.getPendingSegmentsTable(),
              connector.getQuoteString()
          )
      )
            .bind("dataSource", dataSource)
            .bind("start", interval.getStart().toString())
            .bind("end", interval.getEnd().toString())
            .map(ByteArrayMapper.FIRST)
            .iterator();
  try {
    while (dbSegments.hasNext()) {
      final byte[] payload = dbSegments.next();
      final SegmentIdentifier identifier = jsonMapper.readValue(payload, SegmentIdentifier.class);
      // Exact overlap check; the SQL range filter alone may over-match.
      if (interval.overlaps(identifier.getInterval())) {
        identifiers.add(identifier);
      }
    }
  }
  finally {
    dbSegments.close();
  }
  return identifiers;
}
+ "sequence_name = :sequence_name AND " + "sequence_prev_id = :sequence_prev_id", dbTables.getPendingSegmentsTable()
/**
 * Writes a row describing the new pending segment into the pending-segments table.
 *
 * @param handle                 open jdbi handle to execute the insert on
 * @param newIdentifier          identifier being recorded; serialized into the payload column
 * @param dataSource             owning datasource
 * @param interval               segment interval (start/end stored as strings)
 * @param previousSegmentId      id of the preceding segment in the sequence
 * @param sequenceName           name of the sequence this segment belongs to
 * @param sequenceNamePrevIdSha1 sha1 over (sequence name, previous id) used for lookups
 * @throws JsonProcessingException if the identifier cannot be serialized
 */
private void insertToMetastore(
    Handle handle,
    SegmentIdentifier newIdentifier,
    String dataSource,
    Interval interval,
    String previousSegmentId,
    String sequenceName,
    String sequenceNamePrevIdSha1
) throws JsonProcessingException
{
  // "end" must be quoted via the connector since it is reserved on some databases.
  final String sql = StringUtils.format(
      "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, sequence_name, sequence_prev_id, sequence_name_prev_id_sha1, payload) "
      + "VALUES (:id, :dataSource, :created_date, :start, :end, :sequence_name, :sequence_prev_id, :sequence_name_prev_id_sha1, :payload)",
      dbTables.getPendingSegmentsTable(),
      connector.getQuoteString()
  );
  handle.createStatement(sql)
        .bind("id", newIdentifier.getIdentifierAsString())
        .bind("dataSource", dataSource)
        .bind("created_date", DateTimes.nowUtc().toString())
        .bind("start", interval.getStart().toString())
        .bind("end", interval.getEnd().toString())
        .bind("sequence_name", sequenceName)
        .bind("sequence_prev_id", previousSegmentId)
        .bind("sequence_name_prev_id_sha1", sequenceNamePrevIdSha1)
        .bind("payload", jsonMapper.writeValueAsBytes(newIdentifier))
        .execute();
}
/**
 * Destructively clears every metadata storage table known to the tables config.
 */
private void resetMetadataStore(Injector injector)
{
  log.info("===========================================================================");
  log.info("Deleting all Records from Metadata Storage.");
  log.info("===========================================================================");

  final MetadataStorageConnector storageConnector = injector.getInstance(MetadataStorageConnector.class);
  final MetadataStorageTablesConfig tablesConfig = injector.getInstance(MetadataStorageTablesConfig.class);

  // Every table the config knows about gets wiped.
  final String[] allTables = {
      tablesConfig.getDataSourceTable(),
      tablesConfig.getPendingSegmentsTable(),
      tablesConfig.getSegmentsTable(),
      tablesConfig.getRulesTable(),
      tablesConfig.getConfigTable(),
      tablesConfig.getTasksTable(),
      tablesConfig.getTaskLockTable(),
      tablesConfig.getTaskLogTable(),
      tablesConfig.getAuditTable(),
      tablesConfig.getSupervisorTable()
  };
  for (String tableName : allTables) {
    storageConnector.deleteAllRecords(tableName);
  }
}
+ "start = :start AND " + "%2$send%2$s = :end", dbTables.getPendingSegmentsTable(), connector.getQuoteString()