/**
 * Inserts the given migration jobs into the "migrationJob" entity in batches
 * of at most 100, de-duplicating each batch via {@link #filterDups} before
 * the insert.
 *
 * @param mjList jobs to create; may be empty, in which case nothing is sent
 * @throws LightblueException if an insert request fails
 */
private void batchCreate(List<MigrationJob> mjList) throws LightblueException {
    final int batchSize = 100;
    List<MigrationJob> batch = new ArrayList<>(batchSize);
    for (MigrationJob mj : mjList) {
        batch.add(mj);
        if (batch.size() >= batchSize) {
            insertJobBatch(batch);
            batch.clear();
        }
    }
    // Flush the final, possibly partial, batch
    insertJobBatch(batch);
}

/**
 * De-duplicates the batch in place, then inserts whatever remains.
 * A no-op when the batch is empty after de-duplication.
 */
private void insertJobBatch(List<MigrationJob> batch) throws LightblueException {
    filterDups(batch);
    if (!batch.isEmpty()) {
        DataInsertRequest req = new DataInsertRequest("migrationJob", null);
        req.create(batch);
        lbClient.data(req);
    }
}
/**
 * Initializes the controller with its configuration, a lightblue client and
 * locking domain ("migration"), resolves the migrator class (falling back to
 * {@link DefaultMigrator} when none is configured), and creates the thread
 * group that migrator threads will run in.
 *
 * @param controller             parent controller supplying the lightblue client
 * @param migrationConfiguration configuration for this controller instance
 * @param threadGroupName        name for the migrator thread group
 * @throws RuntimeException if the configured migrator class cannot be loaded
 */
public AbstractController(Controller controller, MigrationConfiguration migrationConfiguration, String threadGroupName) {
    this.migrationConfiguration = migrationConfiguration;
    this.controller = controller;
    lbClient = controller.getLightblueClient();
    locking = lbClient.getLocking("migration");
    if (migrationConfiguration.getMigratorClass() == null) {
        migratorClass = DefaultMigrator.class;
    } else {
        try {
            migratorClass = Class.forName(migrationConfiguration.getMigratorClass());
        } catch (Exception e) {
            // Include the offending class name so a misconfigured
            // migratorClass is diagnosable from the exception alone
            throw new RuntimeException("Cannot load migrator class: "
                    + migrationConfiguration.getMigratorClass(), e);
        }
    }
    migratorThreads = new ThreadGroup(threadGroupName);
}
/**
 * Load migration configuration based on its id
 *
 * @param migrationConfigurationId the _id of the configuration document
 * @return the matching configuration, as returned by lightblue
 */
public MigrationConfiguration loadMigrationConfiguration(String migrationConfigurationId) throws IOException, LightblueException {
    DataFindRequest request = new DataFindRequest("migrationConfiguration", null);
    request.where(Query.withValue("_id", Query.eq, migrationConfigurationId));
    request.select(Projection.includeFieldRecursively("*"));
    LOGGER.debug("Loading configuration");
    return lightblueClient.data(request, MigrationConfiguration.class);
}
/**
 * Read configurations from the database whose name matches this instance
 * name
 *
 * @return all configurations whose consistencyCheckerName equals this
 *         checker's configured name
 */
public MigrationConfiguration[] getMigrationConfigurations() throws IOException, LightblueException {
    DataFindRequest request = new DataFindRequest("migrationConfiguration", null);
    request.where(Query.withValue("consistencyCheckerName", Query.eq, cfg.getName()));
    request.select(Projection.includeFieldRecursively("*"));
    LOGGER.debug("Loading configuration:{}", request.getBody());
    return lightblueClient.data(request, MigrationConfiguration[].class);
}
/** * Retrieves jobs that are available, and their scheduled time has passed. * Returns at most batchSize jobs starting at startIndex */ public MigrationJob[] retrieveJobs(int batchSize, int startIndex, JobType jobType) throws IOException, LightblueException { LOGGER.debug("Retrieving jobs: batchSize={}, startIndex={}", batchSize, startIndex); DataFindRequest findRequest = new DataFindRequest("migrationJob", null); List<Query> conditions = new ArrayList<>(Arrays.asList(new Query[] { // get jobs for this configuration Query.withValue("configurationName", Query.eq, migrationConfiguration.getConfigurationName()), // get jobs whose state ara available Query.withValue("status", Query.eq, "available"), // only get jobs that are Query.withValue("scheduledDate", Query.lte, new Date()) })); if (jobType == JobType.GENERATED) { LOGGER.debug("Looking for generated job"); conditions.add(Query.withValue("generated", Query.eq, true)); } else if (jobType == JobType.NONGENERATED) { LOGGER.debug("Looking for non generated job"); conditions.add(Query.withValue("generated", Query.eq, false)); } findRequest.where(Query.and(conditions)); findRequest.select(Projection.includeField("*")); findRequest.range(startIndex, startIndex + batchSize - 1); LOGGER.debug("Finding Jobs to execute: {}", findRequest.getBody()); return lbClient.data(findRequest, MigrationJob[].class); }
/**
 * Fetches all source documents matched by this migration job's query,
 * excluding the objectType field.
 *
 * @return the matched documents; never null
 * @throws RuntimeException wrapping any retrieval failure, with the
 *                          original exception preserved as the cause
 */
@Override
public List<JsonNode> getSourceDocuments() {
    LOGGER.debug("Retrieving source docs");
    try {
        DataFindRequest sourceRequest = new DataFindRequest(
                getMigrationConfiguration().getSourceEntityName(),
                getMigrationConfiguration().getSourceEntityVersion());
        sourceRequest.where(Query.query((ContainerNode) JSON.toJsonNode(getMigrationJob().getQuery())));
        sourceRequest.select(Projection.includeFieldRecursively("*"), Projection.excludeField("objectType"));
        LOGGER.debug("Source docs retrieval req: {}", sourceRequest.getBody());
        JsonNode[] results = getSourceCli().data(sourceRequest, JsonNode[].class);
        LOGGER.debug("There are {} source docs", results.length);
        return Arrays.asList(results);
    } catch (Exception e) {
        // Pass the exception as the last argument so SLF4J logs the full stack trace
        LOGGER.error("Error while retrieving source documents", e);
        // Preserve the cause instead of flattening it into the message string
        throw new RuntimeException("Cannot retrieve source documents:" + e, e);
    }
}
/**
 * Looks up the destination documents matching the given identities and
 * appends any that are found to {@code dest}. Builds a single find request:
 * one OR clause per identity, each clause ANDing together that identity's
 * key-field values. A no-op when {@code ids} is null or empty.
 */
private void doDestinationDocumentFetch(List<Identity> ids, List<JsonNode> dest) throws Exception {
    if (ids == null || ids.isEmpty()) {
        return;
    }
    DataFindRequest request = new DataFindRequest(getMigrationConfiguration().getDestinationEntityName(),
            getMigrationConfiguration().getDestinationEntityVersion());
    List<Query> identityClauses = new ArrayList<>();
    for (Identity identity : ids) {
        List<Query> fieldClauses = new ArrayList<>();
        int fieldIndex = 0;
        for (String keyField : getMigrationConfiguration().getDestinationIdentityFields()) {
            Object value = identity.get(fieldIndex);
            // Null key values are matched as null, everything else as a string
            fieldClauses.add(Query.withValue(keyField, Query.eq, value == null ? null : value.toString()));
            fieldIndex++;
        }
        identityClauses.add(Query.and(fieldClauses));
    }
    request.where(Query.or(identityClauses));
    request.select(Projection.includeFieldRecursively("*"), Projection.excludeField("objectType"));
    LOGGER.debug("Fetching destination docs {}", request.getBody());
    JsonNode[] found = getDestCli().data(request, JsonNode[].class);
    if (found != null) {
        LOGGER.debug("There are {} destination docs", found.length);
        for (JsonNode doc : found) {
            dest.add(doc);
        }
    }
}
private LightblueResponse saveBatch(List<JsonNode> documentsToOverwrite) throws LightblueResponseException { // LightblueClient - save & overwrite documents DataSaveRequest saveRequest = new DataSaveRequest(getMigrationConfiguration().getDestinationEntityName(), getMigrationConfiguration().getDestinationEntityVersion()); saveRequest.setUpsert(true); saveRequest.create(documentsToOverwrite.toArray()); saveRequest.returns(Projection.includeField("*")); LightblueResponse response; try { response = getDestCli().data(saveRequest); } catch (LightblueException ex) { // bad things happened, bail! throw new RuntimeException(ex); } return response; }
/**
 * Read a configuration from the database whose name matches the the given
 * configuration name
 *
 * @param configurationName the configurationName to look up, scoped to this
 *                          checker's consistencyCheckerName
 */
public MigrationConfiguration getMigrationConfiguration(String configurationName) throws IOException, LightblueException {
    DataFindRequest request = new DataFindRequest("migrationConfiguration", null);
    Query byName = Query.withValue("configurationName", Query.eq, configurationName);
    Query byChecker = Query.withValue("consistencyCheckerName", Query.eq, cfg.getName());
    request.where(Query.and(byName, byChecker));
    request.select(Projection.includeFieldRecursively("*"));
    LOGGER.debug("Loading configuration:{}", request.getBody());
    return lightblueClient.data(request, MigrationConfiguration.class);
}
// Best-effort duplicate lookup: on failure dups stays null and de-duplication
// is presumably skipped for this batch — TODO confirm against the caller
MigrationJob[] dups = null;
try {
    dups = lbClient.data(req, MigrationJob[].class);
} catch (Exception e) {
    LOGGER.error("Cannot de-dup", e);
LightblueDataResponse response = client.data(r);
// NOTE(review): result goes to stdout rather than the logger — fine for a
// command-line utility; confirm this is not daemon code before changing
System.out.println(response.getText());
/**
 * Returns true if any non-generated migration jobs in the available state
 * exist for this controller's configuration. On query failure this returns
 * true — presumably the safe default for callers deciding whether to
 * generate new jobs; verify against the call site.
 */
private boolean migrationJobsExist() {
    LOGGER.debug("Checking if there are migration jobs for {}", migrationConfiguration.getConfigurationName());
    DataFindRequest req = new DataFindRequest("migrationJob", null);
    req.where(Query.and(Query.withValue("configurationName", Query.eq, migrationConfiguration.getConfigurationName()),
            Query.withValue("generated", Query.eq, false),
            Query.withValue("status", Query.eq, MigrationJob.STATE_AVAILABLE)));
    // Only _id is projected: the result docs are unused, only the match count matters
    req.select(Projection.includeField("_id"));
    // NOTE(review): other queries in this file use 0-based ranges (e.g. range(0, 250));
    // range(1, 1) here looks like an off-by-one — confirm it is intentional
    req.range(1, 1);
    try {
        LightblueDataResponse resp = lbClient.data(req);
        return resp.parseMatchCount() > 0;
    } catch (Exception e) {
        LOGGER.error("Cannot query migration jobs:{}", e, e);
        return true;
    }
}
// Fetch the next page of matching jobs; delete them by _id, or stop looping
// when none remain (the enclosing loop is outside this view)
MigrationJob[] jobs = cli.data(findRequest, MigrationJob[].class);
if (jobs != null && jobs.length > 0) {
    LOGGER.debug("Deleting {} jobs", jobs.length);
    DataDeleteRequest del = new DataDeleteRequest("migrationJob", null);
    del.where(Query.withValues("_id", Query.in, ids(jobs)));
    cli.data(del);
} else {
    break;
/**
 * Finds jobs stuck in the active state (with an active execution whose
 * actualStartDate is older than enableBefore) and flips their status back to
 * available, in pages of up to 250. Loops at most 10 times; each successful
 * update makes the next find return the following batch of stuck jobs, and
 * the loop exits early once no stuck jobs remain.
 *
 * @param cli          client to query and update with
 * @param enableBefore jobs started before this instant are considered stuck
 */
public void enableStuckJobs(LightblueClient cli, Date enableBefore) {
    DataFindRequest findRequest = new DataFindRequest("migrationJob", null);
    // Find active jobs that's been sitting for too long
    findRequest.where(Query.and(Query.withValue("status", Query.eq, MigrationJob.STATE_ACTIVE),
            Query.arrayMatch("jobExecutions",
                    Query.and(Query.withValue("status", Query.eq, MigrationJob.STATE_ACTIVE),
                            Query.withValue("actualStartDate", Query.lt, new Literal(enableBefore))))));
    // Only _id is needed: the update below is keyed on it
    findRequest.select(Projection.includeField("_id"));
    findRequest.range(0, 250);
    LOGGER.debug("Re-enabling stuck jobs");
    for (int loop = 0; loop < 10; loop++) {
        try {
            MigrationJob[] jobs = cli.data(findRequest, MigrationJob[].class);
            if (jobs != null && jobs.length > 0) {
                LOGGER.warn("Re-enabling {} active stuck jobs", jobs.length);
                DataUpdateRequest upd = new DataUpdateRequest("migrationJob", null);
                upd.where(Query.withValues("_id", Query.in, ids(jobs)));
                upd.updates(Update.set("status", MigrationJob.STATE_AVAILABLE));
                LOGGER.debug("update:{}", upd.getBodyJson());
                cli.data(upd);
            } else {
                break;
            }
        } catch (Exception e) {
            // Best-effort: log and retry on the next loop iteration
            LOGGER.error("Error re-activating jobs", e);
        }
    }
}
/**
 * Persists the given batch of new migration jobs, then writes the current
 * timestampInitialValue back to this configuration's document.
 */
private void update(List<MigrationJob> mjList) throws Exception {
    batchCreate(mjList);
    DataUpdateRequest request = new DataUpdateRequest("migrationConfiguration", null);
    request.where(Query.withValue("_id", Query.eq, migrationConfiguration.get_id()));
    request.updates(Update.set("timestampInitialValue",
            Literal.value(migrationConfiguration.getTimestampInitialValue())));
    lbClient.data(request);
}
// Fragment (surrounding statements not visible): first update marks progress,
// second — built outside this view — appears to set actualEndDate; on failure
// the error log includes whichever response was last received, if any
LOGGER.debug("Req:{}", updateRequest.getBody());
response = lbClient.data(updateRequest);
ping("Updated migration job");
more("actualEndDate", Literal.value(new Date()))));
response = lbClient.data(updateRequest);
} catch (Exception e) {
    LOGGER.error("Cannot update job {}, {} response:{}", migrationJob.get_id(), e,
            (response == null ? "null" : response.getJson()));