/**
 * Runs one crawl session end to end: processes the queued references,
 * resolves orphans, commits pending documents, cleans up the download
 * directory, and fires the appropriate completion event.
 * <p>
 * Orphan handling is skipped when a stop has been requested. The final
 * event fired is {@code CRAWLER_STOPPED} when stopped, otherwise
 * {@code CRAWLER_FINISHED}.
 *
 * @param statusUpdater receives job progress updates during processing
 * @param suite the job suite this crawler job belongs to
 * @param crawlDataStore store of crawl data (references and their state)
 */
protected void execute(JobStatusUpdater statusUpdater,
        JobSuite suite, ICrawlDataStore crawlDataStore) {
    LOG.info(getId() + ": Crawling references...");

    // Context handed to reference processing; presumably cloned per
    // reference by the pipeline (NOTE(review): name suggests prototype
    // semantics — confirm in processReferences).
    ImporterPipelineContext importContext =
            new ImporterPipelineContext(this, crawlDataStore);
    processReferences(statusUpdater, suite, importContext);

    // Only reconcile orphaned references on a normal (non-stopped) run.
    if (!isStopped()) {
        handleOrphans(crawlDataStore, statusUpdater, suite);
    }

    // Flush any documents still pending in the committer, if one is set.
    ICommitter docCommitter = getCrawlerConfig().getCommitter();
    if (docCommitter != null) {
        String phase = isStopped() ? "stopping" : "finishing";
        LOG.info(getId() + ": Crawler " + phase
                + ": committing documents.");
        docCommitter.commit();
    }

    LOG.info(getId() + ": " + processedCount + " reference(s) processed.");

    LOG.debug(getId() + ": Removing empty directories");
    FileUtil.deleteEmptyDirs(getCrawlerDownloadDir());

    // Fire the terminal lifecycle event matching how the run ended.
    fireCrawlerEvent(
            isStopped()
                    ? CrawlerEvent.CRAWLER_STOPPED
                    : CrawlerEvent.CRAWLER_FINISHED,
            null, this);

    LOG.info(getId() + ": Crawler "
            + (isStopped() ? "stopped." : "completed."));
}