/**
 * Builds a fresh {@link UpdateRequest}; when {@code waitSearcher} is enabled,
 * attaches an explicit COMMIT action that blocks until the flush completes and
 * a new searcher is registered (waitFlush=true, waitSearcher=true).
 *
 * @return a freshly configured update request
 */
private UpdateRequest newUpdateRequest() {
  // 'final' added for consistency with the sibling implementation of this
  // method elsewhere in the codebase.
  final UpdateRequest req = new UpdateRequest();
  if (waitSearcher) {
    req.setAction(UpdateRequest.ACTION.COMMIT, true, true);
  }
  return req;
}
/**
 * Sets appropriate parameters for the given ACTION, using a default of one
 * segment for the optimize case.
 *
 * @deprecated Use {@link org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION} instead
 */
public UpdateRequest setAction(ACTION action, boolean waitFlush, boolean waitSearcher) {
  // Delegate to the 4-arg overload with maxSegments defaulted to 1.
  final int defaultMaxSegments = 1;
  return setAction(action, waitFlush, waitSearcher, defaultMaxSegments);
}
/**
 * Sets parameters for the given ACTION and additionally records whether
 * deleted documents should be expunged during the operation.
 *
 * @deprecated Use {@link org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION} instead
 */
public UpdateRequest setAction(ACTION action, boolean waitFlush, boolean waitSearcher,
    int maxSegments, boolean expungeDeletes) {
  setAction(action, waitFlush, waitSearcher, maxSegments);
  // String.valueOf is the idiomatic boolean-to-string conversion; the original
  // used "" + expungeDeletes concatenation. Produces the identical string.
  params.set(UpdateParams.EXPUNGE_DELETES, String.valueOf(expungeDeletes));
  return this;
}
/**
 * Creates the update request used for flushing pending work. If
 * {@code waitSearcher} is set, the request carries a blocking COMMIT action
 * (waitFlush=true, waitSearcher=true).
 */
private UpdateRequest newUpdateRequest() {
  final UpdateRequest request = new UpdateRequest();
  if (waitSearcher) {
    request.setAction(UpdateRequest.ACTION.COMMIT, true, true);
  }
  return request;
}
// NOTE(review): this is the tail of an anonymous TimerTask whose enclosing
// schedule(...) call starts outside this view; the trailing "}, current + 100);"
// closes it with a 100 ms delay. Only commits if no other commit happened since
// 'oldLastCommit' was captured (debounce); commit errors are logged, not rethrown.
@Override public void run() { if (oldLastCommit == lastCommit) { lastCommit = System.currentTimeMillis(); UpdateRequest req = new UpdateRequest(); req.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false); try { solrServer.request(req); } catch (Exception e) { logger.error("Failed to commit", e); } } } }, current + 100);
/**
 * Performs an explicit commit, causing pending documents to be committed for
 * indexing.
 *
 * @param waitFlush block until index changes are flushed to disk
 * @param waitSearcher block until a new searcher is opened and registered as
 *        the main query searcher, making the changes visible
 * @return the server's update response
 * @throws IOException If there is a low-level I/O error.
 * @throws SolrServerException if there is an error on the server
 */
public UpdateResponse commit(boolean waitFlush, boolean waitSearcher)
    throws SolrServerException, IOException {
  final UpdateRequest commitRequest = new UpdateRequest();
  commitRequest.setAction(UpdateRequest.ACTION.COMMIT, waitFlush, waitSearcher);
  return commitRequest.process(this);
}
/**
 * Performs an explicit optimize, merging the index down to at most
 * {@code maxSegments} segments.
 *
 * @param waitFlush block until index changes are flushed to disk
 * @param waitSearcher block until a new searcher is opened and registered as
 *        the main query searcher, making the changes visible
 * @param maxSegments optimizes down to at most this number of segments
 * @return the server's update response
 * @throws IOException If there is a low-level I/O error.
 * @throws SolrServerException if there is an error on the server
 */
public UpdateResponse optimize(boolean waitFlush, boolean waitSearcher, int maxSegments)
    throws SolrServerException, IOException {
  final UpdateRequest optimizeRequest = new UpdateRequest();
  optimizeRequest.setAction(UpdateRequest.ACTION.OPTIMIZE, waitFlush, waitSearcher, maxSegments);
  return optimizeRequest.process(this);
}
// Delete all documents matching the given id, committing immediately and
// blocking until a new searcher makes the deletion visible
// (waitFlush=false, waitSearcher=true).
UpdateRequest req = new UpdateRequest();
// BUG FIX: the original was missing the '+' operator ("documentId:"documentId)
// and did not compile.
// NOTE(review): if documentId can contain Lucene query syntax or comes from
// untrusted input, it should be escaped (e.g. ClientUtils.escapeQueryChars) —
// TODO confirm against the caller.
req.deleteByQuery("documentId:" + documentId);
req.setAction(ACTION.COMMIT, false, true);
/**
 * Performs an explicit commit, causing pending documents to be committed for indexing
 *
 * Be very careful when triggering commits from the client side. Commits are heavy operations and WILL impact Solr
 * performance when executed too often or too close together. Instead, consider using 'commitWithin' when adding documents
 * or rely on your core's/collection's 'autoCommit' settings.
 *
 * @param collection the Solr collection to send the commit to
 * @param waitFlush block until index changes are flushed to disk
 * @param waitSearcher block until a new searcher is opened and registered as the
 *                     main query searcher, making the changes visible
 *
 * @return an {@link org.apache.solr.client.solrj.response.UpdateResponse} containing the response
 *         from the server
 *
 * @throws IOException If there is a low-level I/O error.
 * @throws SolrServerException if there is an error on the server
 */
public UpdateResponse commit(String collection, boolean waitFlush, boolean waitSearcher) throws SolrServerException, IOException { return new UpdateRequest() .setAction(UpdateRequest.ACTION.COMMIT, waitFlush, waitSearcher) .process(this, collection); }
/**
 * Performs an explicit commit, causing pending documents to be committed for indexing
 *
 * Be very careful when triggering commits from the client side. Commits are heavy operations and WILL impact Solr
 * performance when executed too often or too close together. Instead, consider using 'commitWithin' when adding documents
 * or rely on your core's/collection's 'autoCommit' settings.
 *
 * @param collection the Solr collection to send the commit to
 * @param waitFlush block until index changes are flushed to disk
 * @param waitSearcher block until a new searcher is opened and registered as the
 *                     main query searcher, making the changes visible
 *
 * @return an {@link org.apache.solr.client.solrj.response.UpdateResponse} containing the response
 *         from the server
 *
 * @throws IOException If there is a low-level I/O error.
 * @throws SolrServerException if there is an error on the server
 */
public UpdateResponse commit(String collection, boolean waitFlush, boolean waitSearcher)
    throws SolrServerException, IOException {
  final UpdateRequest commitRequest = new UpdateRequest();
  commitRequest.setAction(UpdateRequest.ACTION.COMMIT, waitFlush, waitSearcher);
  return commitRequest.process(this, collection);
}
/**
 * Performs an explicit optimize, causing a merge of all segments to one.
 *
 * Note: In most cases it is not required to do explicit optimize
 *
 * @param collection the Solr collection to send the optimize to
 * @param waitFlush block until index changes are flushed to disk
 * @param waitSearcher block until a new searcher is opened and registered as
 *                     the main query searcher, making the changes visible
 * @param maxSegments optimizes down to at most this number of segments
 *
 * @return an {@link org.apache.solr.client.solrj.response.UpdateResponse} containing the response
 *         from the server
 *
 * @throws IOException If there is a low-level I/O error.
 * @throws SolrServerException if there is an error on the server
 */
public UpdateResponse optimize(String collection, boolean waitFlush, boolean waitSearcher,
    int maxSegments) throws SolrServerException, IOException {
  final UpdateRequest optimizeRequest = new UpdateRequest();
  optimizeRequest.setAction(UpdateRequest.ACTION.OPTIMIZE, waitFlush, waitSearcher, maxSegments);
  return optimizeRequest.process(this, collection);
}
/**
 * Performs an explicit commit, causing pending documents to be committed for indexing
 *
 * Be very careful when triggering commits from the client side. Commits are heavy operations and WILL impact Solr
 * performance when executed too often or too close together. Instead, consider using 'commitWithin' when adding documents
 * or rely on your core's/collection's 'autoCommit' settings.
 *
 * @param collection the Solr collection to send the commit to
 * @param waitFlush block until index changes are flushed to disk
 * @param waitSearcher block until a new searcher is opened and registered as the
 *                     main query searcher, making the changes visible
 * @param softCommit makes index changes visible while neither fsync-ing index files
 *                   nor writing a new index descriptor
 *
 * @return an {@link org.apache.solr.client.solrj.response.UpdateResponse} containing the response
 *         from the server
 *
 * @throws IOException If there is a low-level I/O error.
 * @throws SolrServerException if there is an error on the server
 */
public UpdateResponse commit(String collection, boolean waitFlush, boolean waitSearcher, boolean softCommit) throws SolrServerException, IOException { return new UpdateRequest() .setAction(UpdateRequest.ACTION.COMMIT, waitFlush, waitSearcher, softCommit) .process(this, collection); }
/**
 * Performs an explicit commit, causing pending documents to be committed for indexing
 *
 * Be very careful when triggering commits from the client side. Commits are heavy operations and WILL impact Solr
 * performance when executed too often or too close together. Instead, consider using 'commitWithin' when adding documents
 * or rely on your core's/collection's 'autoCommit' settings.
 *
 * @param collection the Solr collection to send the commit to
 * @param waitFlush block until index changes are flushed to disk
 * @param waitSearcher block until a new searcher is opened and registered as the
 *                     main query searcher, making the changes visible
 * @param softCommit makes index changes visible while neither fsync-ing index files
 *                   nor writing a new index descriptor
 *
 * @return an {@link org.apache.solr.client.solrj.response.UpdateResponse} containing the response
 *         from the server
 *
 * @throws IOException If there is a low-level I/O error.
 * @throws SolrServerException if there is an error on the server
 */
public UpdateResponse commit(String collection, boolean waitFlush, boolean waitSearcher,
    boolean softCommit) throws SolrServerException, IOException {
  final UpdateRequest commitRequest = new UpdateRequest();
  commitRequest.setAction(UpdateRequest.ACTION.COMMIT, waitFlush, waitSearcher, softCommit);
  return commitRequest.process(this, collection);
}
/**
 * Executes the given update request synchronously: commitWithin is disabled
 * (-1) and an explicit non-blocking COMMIT action is attached so changes are
 * applied immediately.
 *
 * @param request the update to execute against {@code this.server}
 * @return the server's update response
 * @throws TranslatorException wrapping any Solr or I/O failure (cause preserved)
 */
@Override
public UpdateResponse update(UpdateRequest request) throws TranslatorException {
  try {
    request.setCommitWithin(-1);
    request.setAction(UpdateRequest.ACTION.COMMIT, false, false);
    return request.process(this.server);
  } catch (SolrServerException | IOException e) {
    // Multi-catch replaces two byte-identical catch blocks; same behavior.
    throw new TranslatorException(e);
  }
}
/**
 * Executes the given update request synchronously: commitWithin is disabled
 * (-1) and an explicit non-blocking COMMIT action is attached so changes are
 * applied immediately.
 *
 * @param request the update to execute against {@code this.server}
 * @return the server's update response
 * @throws TranslatorException wrapping any Solr or I/O failure (cause preserved)
 */
@Override
public UpdateResponse update(UpdateRequest request) throws TranslatorException {
  try {
    request.setCommitWithin(-1);
    request.setAction(UpdateRequest.ACTION.COMMIT, false, false);
    return request.process(this.server);
  } catch (SolrServerException | IOException e) {
    // Multi-catch replaces two byte-identical catch blocks; same behavior.
    throw new TranslatorException(e);
  }
}
/**
 * Clear given collection: deletes every document ({@code *:*}) and commits the
 * deletion, blocking until flushed and visible to searchers.
 */
static void clearCollection(String collection, AuthorizedSolrClient client) throws IOException {
  final UpdateRequest deleteAll = new UpdateRequest();
  deleteAll.setAction(UpdateRequest.ACTION.COMMIT, true, true);
  deleteAll.deleteByQuery("*:*");
  try {
    client.process(collection, deleteAll);
  } catch (SolrServerException e) {
    // Callers only deal in IOException; keep the original cause attached.
    throw new IOException(e);
  }
}
/**
 * Inserts the given number of test documents into Solr, committing and blocking
 * until the documents are flushed and visible to searchers.
 */
static void insertTestDocuments(String collection, long numDocs, AuthorizedSolrClient client)
    throws IOException {
  final List<SolrInputDocument> documents = createDocuments(numDocs);
  final UpdateRequest insert = new UpdateRequest();
  insert.setAction(UpdateRequest.ACTION.COMMIT, true, true);
  insert.add(documents);
  try {
    client.process(collection, insert);
  } catch (SolrServerException e) {
    throw new IOException("Failed to insert test documents to collection", e);
  }
}
/**
 * Forces a commit of the given collection to make recently inserted documents available for
 * search.
 *
 * @return The number of docs in the index
 * @throws IOException wrapping any SolrServerException from the commit or query
 */
static long commitAndGetCurrentNumDocs(String collection, AuthorizedSolrClient client)
    throws IOException {
  // rows=0: we only need the hit count, not the documents themselves.
  SolrQuery solrQuery = new SolrQuery("*:*");
  solrQuery.setRows(0);
  try {
    UpdateRequest update = new UpdateRequest();
    update.setAction(UpdateRequest.ACTION.COMMIT, true, true);
    client.process(collection, update);
    // BUG FIX: the original built solrQuery with rows=0 but then queried with a
    // fresh new SolrQuery("*:*"), silently discarding the rows=0 optimization
    // and fetching default-size result pages just to read getNumFound().
    return client.query(collection, solrQuery).getResults().getNumFound();
  } catch (SolrServerException e) {
    throw new IOException(e);
  }
}