/**
 * Loads a {@link Gatherer} implementation by class name and instantiates it.
 * <p>
 * Prefers a single-argument constructor accepting a {@code Settings} instance
 * (invoked with this object's {@code settings} field); falls back to the
 * no-arg constructor.
 *
 * @param className   fully qualified name of the gatherer class
 * @param classLoader loader used to resolve {@code className}
 * @return a new gatherer instance
 * @throws ElasticSearchException if the class cannot be loaded, has no usable
 *                                constructor, or instantiation fails
 */
@SuppressWarnings("unchecked")
private Gatherer loadGatherer(String className, ClassLoader classLoader) {
    try {
        Class<? extends Gatherer> gathererClass =
                (Class<? extends Gatherer>) classLoader.loadClass(className);
        try {
            // Preferred: Settings-accepting constructor.
            return gathererClass.getConstructor(Settings.class).newInstance(settings);
        } catch (NoSuchMethodException e) {
            try {
                // Fallback: default constructor.
                return gathererClass.getConstructor().newInstance();
            } catch (NoSuchMethodException e1) {
                // FIX: chain e1 so the failed lookup is visible in the stack trace
                // (the cause was previously dropped).
                throw new ElasticSearchException("No constructor for [" + gathererClass + "]. A gatherer class must "
                        + "have either an empty default constructor or a single argument constructor accepting a "
                        + "Settings instance", e1);
            }
        }
    } catch (Exception e) {
        throw new ElasticSearchException("Failed to load gatherer class [" + className + "]", e);
    }
}
/**
 * Reacts to an {@code ElasticSearchException} raised during index creation.
 * <p>
 * A {@code BAD_REQUEST} status is treated as a permanent client error: it is
 * logged (with the caller's class name for context) and deliberately NOT
 * rethrown, so the caller will not retry. Any other status is escalated as a
 * {@code FailedException}.
 *
 * @param clazz caller's class, used only for the log prefix
 * @param e     the exception reported by Elasticsearch
 */
// NOTE(review): the FailedException is built from e.getMessage() only — the
// original cause and its stack trace are lost. Consider chaining e if
// FailedException has a (String, Throwable) constructor — TODO confirm.
public static void handleElasticSearchException(Class<?> clazz, ElasticSearchException e) {
    if (RestStatus.BAD_REQUEST.equals(e.status())) {
        LOGGER.error(clazz.getCanonicalName() + " - Bad request: " + e.getMessage() + ". DO NOT RETRY!");
    } else {
        throw new FailedException("Cannot create index via ES: " + e.getMessage());
    }
}
// Closing brace of the enclosing class (header not visible in this chunk).
}
// Fragment from a larger method (enclosing signature not visible): validates the
// computed storage slot for a document's ordinal before reporting a failure.
indexForDoc -= storageArrayIndex << MAX_STORAGE_SIZE_SHIFT;
if (indexForDoc >= storageArray.length)
    // FIX: the format string has six %s placeholders but only firstDoc was
    // supplied, which would raise MissingFormatArgumentException instead of the
    // intended diagnostic. All six values named in the message are now passed.
    throw new ElasticSearchException(
            String.format("Ordinal overflow for docId %s. storageArrayIndex: %s, storageArray.length: %s,"
                            + " indexForDoc:%s, currentIndexForDocs[docId]: %s, firstDoc: %s",
                    docId, storageArrayIndex, storageArray.length,
                    indexForDoc, currentIndexForDocs[docId], firstDoc));
if (storageArray[indexForDoc] != 0)
    throw new ElasticSearchException(
            String.format("Ordinal overflow for docId %s.", docId));
// NOTE(review): unconditional throw — in the original chunk control can only
// reach here on the multi-ordinal error path; verify against the full method.
throw new ElasticSearchException(
        String.format("We expected one ordinal for docId %s but got more.", docId));
/**
 * Recursively writes {@code value} into {@code root} under the dot-separated
 * path {@code part}, creating intermediate {@code HashMap}s as needed.
 * <p>
 * Example: {@code writeMap(root, v, "a.b.c")} yields
 * {@code root = {a: {b: {c: v}}}}.
 *
 * @param root  map to write into (mutated in place)
 * @param value leaf value to store
 * @param part  dotted path, e.g. {@code "a.b.c"}
 * @throws ElasticSearchException if a path segment collides with an existing
 *                                non-map value (or vice versa)
 */
@SuppressWarnings("unchecked") // o is verified to be a Map before the cast
private void writeMap(Map<String, Object> root, Object value, String part) {
    if (part.contains(".")) {
        // Split off the first segment; recurse on the remainder.
        String[] parts = part.split("\\.", 2);
        Object o = root.get(parts[0]);
        if (o == null) {
            o = new HashMap<String, Object>();
        } else if (!(o instanceof Map)) {
            throw new ElasticSearchException("Error on rewriting objects: Mixed objects and values");
        }
        Map<String, Object> sub = (Map<String, Object>) o;
        writeMap(sub, value, parts[1]);
        root.put(parts[0], sub);
    } else {
        // FIX: removed the redundant (Map<String, Object>) cast — root already
        // has that static type.
        if (root.get(part) instanceof Map) {
            throw new ElasticSearchException("Error on rewriting objects: Mixed objects and values");
        }
        root.put(part, value);
    }
}
/**
 * Opens an HTTP connection to the river endpoint and returns its body stream.
 * <p>
 * When {@code lastUpdateTimestamp} is non-null it is appended as a
 * URL-encoded query parameter (using {@code ?} or {@code &} depending on
 * whether {@code riverUrl} already carries a query string).
 *
 * @param lastUpdateTimestamp optional timestamp filter; may be {@code null}
 * @return the open response body stream (caller is responsible for closing it)
 * @throws IOException            on connection failure
 * @throws ElasticSearchException if the endpoint answers with a non-200 status
 */
public InputStream getConnectionInputstream(String lastUpdateTimestamp) throws IOException {
    URL url;
    if (lastUpdateTimestamp == null) {
        url = new URL(riverUrl);
    } else {
        char delimiter = riverUrl.contains("?") ? '&' : '?';
        url = new URL(riverUrl + delimiter + TIMESTAMP_PARAMETER + "="
                + URLEncoder.encode(lastUpdateTimestamp, "UTF-8"));
    }
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setUseCaches(false);
    connection.setConnectTimeout(TIMEOUT);
    connection.setReadTimeout(TIMEOUT);
    // FIX: read the status once instead of three separate calls.
    int responseCode = connection.getResponseCode();
    if (responseCode != 200) {
        String message = String.format("River endpoint problem for url %s: Connection response code was %s %s",
                url, responseCode, connection.getResponseMessage());
        // FIX: release the connection on the error path; it was previously
        // left open when the exception was thrown.
        connection.disconnect();
        throw new ElasticSearchException(message);
    }
    return connection.getInputStream();
}
// NOTE(review): extraction residue — these throws were collected from several
// distinct validation sites of a larger method; as written, every statement
// after the first throw is unreachable and this fragment will not compile.
// Each throw belongs next to its own precondition check in the full source.
String name = request.getRequest().getName();
if (name == null) {
    throw new ElasticSearchException("no name given");
throw new ElasticSearchException("no path given");
throw new ElasticSearchException("no bytes in request");
throw new ElasticSearchException("refusing cowardly to overwrite existing path: " + dir.getAbsolutePath());
// NOTE(review): wraps only e.getMessage() — the original cause is lost;
// prefer new ElasticSearchException(e.getMessage(), e) in the full source.
throw new ElasticSearchException(e.getMessage());
// NOTE(review): the message "::" carries no diagnostic information — presumably
// a placeholder; consider describing the failed operation. The cause e is
// correctly chained. (Fragment; enclosing method not visible in this chunk.)
throw new ElasticSearchException("::" ,e);
/**
 * Folds this payload into {@code map} under {@code key}: stores it directly
 * when the key is absent, otherwise replaces the existing entry with the
 * merge of the two payloads.
 *
 * @return this payload, for chaining
 * @throws ElasticSearchException if the cardinality structures cannot be merged
 */
DistinctCountPayload mergeInto(final TLongObjectMap<DistinctCountPayload> map, final long key) {
    if (!map.containsKey(key)) {
        // First payload seen for this key — store as-is.
        map.put(key, this);
    } else {
        try {
            map.put(key, this.merge(map.get(key)));
        } catch (final CardinalityMergeException e) {
            throw new ElasticSearchException("Unable to merge two facet cardinality objects", e);
        }
    }
    return this;
}
/**
 * Generic-key counterpart of {@code mergeInto(TLongObjectMap, long)}: stores
 * this payload under {@code key} when absent, otherwise replaces the existing
 * entry with the merge of the two payloads.
 *
 * @return this payload, for chaining
 * @throws ElasticSearchException if the cardinality structures cannot be merged
 */
<K> DistinctCountPayload mergeInto(final ExtTHashMap<K, DistinctCountPayload> map, final K key) {
    if (!map.containsKey(key)) {
        // First payload seen for this key — store as-is.
        map.put(key, this);
    } else {
        try {
            map.put(key, this.merge(map.get(key)));
        } catch (final CardinalityMergeException e) {
            throw new ElasticSearchException("Unable to merge two facet cardinality objects", e);
        }
    }
    return this;
}
/**
 * Builds the {@code SuggestRequest}: takes the base request from the parent
 * and attaches the serialized suggestion body as content.
 *
 * @return the populated suggest request
 * @throws ElasticSearchException if serializing the suggestion fails
 */
@Override
protected SuggestRequest request() {
    final SuggestRequest suggestRequest = super.request();
    try {
        final XContentBuilder content = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
        suggest.toXContent(content, ToXContent.EMPTY_PARAMS);
        suggestRequest.suggest(content.bytes());
    } catch (IOException e) {
        throw new ElasticSearchException("Unable to build suggestion request", e);
    }
    return suggestRequest;
}
@Inject public HunspellStemFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) { super(index, indexSettings, name, settings); this.name = name; this.locale = settings.get("locale", "en_US"); this.ignoreCase = settings.getAsBoolean("ignoreCase", Boolean.TRUE); this.dedup = settings.getAsBoolean("dedup", Boolean.TRUE); if (!locales.contains(locale)) { throw new ElasticSearchException("invalid locale '" + locale + "' for hunspell aff/dic"); } try { InputStream affixStream = HunspellStemFilterFactory.class.getResourceAsStream(locale + ".aff"); InputStream dictStream = HunspellStemFilterFactory.class.getResourceAsStream(locale + ".dic"); this.dictionary = new HunspellDictionary(affixStream, dictStream, version, ignoreCase); affixStream.close(); dictStream.close(); } catch (IOException ex) { logger.error("hunspell aff/dic stream I/O error for locale " + locale, ex); } catch (ParseException ex) { logger.error("hunspell aff/dic stream parse failure for locale " + locale, ex); } }
// NOTE(review): truncated fragment — the String.format argument list and the
// closing parentheses lie outside this chunk; cannot verify that the three %s
// placeholders are matched by arguments. Also: "has a exceeded" should read
// "has exceeded" (runtime string — left untouched here; fix in full source).
throw new ElasticSearchException(
        String.format("Number of values for doc %s has a exceeded the maximum allowed "
                + "(got %s values, max %s)",
// NOTE(review): tail of a try/catch/finally — the try body and the rest of the
// finally block lie outside this chunk. The cause e is correctly chained; the
// finally presumably closes the (token) stream when non-null — confirm in the
// full source, ideally via try-with-resources.
throw new ElasticSearchException("failed to analyze", e);
} finally {
    if (stream != null) {