public static DocRouter getDocRouter(String routerName) {
  DocRouter router = routerMap.get(routerName);
  if (router != null) return router;
  throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
      "Unknown document router '" + routerName + "'");
}
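This is the lookup-or-throw idiom that recurs throughout these results: consult a registry map and fail fast with an exception naming the missing key. A minimal self-contained sketch of the same idiom (the Registry class and PLUGINS map are hypothetical stand-ins for Solr's routerMap, not Solr code):

  import java.util.Map;

  public class Registry {
    // Hypothetical registry; stands in for Solr's routerMap.
    private static final Map<String, Runnable> PLUGINS = Map.of("noop", () -> {});

    public static Runnable lookup(String name) {
      Runnable plugin = PLUGINS.get(name);
      if (plugin != null) return plugin;
      // Solr throws SolrException(ErrorCode.SERVER_ERROR, ...) at this point.
      throw new IllegalArgumentException("Unknown plugin '" + name + "'");
    }
  }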
/** Get the param from params, fail if not found. **/
@Override
public String get(String param) {
  String val = params.get(param);
  if (val == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
        "Missing required parameter: " + param);
  }
  return val;
}
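This getter belongs to Solr's required-parameter wrapper (RequiredSolrParams): a missing parameter becomes a 400 BAD_REQUEST rather than a null. A hedged usage fragment, assuming a SolrQueryRequest named req:

  // required(): a missing "q" raises SolrException(BAD_REQUEST, "Missing required parameter: q")
  String q = req.getParams().required().get("q");
  // plain get() with a default stays lenient for optional parameters
  String rows = req.getParams().get("rows", "10");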
private void unregister(String key, SolrInfoMBean infoBean) {
  if (server == null) return;
  try {
    ObjectName name = getObjectName(key, infoBean);
    if (server.isRegistered(name)) {
      server.unregisterMBean(name);
    } else {
      LOG.info("Failed to unregister mbean: " + key + " because it was not registered");
    }
  } catch (Exception e) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
        "Failed to unregister info bean: " + key, e);
  }
}
try {
  config = new SolrConfig(loader, dcore.getConfigName(), null);
} catch (Exception e) {
  log.error("Failed to load file {}", new File(instanceDir, dcore.getConfigName()).getAbsolutePath());
  throw new SolrException(ErrorCode.SERVER_ERROR, "Could not load config for " + dcore.getConfigName(), e);
}
// ...
// reconstructed: the cache key pairs the schema file path with its last-modified timestamp
String key = schemaFile.getAbsolutePath() + ":"
    + new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT).format(new Date(schemaFile.lastModified()));
schema = indexSchemaCache.get(key);
if (schema == null) {
  log.info("creating new schema object for core: " + dcore.getProperty(CoreDescriptor.CORE_NAME));
  schema = IndexSchemaFactory.buildIndexSchema(dcore.getSchemaName(), config);
  indexSchemaCache.put(key, schema);
} else {
  log.info("re-using schema object for core: " + dcore.getProperty(CoreDescriptor.CORE_NAME));
}
Map<String, ElevationObj> getElevationMap(IndexReader reader, SolrCore core) throws Exception {
  synchronized (elevationCache) {
    // the null key holds a config loaded from the conf directory, which does not
    // vary per reader; if present it always wins
    Map<String, ElevationObj> map = elevationCache.get(null);
    if (map != null) return map;
    map = elevationCache.get(reader);
    if (map == null) {
      String f = initArgs.get(CONFIG_FILE);
      if (f == null) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
            "QueryElevationComponent must specify argument: " + CONFIG_FILE);
      }
      log.info("Loading QueryElevation from data dir: " + f);
      InputStream is = VersionedFile.getLatestFile(core.getDataDir(), f);
      Config cfg = new Config(core.getResourceLoader(), f, is, null);
      map = loadElevationMap(cfg);
      elevationCache.put(reader, map);
    }
    return map;
  }
}
if (subpackages == null || subpackages.length == 0 || subpackages == packages) {
  subpackages = packages;
  String c = classNameCache.get(cname);
  if (c != null) {
    try {
      return Class.forName(c, true, classLoader).asSubclass(expectedType);  // reconstructed from context
    } catch (ClassNotFoundException e) {
      log.error("Unable to load cached class-name : " + c + " for shortname : " + cname, e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Error loading class '" + cname + "'", e, false);
    } finally {
      // ...
    }
  }
}
try {
  SESSION_WRAPPPER_REF.set(sessionWrapper = getSession(delegatingManager));
} catch (Exception e) {
  throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "unable to get autoscaling policy session", e);
}
// ...
ReplicaInfo replicaInfo = details.getOrDefault(collName, emptyMap()).getOrDefault(shardName, singletonList(null)).get(0);
if (replicaInfo != null) {
  Object idxSz = replicaInfo.getVariables().get(FREEDISK.perReplicaValue);
  if (idxSz != null) {
    diskSpaceReqd.put(shardName, 1.5 * (Double) Variable.Type.FREEDISK.validate(null, idxSz, false));
  }
}
// ...
if (diskSpaceReqd.get(shardName) != null) {
  suggester.hint(Hint.MINFREEDISK, diskSpaceReqd.get(shardName));
}
// ...
String errorId = "AutoScaling.error.diagnostics." + System.nanoTime();
Policy.Session sessionCopy = suggester.session;
log.error("errorId : " + errorId + " "
    + handleExp(log, "", () -> Utils.writeJson(getDiagnostics(sessionCopy), new StringWriter(), true).toString()));
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
    "No node can satisfy the rules " + Utils.toJSONString(Utils.getDeepCopy(session.expandedClauses, 4, true))
    + " More details from logs in node : " + Utils.getMDCNode() + ", errorId : " + errorId);
// ...
positions.add(new ReplicaPosition(shardName, ++idx, e.getKey(), op.getParams().get(NODE)));
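Worth noting in the fragment above: the free-disk hint asks for 1.5 times the reported index size, so a move is only suggested onto nodes with half again as much free space as the index itself. The arithmetic in isolation (values hypothetical):

  // headroom arithmetic behind Hint.MINFREEDISK
  double indexSizeGb = 10.0;                 // size reported under FREEDISK.perReplicaValue
  double minFreeDiskGb = 1.5 * indexSizeGb;  // 15.0 GB of free disk required on the target node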
/**
 * Swaps two SolrCore descriptors.
 *
 * @param n0 the name of the first core
 * @param n1 the name of the second core
 */
public void swap(String n0, String n1) {
  if (n0 == null || n1 == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can not swap unnamed cores.");
  }
  synchronized (cores) {
    SolrCore c0 = cores.get(n0);
    SolrCore c1 = cores.get(n1);
    if (c0 == null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No such core: " + n0);
    if (c1 == null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No such core: " + n1);
    cores.put(n0, c1);
    cores.put(n1, c0);
    c0.setName(n1);
    c1.setName(n0);
  }
  log.info("swapped: " + n0 + " with " + n1);
}
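Callers typically reach this through the CoreAdmin API rather than invoking swap() directly; the HTTP form of a swap looks like this (host and core names hypothetical):

  http://localhost:8983/solr/admin/cores?action=SWAP&core=core0&other=core1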
LOG.info("New index installed. Updating index properties..."); File idxprops = new File(solrCore.getDataDir() + "index.properties"); Properties p = new Properties(); p.load(is); } catch (Exception e) { LOG.error("Unable to load index.properties"); } finally { IOUtils.closeQuietly(is); p.store(os, "index properties"); } catch (Exception e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to write index.properties", e); } finally {
/**
 * Return a formatter appropriate for this field. If a formatter
 * has not been configured for this field, fall back to the configured
 * default or the solr default (SimpleHTMLFormatter).
 *
 * @param fieldName The name of the field
 * @param params The params controlling Highlighting
 * @return An appropriate Formatter.
 */
protected Formatter getFormatter(String fieldName, SolrParams params) {
  String str = params.getFieldParam(fieldName, HighlightParams.FORMATTER);
  // the default formatter is registered under the null (and empty) key, so a
  // missing per-field setting still resolves; only an unknown name throws
  SolrFormatter formatter = formatters.get(str);
  if (formatter == null) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown formatter: " + str);
  }
  return formatter.getFormatter(fieldName, params);
}
/**
 * Cleanup everything.
 */
private void cleanup() {
  try {
    // close the FileOutputStream (which also closes the Channel)
    fileOutputStream.close();
  } catch (Exception e) {
    LOG.error("Error closing the file stream: " + this.saveAs, e);
  }
  try {
    post.releaseConnection();
  } catch (Exception e) {
    /* noop */
  }
  if (bytesDownloaded != size) {
    // the download is incomplete: delete the partial file
    try {
      file.delete();
    } catch (Exception e) {
      LOG.error("Error deleting file in cleanup", e);
    }
    // a user abort returns normally; anything else is an error
    if (!aborted) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Unable to download " + fileName + " completely. Downloaded "
          + bytesDownloaded + "!=" + size);
    }
  }
}
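The same guard reads more simply with java.nio and try-with-resources; a minimal sketch of the delete-on-incomplete-download logic, with hypothetical names, not Solr's actual replication code:

  import java.io.IOException;
  import java.nio.file.Files;
  import java.nio.file.Path;

  static void cleanup(Path file, long bytesDownloaded, long expectedSize, boolean aborted) throws IOException {
    if (bytesDownloaded != expectedSize) {
      Files.deleteIfExists(file);  // discard the partial download
      if (!aborted) {
        throw new IOException("Unable to download " + file + " completely. Downloaded "
            + bytesDownloaded + " != " + expectedSize);
      }
    }
  }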
queryPath = baseUri + queryPath;
// ...
if (in == null) {
  throw new SolrException(ErrorCode.NOT_FOUND, queryPath + " not found");
} else {
  try {
    contents = IOUtils.toString(in);
  } catch (IOException e) {
    LoggerFactory.getLogger(AppServerComponent.class).error("An error occurred while reading " + queryPath, e);
  }
}
// ...
if (scheme.equals("lux")) {
  throw new SolrException(ErrorCode.NOT_FOUND, queryPath + " not found (actually lux: scheme is not implemented)");
} else {
  InputStream in = null;
  File f = new File(url.getPath());
  if (!f.exists()) {
    throw new SolrException(ErrorCode.NOT_FOUND, f + " not found");
  }
  // ...
  throw new SolrException(ErrorCode.FORBIDDEN, "access to " + f + " denied by rule");
  // ...
}
// ...
LoggerFactory.getLogger(AppServerComponent.class).error("URL scheme not supported: " + url.getProtocol());
// ...
LoggerFactory.getLogger(AppServerComponent.class).error("An error occurred while reading " + url, e);
String carrotAlgorithmClassName = initParams.get(CarrotParams.ALGORITHM);
try {
  this.clusteringAlgorithmClass = core.getResourceLoader().findClass(
      carrotAlgorithmClassName, IClusteringAlgorithm.class);  // reconstructed call; args inferred from context
  // ...
}
// ...
String componentName = initParams.get(ClusteringEngine.ENGINE_NAME);
log.info("Initializing Clustering Engine '"
    + MoreObjects.firstNonNull(componentName, "<no 'name' attribute>") + "'");
// ...
try {
  initAttributes.putAll(defaultSet.getAttributeValues());
} catch (Exception e) {
  throw new SolrException(ErrorCode.SERVER_ERROR,
      "Could not read attributes XML for clustering component: " + componentName, e);
}
// ...
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
    CarrotClusteringEngine.class.getSimpleName() + " requires the schema to have a uniqueKeyField");
public QParserPlugin getQueryPlugin(String parserName) {
  QParserPlugin plugin = qParserPlugins.get(parserName);
  if (plugin != null) return plugin;
  throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
      "Unknown query type '" + parserName + "'");
}
public void inform(SolrCore core) {
  String a = initArgs.get(FIELD_TYPE);
  if (a != null) {
    FieldType ft = core.getSchema().getFieldTypes().get(a);
    if (ft == null) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Unknown FieldType: '" + a + "' used in QueryElevationComponent");
    }
    // ...
  }
  // ...
  throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
      "QueryElevationComponent requires the schema to have a uniqueKeyField");
  // ...
  synchronized (elevationCache) {
    elevationCache.clear();
    String f = initArgs.get(CONFIG_FILE);
    if (f == null) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "QueryElevationComponent must specify argument: '" + CONFIG_FILE + "' -- path to elevate.xml");
    }
    File fC = new File(core.getResourceLoader().getConfigDir(), f);  // reconstructed: conf-dir copy of the file
    File fD = new File(core.getDataDir(), f);
    // the file must live in exactly one of the two locations
    if (fC.exists() == fD.exists()) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "QueryElevationComponent missing config file: '" + f + "'\n"
          + "either: " + fC.getAbsolutePath() + " or " + fD.getAbsolutePath() + " must exist, but not both.");
    }
    // ...
  }
  // ...
  throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
      "Error initializing QueryElevationComponent.", ex);
}
private void downloadConfFiles(List<Map<String, Object>> confFilesToDownload, long latestVersion) throws Exception {
  LOG.info("Starting download of configuration files from master: " + confFilesToDownload);
  confFilesDownloaded = Collections.synchronizedList(new ArrayList<Map<String, Object>>());
  File tmpconfDir = new File(solrCore.getResourceLoader().getConfigDir(), "conf." + getDateAsStr(new Date()));
  try {
    boolean status = tmpconfDir.mkdirs();
    if (!status) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
          "Failed to create temporary config folder: " + tmpconfDir.getName());
    }
    for (Map<String, Object> file : confFilesToDownload) {
      String saveAs = (String) (file.get(ALIAS) == null ? file.get(NAME) : file.get(ALIAS));
      fileFetcher = new FileFetcher(tmpconfDir, file, saveAs, true, latestVersion);
      currentFile = file;
      fileFetcher.fetchFile();
      confFilesDownloaded.add(new HashMap<String, Object>(file));
    }
    // called before copying the files into the original conf dir, so that an
    // exception here cannot corrupt the original files
    terminateAndWaitFsyncService();
    copyTmpConfFiles2Conf(tmpconfDir);
  } finally {
    delTree(tmpconfDir);
  }
}
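The shape of this method, fetch everything into a throwaway directory, swap into place only on success, delete the temp dir regardless, can be sketched with plain java.nio (hypothetical names; this is not Solr's replication code):

  import java.io.IOException;
  import java.nio.file.DirectoryStream;
  import java.nio.file.Files;
  import java.nio.file.Path;
  import java.nio.file.StandardCopyOption;

  static void downloadThenSwap(Path confDir) throws IOException {
    Path tmp = Files.createTempDirectory(confDir.getParent(), "conf.");
    try {
      // ... fetch each file into tmp ...
      // only copy into the live conf dir once every fetch succeeded
      try (DirectoryStream<Path> files = Files.newDirectoryStream(tmp)) {
        for (Path f : files) {
          Files.copy(f, confDir.resolve(f.getFileName()), StandardCopyOption.REPLACE_EXISTING);
        }
      }
    } finally {
      // best-effort cleanup, mirroring delTree(tmpconfDir)
      try (DirectoryStream<Path> files = Files.newDirectoryStream(tmp)) {
        for (Path f : files) Files.deleteIfExists(f);
      } catch (IOException ignored) {
      }
      Files.deleteIfExists(tmp);
    }
  }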
/**
 * Load the datatype analyzer config file specified by the schema.
 * <br/>
 * This should be called whenever the datatype analyzer configuration file changes.
 */
private void loadDatatypeConfig(final SolrResourceLoader resourceLoader) {
  InputStream is;
  log.info("Loading datatype analyzer configuration file at " + datatypeAnalyzerConfigPath);
  try {
    is = resourceLoader.openResource(datatypeAnalyzerConfigPath);
  } catch (final IOException e) {
    log.error("Error loading datatype analyzer configuration file at " + datatypeAnalyzerConfigPath, e);
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
  }
  try {
    final SirenDatatypeAnalyzerConfig newConfig = new SirenDatatypeAnalyzerConfig(resourceLoader,
        datatypeAnalyzerConfigPath, new InputSource(is), this.luceneDefaultVersion);
    log.info("Read new datatype analyzer configuration " + newConfig);
    datatypeConfigRef.set(newConfig);
  } finally {
    if (is != null) {
      try {
        is.close();
      } catch (final IOException ignored) {
      }
    }
  }
}
/**
 * Load QNames mapping from the properties file.
 * <p>
 * The mapping file contains lines such as:
 * <ul>
 * <li> foaf=http://xmlns.com/foaf/0.1/
 * </ul>
 * which means that the qname <code>foaf:name</code> will be expanded to
 * <code>http://xmlns.com/foaf/0.1/name</code>.
 */
protected void loadQNamesFile(final ResourceLoader loader) {
  try {
    logger.info("Loading of the QNames mapping file: {}", qnamesFile);
    qnames = new Properties();
    qnames.load(loader.openResource(qnamesFile));
  } catch (final IOException e) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
        "Loading of the QNames mapping file failed: [" + qnamesFile + "]", e);
  }
}
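One nit in the snippet above: the stream returned by loader.openResource() is never closed. A try-with-resources sketch of the same load, as a hypothetical standalone method reading from the filesystem rather than a ResourceLoader:

  import java.io.IOException;
  import java.io.InputStream;
  import java.nio.file.Files;
  import java.nio.file.Path;
  import java.util.Properties;

  static Properties loadQNames(Path qnamesFile) throws IOException {
    Properties qnames = new Properties();
    // the stream is closed even if load() throws
    try (InputStream in = Files.newInputStream(qnamesFile)) {
      qnames.load(in);
    }
    return qnames;
  }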