/**
 * Creates the {@link CoreAdminHandler} used to administer the cores in this container.
 *
 * @param adminHandlerClass fully qualified name of the handler implementation class
 * @return a CoreAdminHandler
 * @throws SolrException if the configured class is not a CoreAdminHandler
 */
protected CoreAdminHandler createMultiCoreHandler(final String adminHandlerClass) {
  // Resolve the handler class through a dedicated loader backed by libLoader.
  final SolrResourceLoader handlerLoader = new SolrResourceLoader(null, libLoader, null);
  final Object handler = handlerLoader.newAdminHandlerInstance(CoreContainer.this, adminHandlerClass);
  if (handler instanceof CoreAdminHandler) {
    return (CoreAdminHandler) handler;
  }
  throw new SolrException(
      SolrException.ErrorCode.SERVER_ERROR,
      "adminHandlerClass is not of type "+ CoreAdminHandler.class );
}
/**
 * Returns the configuration directory of this core's resource loader.
 *
 * @return the config directory path reported by the resource loader
 * @deprecated Use {@link SolrResourceLoader#getConfigDir()} instead.
 */
@Deprecated
public String getConfigDir() {
  // Pure delegation; kept only for backward compatibility.
  final String configDir = loader.getConfigDir();
  return configDir;
}
@Override public InputStream openResource(String resourceName) { ByteBuffer resourceValue = null; try { resourceValue = SolandraCoreContainer.readCoreResource(coreName, resourceName); } catch (Exception e) { throw new RuntimeException("Error opening " + resourceName, e); } //fallback to solr.home if (resourceValue == null) { return super.openResource(resourceName); } else { return new ByteArrayInputStream(ByteBufferUtil.getArray(resourceValue)); } } }
public void inform(SolrCore core) { if (initArgs != null) { String tikaConfigLoc = (String) initArgs.get(CONFIG_LOCATION); if (tikaConfigLoc != null) { File configFile = new File(tikaConfigLoc); if (configFile.isAbsolute() == false) { configFile = new File(core.getResourceLoader().getConfigDir(), configFile.getPath()); throw new SolrException(ErrorCode.SERVER_ERROR, e); config = TikaConfig.getDefaultConfig(); } catch (TikaException e) { throw new SolrException(ErrorCode.SERVER_ERROR, e); NamedList configDateFormats = (NamedList) initArgs.get(DATE_FORMATS); if (configDateFormats != null && configDateFormats.size() > 0) { dateFormats = new HashSet<String>(); while (configDateFormats.iterator().hasNext()) { config = TikaConfig.getDefaultConfig(); } catch (TikaException e) { throw new SolrException(ErrorCode.SERVER_ERROR, e);
LOG.info("Initializing spell checkers"); boolean hasDefault = false; for (int i = 0; i < initParams.size(); i++) { if (initParams.getName(i).equals("spellchecker")) { NamedList spellchecker = (NamedList) initParams.getVal(i); String className = (String) spellchecker.get("classname"); if (className == null) className = IndexBasedSpellChecker.class.getName(); SolrResourceLoader loader = core.getResourceLoader(); SolrSpellChecker checker = (SolrSpellChecker) loader.newInstance(className); if (checker != null) { String dictionary = checker.init(spellchecker, core); core.registerFirstSearcherListener(new SpellCheckerListener(core, checker, false, false)); boolean buildOnCommit = Boolean.parseBoolean((String) spellchecker.get("buildOnCommit")); boolean buildOnOptimize = Boolean.parseBoolean((String) spellchecker.get("buildOnOptimize")); if (buildOnCommit || buildOnOptimize) { LOG.info("Registering newSearcher listener for spellchecker: " + checker.getDictionaryName()); core.registerNewSearcherListener(new SpellCheckerListener(core, checker, buildOnCommit, buildOnOptimize)); IndexSchema schema = core.getSchema(); String fieldTypeName = (String) initParams.get("queryAnalyzerFieldType"); FieldType fieldType = schema.getFieldTypes().get(fieldTypeName); Analyzer analyzer = fieldType == null ? new WhitespaceAnalyzer() : fieldType.getQueryAnalyzer();
/**
 * Builds a single-core CoreContainer around the configured SolrConfig and
 * IndexSchema and registers the core under {@code coreName}.
 *
 * @return the initialized CoreContainer holding the registered core
 */
@Override
public CoreContainer initialize() {
  final SolrResourceLoader homeLoader =
      new SolrResourceLoader(SolrResourceLoader.locateSolrHome());
  final CoreContainer container = new CoreContainer(homeLoader);

  final CoreDescriptor descriptor =
      new CoreDescriptor(container, coreName, solrConfig.getResourceLoader().getInstanceDir());
  descriptor.setConfigName(solrConfig.getResourceName());
  descriptor.setSchemaName(indexSchema.getResourceName());

  final SolrCore core = new SolrCore(null, dataDirectory, solrConfig, indexSchema, descriptor);
  container.register(coreName, core, false);
  return container;
}
}
boolean searchHasDefault = false; boolean documentHasDefault = false; for (int i = 0; i < initParams.size(); i++) { if (initParams.getName(i).equals("engine")) { NamedList engineNL = (NamedList) initParams.getVal(i); String className = (String) engineNL.get("classname"); if (className == null) { className = CarrotClusteringEngine.class.getName(); SolrResourceLoader loader = core.getResourceLoader(); ClusteringEngine clusterer = (ClusteringEngine) loader.newInstance(className); if (clusterer != null) { String name = clusterer.init(engineNL, core);
is = new InputSource(new StringReader(dataConfigText)); } else if(dataconfigFile!=null) { is = new InputSource(core.getResourceLoader().openResource(dataconfigFile)); is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile)); log.info("Loading DIH Configuration: " + dataconfigFile); if(defaultParams!=null) { int position = 0; while (position < defaultParams.size()) { if (defaultParams.getName(position) == null) { break; String name = defaultParams.getName(position); if (name.equals("datasource")) { success = true;
SolrResourceLoader loader = core.getResourceLoader(); Boolean optional = engineInitParams.getBooleanArg("optional"); optional = (optional == null ? Boolean.FALSE : optional); (String) engineInitParams.get("classname"), CarrotClusteringEngine.class.getName()); final ClusteringEngine engine = loader.newInstance(engineClassName, ClusteringEngine.class); final String name = StringUtils.defaultIfBlank(engine.init(engineInitParams, core), ""); log.info("Optional clustering engine not available: " + name); } else { throw new SolrException(ErrorCode.SERVER_ERROR, "A required clustering engine failed to initialize, check the logs: " + name);
SolrResourceLoader loader = new SolrResourceLoader(Paths.get(solrHomeDir.toString()), null, null); solrHomeDir, solrHomeDir.toUri(), loader.getInstancePath(), loader.getConfigDir(), dataDirStr, outputShardDir)); System.setProperty("solr.autoSoftCommit.maxTime", "-1"); CoreContainer container = new CoreContainer(loader); container.load(); SolrCore core = container.create("core1", Paths.get(solrHomeDir.toString()), ImmutableMap.of(CoreDescriptor.CORE_DATADIR, dataDirStr), false); if (!(core.getDirectoryFactory() instanceof HdfsDirectoryFactory)) { throw new UnsupportedOperationException( "Invalid configuration. Currently, the only DirectoryFactory supported is "
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "can not find logging file: "+loggingConfig ); instanceDir = SolrResourceLoader.locateInstanceDir(); CoreContainer cores = new CoreContainer(new SolrResourceLoader(instanceDir)); SolrConfig solrConfig = new SolrConfig(instanceDir, SolrConfig.DEFAULT_CONF_FILE, null); CoreDescriptor dcore = new CoreDescriptor(cores, "", solrConfig.getResourceLoader().getInstanceDir()); IndexSchema indexSchema = new IndexSchema(solrConfig, instanceDir+"/conf/schema.xml", null); core = new SolrCore( null, dataDir, solrConfig, indexSchema, dcore); cores.register("", core, false); parser = new SolrRequestParsers( solrConfig );
synonymAnalyzers = new HashMap<>(); Object xmlSynonymAnalyzers = args.get("synonymAnalyzers"); tokenizerFactory = loader.newInstance(className, TokenizerFactory.class, new String[]{}, new Class[] { Map.class }, new Object[] { params }); filterFactory = loader.newInstance(className, TokenFilterFactory.class, new String[]{}, new Class[] { Map.class }, new Object[] { params }); throw new SolrException(ErrorCode.SERVER_ERROR, "tokenizer must not be null for synonym analyzer: " + analyzerName); } else if (filterFactories.isEmpty()) { throw new SolrException(ErrorCode.SERVER_ERROR, "filter factories must be defined for synonym analyzer: " + analyzerName); throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to create parser. Check your config.", e);
public String init(NamedList config, SolrCore core) { super.init(config, core); indexDir = (String) config.get(INDEX_DIR); String accuracy = (String) config.get(ACCURACY); indexDir = core.getDataDir() + File.separator + indexDir; sourceLocation = (String) config.get(LOCATION); field = (String) config.get(FIELD); String strDistanceName = (String)config.get(STRING_DISTANCE); StringDistance sd = null; if (strDistanceName != null) { sd = (StringDistance) core.getResourceLoader().newInstance(strDistanceName); if (field != null && core.getSchema().getFieldTypeNoEx(field) != null) { analyzer = core.getSchema().getFieldType(field).getQueryAnalyzer(); fieldTypeName = (String) config.get(FIELD_TYPE); if (core.getSchema().getFieldTypes().containsKey(fieldTypeName)) { FieldType fieldType = core.getSchema().getFieldTypes().get(fieldTypeName); analyzer = fieldType.getQueryAnalyzer();
public CoreContainer initialize() throws IOException, ParserConfigurationException, SAXException { CoreContainer cores = null; String solrHome = SolrResourceLoader.locateSolrHome(); File fconf = new File(solrHome, solrConfigFilename == null? "solr.xml": solrConfigFilename); log.info("looking for solr.xml: " + fconf.getAbsolutePath()); cores = new CoreContainer(); cores.load(solrHome, fconf); abortOnConfigurationError = false; for (SolrCore c : cores.getCores()) { if (c.getSolrConfig().getBool("abortOnConfigurationError", false)) { abortOnConfigurationError = true; break; CoreDescriptor dcore = new CoreDescriptor(cores, "", "."); dcore.setCoreProperties(null); SolrResourceLoader resourceLoader = new SolrResourceLoader(solrHome, null, getCoreProps(solrHome, null,dcore.getCoreProperties())); cores.loader = resourceLoader; SolrConfig cfg = solrConfigFilename == null ? new SolrConfig(resourceLoader, SolrConfig.DEFAULT_CONF_FILE,null) : new SolrConfig(resourceLoader, solrConfigFilename,null); IndexSchema schema = indexSchemaFilename != null ? new IndexSchema(cfg, indexSchemaFilename, null) : null; SolrCore singlecore = new SolrCore(null, null, cfg, schema, dcore); abortOnConfigurationError = cfg.getBool( "abortOnConfigurationError", abortOnConfigurationError);
LOG.info("init: " + config); String name = super.init(config, core); threshold = config.get(THRESHOLD_TOKEN_FREQUENCY) == null ? 0.0f : (Float)config.get(THRESHOLD_TOKEN_FREQUENCY); sourceLocation = (String) config.get(LOCATION); lookupImpl = (String)config.get(LOOKUP_IMPL); IndexSchema schema = core.getLatestSchema(); suggestionAnalyzerFieldTypeName = (String)config.get(SUGGESTION_ANALYZER_FIELDTYPE); if (schema.getFieldTypes().containsKey(suggestionAnalyzerFieldTypeName)) { FieldType fieldType = schema.getFieldTypes().get(suggestionAnalyzerFieldTypeName); suggestionAnalyzer = fieldType.getQueryAnalyzer(); factory = core.getResourceLoader().newInstance(lookupImpl, LookupFactory.class); storeDir = new File(store); if (!storeDir.isAbsolute()) { storeDir = new File(core.getDataDir() + File.separator + storeDir);
/**
 * Collects status information for the named core: name, instance/data dirs,
 * start time, uptime, and index statistics. Returns an empty list when the
 * core does not exist.
 *
 * @param cores the container to look the core up in
 * @param cname the core name
 * @return a NamedList of status entries (empty if the core is unknown)
 * @throws IOException if reading index information fails
 */
protected NamedList<Object> getCoreStatus(CoreContainer cores, String cname) throws IOException {
  final NamedList<Object> status = new SimpleOrderedMap<Object>();
  final SolrCore core = cores.getCore(cname);
  if (core == null) {
    return status;
  }
  try {
    status.add("name", core.getName());
    status.add("instanceDir", normalizePath(core.getResourceLoader().getInstanceDir()));
    status.add("dataDir", normalizePath(core.getDataDir()));
    status.add("startTime", new Date(core.getStartTime()));
    status.add("uptime", System.currentTimeMillis() - core.getStartTime());
    final RefCounted<SolrIndexSearcher> searcher = core.getSearcher();
    try {
      status.add("index", LukeRequestHandler.getIndexInfo(searcher.get().getReader(), false));
    } finally {
      // Release the searcher reference before the core itself.
      searcher.decref();
    }
  } finally {
    // getCore() incremented the core's refcount; always balance it.
    core.close();
  }
  return status;
}
SolrResourceLoader loader = new SolrResourceLoader(solrHomeDir.toString(), null, null); solrHomeDir, solrHomeDir.toUri(), loader.getInstanceDir(), loader.getConfigDir(), dataDirStr, outputShardDir)); System.setProperty("solr.autoSoftCommit.maxTime", "-1"); CoreContainer container = new CoreContainer(loader); container.load(); SolrCore core = container.create(descr); if (!(core.getDirectoryFactory() instanceof HdfsDirectoryFactory)) { throw new UnsupportedOperationException( "Invalid configuration. Currently, the only DirectoryFactory supported is "
final SolrResourceLoader loader = req.getCore().getResourceLoader(); File configdir = new File( loader.getConfigDir() ); String fname = req.getParams().get("file", null); if( fname == null ) { fname = fname.replace( '\\', '/' ); // normalize slashes if( hiddenFiles.contains( fname.toUpperCase() ) ) { throw new SolrException( ErrorCode.FORBIDDEN, "Can not access: "+fname ); throw new SolrException( ErrorCode.FORBIDDEN, "Invalid path: "+fname ); throw new SolrException( ErrorCode.BAD_REQUEST, "Can not find: "+adminFile.getName() + " ["+adminFile.getAbsolutePath()+"]" ); files.add( path, fileInfo ); if( f.isDirectory() ) { fileInfo.add( "directory", true );
/** * For configuration files, checksum of the file is included because, unlike index files, they may have same content * but different timestamps. * <p/> * The local conf files information is cached so that everytime it does not have to compute the checksum. The cache is * refreshed only if the lastModified of the file changes */ List<Map<String, Object>> getConfFileInfoFromCache(NamedList<String> nameAndAlias, final Map<String, FileInfo> confFileInfoCache) { List<Map<String, Object>> confFiles = new ArrayList<Map<String, Object>>(); synchronized (confFileInfoCache) { File confDir = new File(core.getResourceLoader().getConfigDir()); Checksum checksum = null; for (int i = 0; i < nameAndAlias.size(); i++) { String cf = nameAndAlias.getName(i); File f = new File(confDir, cf); if (!f.exists() || f.isDirectory()) continue; //must not happen FileInfo info = confFileInfoCache.get(cf); if (info == null || info.lastmodified != f.lastModified() || info.size != f.length()) { if (checksum == null) checksum = new Adler32(); info = new FileInfo(f.lastModified(), cf, f.length(), getCheckSum(checksum, f)); confFileInfoCache.put(cf, info); } Map<String, Object> m = info.getAsMap(); if (nameAndAlias.getVal(i) != null) m.put(ALIAS, nameAndAlias.getVal(i)); confFiles.add(m); } } return confFiles; }
final SolrParams initParams = config.toSolrParams(); new ClassLoaderLocator(core.getResourceLoader().getClassLoader())); this.clusteringAlgorithmClass = core.getResourceLoader().findClass( carrotAlgorithmClassName, IClusteringAlgorithm.class); } catch (SolrException s) { if (!(s.getCause() instanceof ClassNotFoundException)) { throw s; withContextClassLoader(core.getResourceLoader().getClassLoader(), () -> { try { AttributeValueSets avs = AttributeValueSets.deserialize(attributeXmls[0].open()); initAttributes.putAll(defaultSet.getAttributeValues()); } catch (Exception e) { throw new SolrException(ErrorCode.SERVER_ERROR, "Could not read attributes XML for clustering component: " + componentName, e); withContextClassLoader(core.getResourceLoader().getClassLoader(), () -> this.controller.init(initAttributes)); SchemaField uniqueField = core.getLatestSchema().getUniqueKeyField(); if (uniqueField == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, CarrotClusteringEngine.class.getSimpleName() + " requires the schema to have a uniqueKeyField");