/**
 * Looks up the {@link DataAccess} backing this store through the catalog's resource pool.
 *
 * @param listener progress listener (not consulted by the pool lookup)
 * @return the data access for this store info
 * @throws IOException if the resource pool cannot create or retrieve the store
 */
public DataAccess<? extends FeatureType, ? extends Feature> getDataStore(ProgressListener listener)
        throws IOException {
    ResourcePool pool = catalog.getResourcePool();
    return pool.getDataStore(this);
}
@Test public void testPropertyDataStoreRelativeUrl() throws IOException { // create dir File testDS = new File(testData.getDataDirectoryRoot(), "testDS").getCanonicalFile(); testDS.mkdir(); HashMap params = new HashMap(); params.put(PropertyDataStoreFactory.DIRECTORY.key, "file:./testDS"); params.put(PropertyDataStoreFactory.NAMESPACE.key, "http://www.geotools.org/test"); DataStoreInfoImpl info = new DataStoreInfoImpl(getGeoServer().getCatalog()); info.setConnectionParameters(params); DataAccessFactory f = getGeoServer().getCatalog().getResourcePool().getDataStoreFactory(info); assertNotNull(f); assertTrue(f instanceof PropertyDataStoreFactory); DataAccess store = getGeoServer().getCatalog().getResourcePool().getDataStore(info); assertEquals( testDS.toURI().toString().toLowerCase(), store.getInfo().getSource().toString().replace("/./", "/").toLowerCase()); } }
/**
 * Returns the feature type for the given info, caching the result in {@code featureTypeCache}.
 *
 * <p>Uses double-checked locking: the cache is probed once without the lock, then re-probed
 * under {@code synchronized (featureTypeCache)} so that a concurrent thread that already built
 * the same entry is not duplicated.
 *
 * @param info the feature type configuration to resolve
 * @param handleProjectionPolicy forwarded to {@code buildFeatureType} and part of the cache key
 * @return the cached or freshly built feature type
 * @throws IOException if the underlying data store cannot be accessed
 */
FeatureType getCacheableFeatureType(FeatureTypeInfo info, boolean handleProjectionPolicy)
        throws IOException {
    String key = getFeatureTypeInfoKey(info, handleProjectionPolicy);
    FeatureType ft = featureTypeCache.get(key);
    if (ft == null) {
        synchronized (featureTypeCache) {
            // re-check under the lock: another thread may have populated the entry meanwhile
            ft = featureTypeCache.get(key);
            if (ft == null) {
                // grab the underlying feature type
                DataAccess<? extends FeatureType, ? extends Feature> dataAccess =
                        getDataStore(info.getStore());
                // optional per-type initialization hook (e.g. virtual-table style setups)
                FeatureTypeCallback initializer = getFeatureTypeInitializer(info, dataAccess);
                if (initializer != null) {
                    initializer.initialize(info, dataAccess, null);
                }
                ft = dataAccess.getSchema(info.getQualifiedNativeName());
                ft = buildFeatureType(info, handleProjectionPolicy, ft);
                featureTypeCache.put(key, ft);
            }
        }
    }
    return ft;
}
@Test public void testWfsCascadeEntityExpansion() throws Exception { CatalogBuilder cb = new CatalogBuilder(getCatalog()); DataStoreInfo ds = cb.buildDataStore("wfs-xxe"); URL url = getClass().getResource("wfs1.1.0Capabilities-xxe.xml"); ds.getConnectionParameters().put(WFSDataStoreFactory.URL.key, url); // required or the store won't fetch caps from a file ds.getConnectionParameters().put("TESTING", Boolean.TRUE); final ResourcePool rp = getCatalog().getResourcePool(); try { rp.getDataStore(ds); fail("Store creation should have failed to to XXE attack"); } catch (Exception e) { String message = e.getMessage(); assertThat(message, containsString("Entity resolution disallowed")); assertThat(message, containsString("file:///file/not/there")); } }
private FeatureType getNonCacheableFeatureType( FeatureTypeInfo info, boolean handleProjectionPolicy) throws IOException { FeatureType ft = null; // grab the underlying feature type DataAccess<? extends FeatureType, ? extends Feature> dataAccess = getDataStore(info.getStore()); FeatureTypeCallback initializer = getFeatureTypeInitializer(info, dataAccess); Name temporaryName = null; if (initializer != null) { temporaryName = getTemporaryName(info, dataAccess, initializer); } ft = dataAccess.getSchema( temporaryName != null ? temporaryName : info.getQualifiedNativeName()); ft = buildFeatureType(info, handleProjectionPolicy, ft); // Remove layer configuration from datastore if (initializer != null && temporaryName != null) { initializer.dispose(info, dataAccess, temporaryName); } return ft; }
// NOTE(review): fragment — starts mid-method; the enclosing try block and the rest of the
// LOGGER.log(...) call lie outside this view, so only the visible tokens are documented.
// Resolves the data store for `ds`; on I/O failure falls into the logging catch below.
dataStore = getDataStore(ds);
} catch (IOException e) {
    LOGGER.log(
// NOTE(review): fragment of a test method; setup and assertions around it are outside this view.
// First lookup must succeed without triggering a dispose (presumably checking pool caching —
// TODO confirm against the surrounding test).
DataAccess<? extends FeatureType, ? extends Feature> dataStore = pool.getDataStore(info);
assertNotNull(dataStore);
assertFalse(disposeCalled);
// second lookup of the same info; flag reset for the next phase of the test
dataStore = pool.getDataStore(info);
assertNotNull(dataStore);
disposeCalled = false;
// NOTE(review): fragment — the method name and earlier parameters are outside this view.
// Visible part: trailing parameters of a signature, then resolution of the backing DataAccess.
FeatureTypeInfo info, Hints hints) throws IOException {
    // grab the underlying data access for the store this feature type belongs to
    DataAccess<? extends FeatureType, ? extends Feature> dataAccess =
            getDataStore(info.getStore());
// NOTE(review): fragment — enclosing method is outside this view.
// Successful getDataStore() here serves as the connection-parameter verification for the store.
dataStore = catalog.getResourcePool().getDataStore(info);
LOGGER.finer("connection parameters verified for store " + info.getName() + ". Got a "
        + dataStore.getClass().getName());
// NOTE(review): fragment — cut off mid-statement; the rest of the log message concatenation
// is outside this view. Successful getDataStore() doubles as connection verification.
dataStore = catalog.getResourcePool().getDataStore(info);
LOGGER.finer(
        "connection parameters verified for store "
@Override public void handleRemoveEvent(CatalogRemoveEvent event) throws CatalogException { // remove the configuration if the layer is a SOLR one if (event.getSource() instanceof FeatureTypeInfo) { FeatureTypeInfo ft = (FeatureTypeInfo) event.getSource(); Serializable config = ft.getMetadata().get(SolrLayerConfiguration.KEY); if (config instanceof SolrLayerConfiguration) { SolrLayerConfiguration slc = (SolrLayerConfiguration) config; // go directly to the resource pool to avoid security wrappers try { DataAccess<? extends FeatureType, ? extends Feature> dataStore = catalog.getResourcePool().getDataStore(ft.getStore()); if (dataStore instanceof SolrDataStore) { SolrDataStore solr = (SolrDataStore) dataStore; solr.getSolrConfigurations().remove(slc.getLayerName()); } } catch (IOException e) { throw new CatalogException( "Failed to remove layer configuration from data store", e); } } } }
private void updateSolrConfiguration(FeatureTypeInfo ft, SolrLayerConfiguration slc) { // go directly to the resource pool to avoid security wrappers try { DataAccess<? extends FeatureType, ? extends Feature> dataStore = catalog.getResourcePool().getDataStore(ft.getStore()); if (dataStore instanceof SolrDataStore) { SolrDataStore solr = (SolrDataStore) dataStore; solr.getSolrConfigurations().remove(slc.getLayerName()); slc.setLayerName(ft.getName()); solr.setSolrConfigurations(slc); } } catch (IOException e) { throw new CatalogException("Failed to remove layer configuration from data store", e); } FeatureTypeInfo proxy = catalog.getFeatureType(ft.getId()); proxy.setNativeName(ft.getName()); proxy.getMetadata().put(SolrLayerConfiguration.KEY, slc); catalog.save(proxy); }