/**
 * Looks up the {@link FilterCapabilities} of the JDBC data store backing the given source.
 *
 * @param mappedSource the feature source whose backing store is inspected
 * @return the filter capabilities of the underlying JDBC data store
 * @throws IllegalArgumentException if the source is not backed by a JDBC data store
 */
private static FilterCapabilities getFilterCapabilities(FeatureSource mappedSource)
        throws IllegalArgumentException {
    // Early-return on each supported source type; anything else is rejected.
    if (mappedSource instanceof JDBCFeatureSource) {
        return ((JDBCFeatureSource) mappedSource).getDataStore().getFilterCapabilities();
    }
    if (mappedSource instanceof JDBCFeatureStore) {
        return ((JDBCFeatureStore) mappedSource).getDataStore().getFilterCapabilities();
    }
    throw new IllegalArgumentException("Joining queries are only supported on JDBC data stores");
}
}
/**
 * Reports whether tests for the given filter function class should be skipped because the
 * data store does not support it natively; logs an informational message when skipping.
 *
 * @param fClass the filter function class under test
 * @return true when the function is not natively supported (test should be skipped)
 */
protected boolean skipTests(Class<?> fClass) {
    boolean supported = dataStore.getFilterCapabilities().supports(fClass);
    if (!supported) {
        LOGGER.log(
                Level.INFO,
                "Function {0} is not natively supported, skipping test",
                fClass.getSimpleName());
    }
    return !supported;
}
}
/**
 * Splits the filter into two parts, an encodable one, and a non encodable one. The default
 * implementation uses the filter capabilities to split the filter, subclasses can implement
 * their own logic if need be.
 *
 * @param filter the filter to split
 * @param schema the feature type the filter is evaluated against
 * @return a two element array: index 0 holds the pre (encodable) filter, index 1 the post
 *     (in-memory) filter
 */
public Filter[] splitFilter(Filter filter, SimpleFeatureType schema) {
    PostPreProcessFilterSplittingVisitor splitter =
            new PostPreProcessFilterSplittingVisitor(
                    dataStore.getFilterCapabilities(), schema, null);
    filter.accept(splitter, null);
    Filter[] split = new Filter[2];
    split[0] = splitter.getFilterPre();
    split[1] = splitter.getFilterPost();
    return split;
}
/** * Determines if the expression and all its sub expressions are supported. * * @param expression the expression to be tested. * @return true if all sub filters are supported, false otherwise. * @throws IllegalArgumentException If a null filter is passed in. As this function is recursive * a null in a logic filter will also cause an error. */ private boolean fullySupports(Expression expression) { if (expression == null) { throw new IllegalArgumentException("Null expression can not be unpacked"); } FilterCapabilities filterCapabilities = getFilterCapabilities(); if (!filterCapabilities.supports(expression.getClass())) { return false; } // check the known composite expressions if (expression instanceof BinaryExpression) { BinaryExpression be = (BinaryExpression) expression; return fullySupports(be.getExpression1()) && fullySupports(be.getExpression2()); } else if (expression instanceof Function) { Function function = (Function) expression; for (Expression fe : function.getParameters()) { if (!fullySupports(fe)) { return false; } } } return true; }
Filter[] splitFilter(Filter original) { Filter[] split = new Filter[2]; if (original != null) { // create a filter splitter PostPreProcessFilterSplittingVisitor splitter = new PostPreProcessFilterSplittingVisitor( getDataStore().getFilterCapabilities(), null, null); original.accept(splitter, null); split[0] = splitter.getFilterPre(); split[1] = splitter.getFilterPost(); } SimplifyingFilterVisitor visitor = new SimplifyingFilterVisitor(); visitor.setFIDValidator(new PrimaryKeyFIDValidator(this)); split[0] = (Filter) split[0].accept(visitor, null); split[1] = (Filter) split[1].accept(visitor, null); return split; }
/**
 * Resolves the {@link FilterCapabilities} of the JDBC data store behind the supplied source.
 *
 * @param mappedSource the feature source to inspect
 * @return the backing JDBC data store's filter capabilities
 * @throws IllegalArgumentException if the source is not a JDBC feature source or store
 */
private static FilterCapabilities getFilterCapabilities(FeatureSource mappedSource)
        throws IllegalArgumentException {
    if (mappedSource instanceof JDBCFeatureSource) {
        return ((JDBCFeatureSource) mappedSource).getDataStore().getFilterCapabilities();
    }
    if (mappedSource instanceof JDBCFeatureStore) {
        return ((JDBCFeatureStore) mappedSource).getDataStore().getFilterCapabilities();
    }
    // Joining relies on SQL encoding, which only JDBC stores provide.
    throw new IllegalArgumentException("Joining queries are only supported on JDBC data stores");
}
public void testNumericHistogram() throws Exception { // buckets with a size of 100, the function returns an integer from 0 onwards, which // is a zero based bucket number in the bucket sequence FilterFactory ff = dataStore.getFilterFactory(); PropertyName pn = ff.property(aname("energy_consumption")); Expression expression = ff.function("floor", ff.divide(pn, ff.literal(100))); boolean expectOptimized = dataStore.getFilterCapabilities().supports(FilterFunction_floor.class); List<Object[]> value = genericGroupByTestTest(Query.ALL, Aggregate.COUNT, expectOptimized, expression); assertNotNull(value); assertEquals(value.size(), 3); checkValueContains(value, "0", "10"); checkValueContains(value, "1", "1"); checkValueContains(value, "5", "1"); }
public void testTimestampHistogram() throws Exception { // buckets with a size of one day, the function returns an integer from 0 onwards, which // is a zero based bucket number in the bucket sequence FilterFactory ff = dataStore.getFilterFactory(); PropertyName pn = ff.property(aname("last_update")); Date baseDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2016-06-03 00:00:00"); Expression difference = ff.function("dateDifference", pn, ff.literal(baseDate)); int dayInMs = 1000 * 60 * 60 * 24; Expression expression = ff.function("floor", ff.divide(difference, ff.literal(dayInMs))); FilterCapabilities capabilities = dataStore.getFilterCapabilities(); boolean expectOptimized = capabilities.supports(FilterFunction_floor.class) && capabilities.supports(DateDifferenceFunction.class); List<Object[]> value = genericGroupByTestTest(Query.ALL, Aggregate.COUNT, expectOptimized, expression); assertNotNull(value); assertEquals(5, value.size()); checkValueContains(value, "0", "3"); // 2016-06-03 checkValueContains(value, "2", "1"); // 2016-06-05 checkValueContains(value, "3", "2"); // 2016-06-06 checkValueContains(value, "4", "3"); // 2016-06-07 checkValueContains(value, "12", "3"); // 2016-06-15 }
/**
 * Verifies the sum-of-area visitor over the aggregate table: the visitor is only expected to
 * have been visited in memory when the store cannot natively encode the area function, and
 * the total area must come out to 30.0 either way.
 */
public void testSumArea() throws Exception {
    FilterFactory ff = dataStore.getFilterFactory();
    PropertyName p = ff.property(aname("geom"));
    SumAreaVisitor v = new MySumAreaVisitor(p);
    dataStore.getFeatureSource(tname("aggregate")).accepts(Query.ALL, v, null);
    // assertEquals takes (expected, actual); the expected value is the predicted
    // "not natively supported" flag, the actual is the recorded visited flag
    assertEquals(
            !dataStore.getFilterCapabilities().supports(FilterFunction_area.class), visited);
    assertEquals(30.0, v.getResult().toDouble(), 0.01);
}
Filter[] splitFilter(Filter original) { Filter[] split = new Filter[2]; if ( original != null ) { //create a filter splitter PostPreProcessFilterSplittingVisitor splitter = new PostPreProcessFilterSplittingVisitor(getDataStore() .getFilterCapabilities(), null, null); original.accept(splitter, null); split[0] = splitter.getFilterPre(); split[1] = splitter.getFilterPost(); } SimplifyingFilterVisitor visitor = new SimplifyingFilterVisitor(); visitor.setFIDValidator( new PrimaryKeyFIDValidator( this ) ); split[0] = (Filter) split[0].accept(visitor, null); split[1] = (Filter) split[1].accept(visitor, null); return split; }
/** * Helper method for splitting a filter. */ Filter[] splitFilter(Filter original) { Filter[] split = new Filter[2]; if ( original != null ) { //create a filter splitter PostPreProcessFilterSplittingVisitor splitter = new PostPreProcessFilterSplittingVisitor(getDataStore() .getFilterCapabilities(), getSchema(), null); original.accept(splitter, null); split[0] = splitter.getFilterPre(); split[1] = splitter.getFilterPost(); } SimplifyingFilterVisitor visitor = new SimplifyingFilterVisitor(); visitor.setFIDValidator( new PrimaryKeyFIDValidator( this ) ); split[0] = (Filter) split[0].accept(visitor, null); split[1] = (Filter) split[1].accept(visitor, null); return split; }
Filter[] splitFilter(Filter original, FeatureSource source) { JDBCFeatureSource featureSource = null; if (source instanceof JDBCFeatureSource) { featureSource = (JDBCFeatureSource) source; } else { featureSource = ((JDBCFeatureStore)source).getFeatureSource(); } Filter[] split = new Filter[2]; if ( original != null ) { //create a filter splitter PostPreProcessFilterSplittingVisitor splitter = new PostPreProcessFilterSplittingVisitor(getDataStore() .getFilterCapabilities(), featureSource.getSchema(), null); original.accept(splitter, null); split[0] = splitter.getFilterPre(); split[1] = splitter.getFilterPost(); } SimplifyingFilterVisitor visitor = new SimplifyingFilterVisitor(); visitor.setFIDValidator( new PrimaryKeyFIDValidator( featureSource ) ); split[0] = (Filter) split[0].accept(visitor, null); split[1] = (Filter) split[1].accept(visitor, null); return split; }