private static Map<String, String> translateOperatorTypes(Set<String> operators) { if (operators.size() == 1) { // don't display operator (plan node) name again return ImmutableMap.of(getOnlyElement(operators), ""); } if (operators.contains("LookupJoinOperator") && operators.contains("HashBuilderOperator")) { // join plan node return ImmutableMap.of( "LookupJoinOperator", "Left (probe) ", "HashBuilderOperator", "Right (build) "); } return ImmutableMap.of(); }
/**
 * Returns true only when there is exactly one partition and that partition
 * reports itself as unpartitioned.
 */
public boolean isUnpartitioned()
{
    if (partitions.size() != 1) {
        return false;
    }
    return getOnlyElement(partitions).isUnpartitioned();
}
}
/**
 * Coerces a value to match the field's cardinality: repeated fields always
 * receive a List (scalars are wrapped), non-repeated fields always receive a
 * scalar (singleton lists are unwrapped via getOnlyElement).
 */
private Object coerceValueForField(Field context, Object value)
{
    boolean isListValue = value instanceof List;
    if (context.isRepeated()) {
        // repeated field: wrap a scalar into a singleton list
        return isListValue ? value : ImmutableList.of(value);
    }
    // non-repeated field: unwrap a singleton list into its sole element
    return isListValue ? getOnlyElement((List) value) : value;
}
@Test
public void testNotAnnotatedAggregateStateAggregationParse()
{
    // Parse a function class whose aggregate-state parameter lacks an explicit annotation.
    ParametricAggregation aggregation = parseFunctionDefinition(NotAnnotatedAggregateStateAggregationFunction.class);

    // Expect exactly one exact implementation to have been extracted.
    AggregationImplementation implementation = getOnlyElement(aggregation.getImplementations().getExactImplementations().values());

    // The un-annotated state parameter should still be classified as STATE, followed by the input channel.
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes =
            ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.STATE, AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL);
    assertTrue(implementation.getInputParameterMetadataTypes().equals(expectedMetadataTypes));

    // Specialize with no bound variables and a single argument; verify the resulting function's shape.
    InternalAggregationFunction specialized = aggregation.specialize(BoundVariables.builder().build(), 1, new TypeRegistry(), null);
    assertEquals(specialized.getFinalType(), DoubleType.DOUBLE);
    assertTrue(specialized.isDecomposable());
    assertEquals(specialized.name(), "no_aggregation_state_aggregate");
}
private List<Expression> extractDisjuncts(Type type, DiscreteValues discreteValues, SymbolReference reference) { List<Expression> values = discreteValues.getValues().stream() .map(object -> literalEncoder.toExpression(object, type)) .collect(toList()); // If values is empty, then the equatableValues was either ALL or NONE, both of which should already have been checked for checkState(!values.isEmpty()); Expression predicate; if (values.size() == 1) { predicate = new ComparisonExpression(EQUAL, reference, getOnlyElement(values)); } else { predicate = new InPredicate(reference, new InListExpression(values)); } if (!discreteValues.isWhiteList()) { predicate = new NotExpression(predicate); } return ImmutableList.of(predicate); }
.map(Aggregation::getMask).filter(Optional::isPresent).map(Optional::get).collect(toImmutableList()); Set<Symbol> uniqueMasks = ImmutableSet.copyOf(masks); if (uniqueMasks.size() != 1 || masks.size() == node.getAggregations().size()) { return context.defaultRewrite(node, Optional.empty()); Iterables.getOnlyElement(uniqueMasks), node.getAggregations()); functionCall.getWindow(), false, ImmutableList.of(aggregateInfo.getNewDistinctAggregateSymbol().toSymbolReference())), entry.getValue().getSignature(), Optional.empty())); String signatureName = entry.getValue().getSignature().getName(); Aggregation aggregation = new Aggregation( new FunctionCall(functionName, functionCall.getWindow(), false, ImmutableList.of(argument.toSymbolReference())), getFunctionSignature(functionName, argument), Optional.empty()); aggregations.build(), node.getGroupingSets(), ImmutableList.of(), node.getStep(), Optional.empty(),
/**
 * Creates the test table from the configured inputs, selects everything back
 * through Presto, and asserts that both the column types and every row value
 * round-trip unchanged.
 */
public void execute(QueryRunner prestoExecutor, Session session, DataSetup dataSetup)
{
    List<Type> expectedTypes = inputs.stream().map(Input::getPrestoResultType).collect(toList());
    List<Object> expectedResults = inputs.stream().map(Input::toPrestoQueryResult).collect(toList());

    try (TestTable testTable = dataSetup.setupTestTable(unmodifiableList(inputs))) {
        MaterializedResult queryResult = prestoExecutor.execute(session, "SELECT * from " + testTable.getName());
        assertEquals(queryResult.getTypes(), expectedTypes);

        // Exactly one row is expected; compare it field by field for clearer failures
        List<Object> actualResults = getOnlyElement(queryResult).getFields();
        assertEquals(actualResults.size(), expectedResults.size(), "lists don't have the same size");
        for (int i = 0; i < expectedResults.size(); i++) {
            assertEquals(actualResults.get(i), expectedResults.get(i), "Element " + i);
        }
    }
}
@Test
public void testPassEscapeInMetaDataQuery()
        throws Exception
{
    DatabaseMetaData metaData = connection.getMetaData();

    // Capture the SQL issued by a metadata lookup that embeds the driver's escape character
    Set<String> queries = captureQueries(() -> {
        String schemaPattern = "defau" + metaData.getSearchStringEscape() + "_t";
        try (ResultSet resultSet = metaData.getColumns("blackhole", schemaPattern, null, null)) {
            assertFalse(resultSet.next(), "There should be no results");
        }
        return null;
    });

    assertEquals(queries.size(), 1, "Expected exactly one query, got " + queries.size());

    // The generated query must carry the ESCAPE clause through to the server
    String capturedQuery = getOnlyElement(queries);
    assertContains(capturedQuery, "_t' ESCAPE '", "Metadata query does not contain ESCAPE");
}
Pair<Executor, Runnable> executorRunnablePair = Iterables.getOnlyElement(handOffCallbacks.values()); executorRunnablePair.lhs.execute(executorRunnablePair.rhs); handOffCallbacks.clear(); Assert.assertEquals(1, mdc.getPublished().size()); DataSegment segment = mdc.getPublished().iterator().next(); Assert.assertEquals("test_ds", segment.getDataSource()); Assert.assertEquals(ImmutableList.of("dim1", "dim2"), segment.getDimensions()); Assert.assertEquals( Intervals.of(now.toString("YYYY-MM-dd") + "/" + now.plusDays(1).toString("YYYY-MM-dd")), segment.getInterval() ); Assert.assertEquals(ImmutableList.of("count"), segment.getMetrics()); EasyMock.verify(monitorScheduler, queryRunnerFactoryConglomerate);
@Test
public void testExplainOfCreateTableAs()
{
    // EXPLAIN of a CTAS statement must match the logical plan produced directly
    String ctasQuery = "CREATE TABLE copy_orders AS SELECT * FROM orders";
    MaterializedResult explainResult = computeActual("EXPLAIN " + ctasQuery);
    assertEquals(getOnlyElement(explainResult.getOnlyColumnAsSet()), getExplainPlan(ctasQuery, LOGICAL));
}
@Test
public void testResetSession()
{
    // RESET of a system session property reports success and the property name
    MaterializedResult result = computeActual(getSession(), "RESET SESSION test_string");
    assertTrue((Boolean) getOnlyElement(result).getField(0));
    assertEquals(result.getResetSessionProperties(), ImmutableSet.of("test_string"));

    // RESET of a catalog-qualified connector property reports the qualified name
    result = computeActual(getSession(), format("RESET SESSION %s.connector_string", TESTING_CATALOG));
    assertTrue((Boolean) getOnlyElement(result).getField(0));
    assertEquals(result.getResetSessionProperties(), ImmutableSet.of(TESTING_CATALOG + ".connector_string"));
}
/**
 * Looking up a partition under a nonexistent database must yield a single
 * entry whose value is absent rather than throwing.
 */
public void testInvalidGetPartitionsByNames()
{
    Map<String, Optional<Partition>> partitionsByNames =
            metastore.getPartitionsByNames(BAD_DATABASE, TEST_TABLE, ImmutableList.of(TEST_PARTITION1));
    assertEquals(partitionsByNames.size(), 1);

    Optional<Partition> partition = Iterables.getOnlyElement(partitionsByNames.values());
    assertFalse(partition.isPresent());
}
private void assertFilter(String filter, boolean expected, boolean withNoInputColumns, ExpressionCompiler compiler) { List<Boolean> results = executeFilterWithAll(filter, TEST_SESSION, withNoInputColumns, compiler); HashSet<Boolean> resultSet = new HashSet<>(results); // we should only have a single result assertTrue(resultSet.size() == 1, "Expected only [" + expected + "] result unique result, but got " + resultSet); assertEquals((boolean) Iterables.getOnlyElement(resultSet), expected); }
/**
 * Returns the aggregation's intermediate type: the single type itself when
 * there is exactly one, otherwise an anonymous row over all of them.
 */
public Type getIntermediateType()
{
    return (intermediateType.size() == 1)
            ? getOnlyElement(intermediateType)
            : RowType.anonymous(intermediateType);
}
@Test
public void testStateOnDifferentThanFirstPositionAggregationParse()
{
    // Expected signature: double -> double aggregate with the moved-state name.
    Signature expectedSignature = new Signature(
            "simple_exact_aggregate_aggregation_state_moved",
            FunctionKind.AGGREGATE,
            DoubleType.DOUBLE.getTypeSignature(),
            ImmutableList.of(DoubleType.DOUBLE.getTypeSignature()));

    // Parse a function class whose @AggregationState parameter is NOT the first parameter.
    ParametricAggregation aggregation = parseFunctionDefinition(StateOnDifferentThanFirstPositionAggregationFunction.class);
    assertEquals(aggregation.getSignature(), expectedSignature);

    // Exactly one exact implementation should have been extracted, bound to the source class.
    AggregationImplementation implementation = getOnlyElement(aggregation.getImplementations().getExactImplementations().values());
    assertEquals(implementation.getDefinitionClass(), StateOnDifferentThanFirstPositionAggregationFunction.class);

    // Parameter metadata must mirror the declared order: input channel first, state second.
    List<AggregationMetadata.ParameterMetadata.ParameterType> expectedMetadataTypes =
            ImmutableList.of(AggregationMetadata.ParameterMetadata.ParameterType.INPUT_CHANNEL, AggregationMetadata.ParameterMetadata.ParameterType.STATE);
    assertTrue(implementation.getInputParameterMetadataTypes().equals(expectedMetadataTypes));
}
/**
 * Atomically (per this object's monitor) applies {@code update} to a partition's
 * statistics: reads the current stats, computes the new ones, rewrites the
 * partition's basic-stat parameters, stores the new column statistics, and
 * deletes column statistics that the update removed.
 *
 * @throws PrestoException if the metastore resolves the partition name to more than one partition
 */
@Override
public synchronized void updatePartitionStatistics(String databaseName, String tableName, String partitionName, Function<PartitionStatistics, PartitionStatistics> update)
{
    // Fetch current statistics; a null entry for the requested name is a metastore contract violation.
    PartitionStatistics currentStatistics = requireNonNull(
            getPartitionStatistics(databaseName, tableName, ImmutableSet.of(partitionName)).get(partitionName),
            "getPartitionStatistics() returned null");
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);

    List<Partition> partitions = getPartitionsByNames(databaseName, tableName, ImmutableList.of(partitionName));
    if (partitions.size() != 1) {
        throw new PrestoException(HIVE_METASTORE_ERROR, "Metastore returned multiple partitions for name: " + partitionName);
    }

    // Mutate a deep copy so the original partition object is left untouched.
    Partition originalPartition = getOnlyElement(partitions);
    Partition modifiedPartition = originalPartition.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), basicStatistics));
    alterPartitionWithoutStatistics(databaseName, tableName, modifiedPartition);

    // Persist the new column-level statistics alongside the updated row count.
    Map<String, HiveType> columns = modifiedPartition.getSd().getCols().stream()
            .collect(toImmutableMap(FieldSchema::getName, schema -> HiveType.valueOf(schema.getType())));
    setPartitionColumnStatistics(databaseName, tableName, partitionName, columns, updatedStatistics.getColumnStatistics(), basicStatistics.getRowCount());

    // Drop statistics for any column the update no longer covers.
    Set<String> removedStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedStatistics.forEach(column -> deletePartitionColumnStatistics(databaseName, tableName, partitionName, column));
}
/**
 * Fetches all splits for the table (no predicate) and returns the single
 * expected split, failing the test if there is not exactly one.
 */
protected HiveSplit getHiveSplit(ConnectorTableHandle tableHandle)
{
    List<ConnectorSplit> allSplits = getAllSplits(tableHandle, TupleDomain.all());
    assertEquals(allSplits.size(), 1);
    return (HiveSplit) getOnlyElement(allSplits);
}
@Override public boolean supportsBitmapIndex(final BitmapIndexSelector selector) { if (requiredBindings.isEmpty()) { // Constant expression. return true; } else if (requiredBindings.size() == 1) { // Single-column expression. We can use bitmap indexes if this column has an index and does not have // multiple values. The lack of multiple values is important because expression filters treat multi-value // arrays as nulls, which doesn't permit index based filtering. final String column = Iterables.getOnlyElement(requiredBindings); return selector.getBitmapIndex(column) != null && !selector.hasMultipleValues(column); } else { // Multi-column expression. return false; } }
@Test
public void testMergeAllBackwardsSequentially()
{
    DisjointSet<Integer> disjoint = new DisjointSet<>();

    // insert pair (i, i-1), walking backwards from 100; assert all inserts are considered new
    for (int i = 100; i > 0; i--) {
        assertTrue(disjoint.findAndUnion(i, i - 1));
        // previously united elements above i remain in the same set
        if (i != 100) {
            assertEquals(disjoint.find(i + 1), disjoint.find(i));
        }
        // elements below the current pair have not been united yet
        if (i != 1) {
            assertNotEquals(disjoint.find(i - 1), disjoint.find(i - 2));
        }
    }

    // assert every pair (i, j) is in the same set
    for (int i = 0; i <= 100; i++) {
        for (int j = 0; j <= 100; j++) {
            assertEquals(disjoint.find(i), disjoint.find(j));
            assertFalse(disjoint.findAndUnion(i, j));
        }
    }

    // exactly one equivalence class containing all 101 elements
    Collection<Set<Integer>> equivalentClasses = disjoint.getEquivalentClasses();
    assertEquals(equivalentClasses.size(), 1);
    assertEquals(Iterables.getOnlyElement(equivalentClasses).size(), 101);
}
@Override public StreamProperties visitAssignUniqueId(AssignUniqueId node, List<StreamProperties> inputProperties) { StreamProperties properties = Iterables.getOnlyElement(inputProperties); if (properties.getPartitioningColumns().isPresent()) { // preserve input (possibly preferred) partitioning return properties; } return new StreamProperties(properties.getDistribution(), Optional.of(ImmutableList.of(node.getIdColumn())), properties.isOrdered()); }