/**
 * Check whether the target partition already exists (idempotency probe).
 * Returns true iff the table has partition info and the partition with the
 * configured values is already present in the metastore.
 */
@Override public Boolean idempotentTask() throws TException {
  ThriftHiveMetastore.Iface client = description.metastoreClient(conf);
  String db = description.getTableDesc().getDatabaseName();
  String table = description.getTableDesc().getTableName();
  if (oti.hasPartitionInfo()) {
    Map<String, String> partitionSpec = description.getPartitionValues();
    List<String> partitionValues = listOfPartitionValues(
        partitionSpec, oti.getPartitionInfo());
    if (partitionExists(client, db, table, partitionValues)) {
      // Use parameterized logging, consistent with the SLF4J style used
      // elsewhere in this file (avoids eager string concatenation).
      LOG.error("Table {}:{} partition {} already exists",
          db, table, partitionSpec);
      return true;
    }
  }
  return false;
}
};
/**
 * Drop the partition this job will be writing to, if it is already present.
 *
 * @param conf Configuration to use
 * @param description HiveOutputDescription
 * @param oti OutputInfo
 * @return True iff partition was dropped
 * @throws IOException if the metastore client cannot be obtained
 */
private boolean dropPartitionIfExists(Configuration conf,
    HiveOutputDescription description, OutputInfo oti) throws IOException {
  ThriftHiveMetastore.Iface client;
  try {
    client = description.metastoreClient(conf);
  } catch (TException e) {
    // Wrap Thrift failure so callers only deal with IOException.
    throw new IOException(e);
  }
  String db = description.getTableDesc().getDatabaseName();
  String table = description.getTableDesc().getTableName();
  // Unpartitioned table: nothing to drop.
  if (!oti.hasPartitionInfo()) {
    return false;
  }
  Map<String, String> partitionSpec = description.getPartitionValues();
  List<String> partitionValues =
      listOfPartitionValues(partitionSpec, oti.getPartitionInfo());
  // Partition not present: nothing to drop.
  if (!partitionExists(client, db, table, partitionValues)) {
    return false;
  }
  LOG.info("Dropping partition {} from table {}:{}", partitionSpec, db, table);
  return dropPartition(client, db, table, partitionValues);
}
/**
 * Register the output partition with the Hive metastore. Idempotent:
 * an AlreadyExistsException from the metastore is treated as success
 * (another task, or Giraph itself, created the partition first).
 */
@Override public Void idempotentTask() throws TException {
  String dbName = outputDesc.getTableDesc().getDatabaseName();
  String tableName = outputDesc.getTableDesc().getTableName();
  ThriftHiveMetastore.Iface client = outputDesc.metastoreClient(conf);
  Table hiveTable = client.get_table(dbName, tableName);
  Partition partition = new Partition();
  partition.setDbName(dbName);
  partition.setTableName(tableName);
  partition.setParameters(outputInfo.getTableParams());
  List<String> partitionValues = HiveUtils.orderedPartitionValues(
      hiveTable.getPartitionKeys(), outputDesc.getPartitionValues());
  partition.setValues(partitionValues);
  // Clone the table's storage descriptor, then override the pieces that
  // are specific to this output (serializer params, location, columns).
  StorageDescriptor sd = new StorageDescriptor(hiveTable.getSd());
  sd.setParameters(outputInfo.getSerializerParams());
  sd.setLocation(outputInfo.getFinalOutputPath());
  sd.setCols(outputInfo.getColumnInfo());
  partition.setSd(sd);
  // FIX: the message refers to partition values, but the serializer params
  // were being passed as the first argument; log the actual values.
  LOG.info("Registering partition with values {} located at {}",
      partitionValues, outputInfo.getFinalOutputPath());
  try {
    client.add_partition(partition);
  } catch (AlreadyExistsException e) {
    LOG.info("Partition already exists; Giraph must have just created it");
  } catch (InvalidObjectException e) {
    // Metastore rejected the partition definition: programming error.
    throw new IllegalStateException(e);
  }
  return null;
}
};
// Compute the relative partition path (e.g. "key1=val1/key2=val2") from the
// output's partition info and the user-supplied partition values.
// NOTE(review): fragment — the enclosing method continues past this chunk.
try {
  partitionPiece = HiveUtils.computePartitionPath(
      outputInfo.getPartitionInfo(), outputDesc.getPartitionValues());
} catch (MetaException e) {
  // Wrap metastore failure so callers only see IOException.
  throw new IOException(e);