@Override
public Status delete(String table, String key) {
  try {
    // firstly, retrieve the entity to be deleted
    TableOperation retrieveOp =
        TableOperation.retrieve(partitionKey, key, TableServiceEntity.class);
    TableServiceEntity entity = cloudTable.execute(retrieveOp).getResultAsType();
    // secondly, delete the entity
    TableOperation deleteOp = TableOperation.delete(entity);
    cloudTable.execute(deleteOp);
    return Status.OK;
  } catch (Exception e) {
    return Status.ERROR;
  }
}
private Status readEntity(String key, Map<String, ByteIterator> result) {
  try {
    // retrieve the entity to be read
    TableOperation retrieveOp =
        TableOperation.retrieve(partitionKey, key, DynamicTableEntity.class);
    DynamicTableEntity entity = cloudTable.execute(retrieveOp).getResultAsType();
    HashMap<String, EntityProperty> properties = entity.getProperties();
    for (Entry<String, EntityProperty> entry : properties.entrySet()) {
      String fieldName = entry.getKey();
      ByteIterator fieldVal = new ByteArrayByteIterator(entry.getValue().getValueAsByteArray());
      result.put(fieldName, fieldVal);
    }
    return Status.OK;
  } catch (Exception e) {
    return Status.ERROR;
  }
}
private Status insertOrUpdate(String key, Map<String, ByteIterator> values) {
  HashMap<String, EntityProperty> properties = new HashMap<String, EntityProperty>();
  for (Entry<String, ByteIterator> entry : values.entrySet()) {
    String fieldName = entry.getKey();
    byte[] fieldVal = entry.getValue().toArray();
    properties.put(fieldName, new EntityProperty(fieldVal));
  }
  DynamicTableEntity entity = new DynamicTableEntity(partitionKey, key, properties);
  TableOperation insertOrReplace = TableOperation.insertOrReplace(entity);
  try {
    cloudTable.execute(insertOrReplace);
    return Status.OK;
  } catch (Exception e) {
    return Status.ERROR;
  }
}
@Override
public UUID findFirst(JobAuthorization.State jobState) {
  try {
    String partitionFilter = generateFilterCondition(
        "PartitionKey", TableQuery.QueryComparisons.EQUAL, configuration.getPartitionKey());
    // property names are capitalized by the storage API, hence "State"
    String stateFilter = generateFilterCondition(
        "State", TableQuery.QueryComparisons.EQUAL, jobState.name());
    String combinedFilter =
        TableQuery.combineFilters(partitionFilter, TableQuery.Operators.AND, stateFilter);
    TableQuery<DataWrapper> query =
        TableQuery.from(DataWrapper.class).where(combinedFilter).take(1);
    CloudTable table = tableClient.getTableReference(JOB_TABLE);
    Iterator<DataWrapper> iter = table.execute(query).iterator();
    if (!iter.hasNext()) {
      return null;
    }
    return UUID.fromString(iter.next().getRowKey());
  } catch (StorageException | URISyntaxException e) {
    throw new MicrosoftStorageException("Error finding first job", e);
  }
}
@Override
public Status scan(String table, String startkey, int recordcount, Set<String> fields,
    Vector<HashMap<String, ByteIterator>> result) {
  try {
    String whereStr =
        String.format("(PartitionKey eq '%s') and (RowKey ge '%s')", partitionKey, startkey);
    TableQuery<DynamicTableEntity> scanQuery =
        new TableQuery<DynamicTableEntity>(DynamicTableEntity.class).where(whereStr).take(recordcount);
    int cnt = 0;
    for (DynamicTableEntity entity : cloudTable.execute(scanQuery)) {
      HashMap<String, EntityProperty> properties = entity.getProperties();
      HashMap<String, ByteIterator> cur = new HashMap<String, ByteIterator>();
      for (Entry<String, EntityProperty> entry : properties.entrySet()) {
        String fieldName = entry.getKey();
        ByteIterator fieldVal = new ByteArrayByteIterator(entry.getValue().getValueAsByteArray());
        if (fields == null || fields.contains(fieldName)) {
          cur.put(fieldName, fieldVal);
        }
      }
      result.add(cur);
      if (++cnt == recordcount) {
        break;
      }
    }
    return Status.OK;
  } catch (Exception e) {
    return Status.ERROR;
  }
}
public Status readSubset(String key, Set<String> fields, Map<String, ByteIterator> result) {
  String whereStr = String.format("RowKey eq '%s'", key);
  TableQuery<TableServiceEntity> projectionQuery = TableQuery.from(TableServiceEntity.class)
      .where(whereStr).select(fields.toArray(new String[0]));
  EntityResolver<HashMap<String, ByteIterator>> resolver =
      new EntityResolver<HashMap<String, ByteIterator>>() {
        public HashMap<String, ByteIterator> resolve(String partitionkey, String rowKey,
            Date timeStamp, HashMap<String, EntityProperty> properties, String etag) {
          HashMap<String, ByteIterator> tmp = new HashMap<String, ByteIterator>();
          for (Entry<String, EntityProperty> entry : properties.entrySet()) {
            String fieldName = entry.getKey();
            ByteIterator fieldVal = new ByteArrayByteIterator(entry.getValue().getValueAsByteArray());
            tmp.put(fieldName, fieldVal);
          }
          return tmp;
        }
      };
  try {
    for (HashMap<String, ByteIterator> tmp : cloudTable.execute(projectionQuery, resolver)) {
      for (Entry<String, ByteIterator> entry : tmp.entrySet()) {
        String fieldName = entry.getKey();
        ByteIterator fieldVal = entry.getValue();
        result.put(fieldName, fieldVal);
      }
    }
    return Status.OK;
  } catch (Exception e) {
    return Status.ERROR;
  }
}
/**
 * Retrieves all rows in the table with the given partition key.
 *
 * @param partitionKey the partition key (the job model version of the processors to be retrieved).
 * @return an iterable of the matching processor entities.
 */
public Iterable<ProcessorEntity> getEntitiesWithPartition(String partitionKey) {
  String partitionFilter = TableQuery.generateFilterCondition(PARTITION_KEY,
      TableQuery.QueryComparisons.EQUAL, partitionKey);
  TableQuery<ProcessorEntity> partitionQuery =
      TableQuery.from(ProcessorEntity.class).where(partitionFilter);
  return table.execute(partitionQuery);
}
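// Illustrative usage sketch (not from the original source): the tableUtils variable and the
// partition value are hypothetical, and it is assumed that ProcessorEntity extends
// TableServiceEntity and therefore exposes the standard getRowKey() accessor.
for (ProcessorEntity processor : tableUtils.getEntitiesWithPartition("1")) {
  System.out.println("Processor row: " + processor.getRowKey());
}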
private void create(String rowKey, String tableName, String state, Object type) throws IOException {
  try {
    CloudTable table = tableClient.getTableReference(tableName);
    String serializedJob = configuration.getMapper().writeValueAsString(type);
    DataWrapper wrapper = new DataWrapper(
        configuration.getPartitionKey(), rowKey, state, serializedJob); // job id used as key
    TableOperation insert = TableOperation.insert(wrapper);
    table.execute(insert);
  } catch (JsonProcessingException | StorageException | URISyntaxException e) {
    throw new IOException("Error creating data for rowKey: " + rowKey, e);
  }
}
private Status insertBatch(String key, Map<String, ByteIterator> values) {
  HashMap<String, EntityProperty> properties = new HashMap<String, EntityProperty>();
  for (Entry<String, ByteIterator> entry : values.entrySet()) {
    String fieldName = entry.getKey();
    byte[] fieldVal = entry.getValue().toArray();
    properties.put(fieldName, new EntityProperty(fieldVal));
  }
  DynamicTableEntity entity = new DynamicTableEntity(partitionKey, key, properties);
  BATCH_OPERATION.insertOrReplace(entity);
  if (++curIdx == batchSize) {
    try {
      cloudTable.execute(BATCH_OPERATION);
      BATCH_OPERATION.clear();
      curIdx = 0;
    } catch (Exception e) {
      return Status.ERROR;
    }
  }
  return Status.OK;
}
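// Note: insertBatch only flushes once curIdx reaches batchSize, so a partially filled batch
// can be left unsent when the client shuts down. A minimal sketch of a teardown flush,
// assuming the surrounding class exposes a YCSB-style cleanup() hook and reusing the
// BATCH_OPERATION, curIdx and cloudTable fields above (this override is an assumption,
// not part of the original source):
@Override
public void cleanup() throws DBException {
  if (curIdx > 0) {
    try {
      // flush whatever is still buffered in the batch
      cloudTable.execute(BATCH_OPERATION);
      BATCH_OPERATION.clear();
      curIdx = 0;
    } catch (Exception e) {
      throw new DBException("Could not flush the remaining batch operations.", e);
    }
  }
}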
@Override
public void updateJob(UUID jobId, PortabilityJob job, JobUpdateValidator validator) throws IOException {
  Preconditions.checkNotNull(jobId, "Job id is null");
  Preconditions.checkNotNull(job, "Job is null");
  try {
    CloudTable table = tableClient.getTableReference(JOB_TABLE);
    String serializedJob = configuration.getMapper().writeValueAsString(job);
    DataWrapper wrapper = new DataWrapper(
        configuration.getPartitionKey(),
        jobId.toString(),
        job.jobAuthorization().state().name(),
        serializedJob);
    if (validator != null) {
      PortabilityJob previousJob = findJob(jobId);
      if (previousJob == null) {
        throw new IOException("Could not find record for jobId: " + jobId);
      }
      validator.validate(previousJob, job);
    }
    TableOperation insert = TableOperation.insertOrReplace(wrapper);
    table.execute(insert);
  } catch (JsonProcessingException | StorageException | URISyntaxException e) {
    throw new IOException("Error updating job: " + jobId, e);
  }
}
try {
  cloudTable = tableClient.getTableReference(table);
  cloudTable.createIfNotExists();
} catch (Exception e) {
  throw new DBException("Could not connect to the table.", e);
}
/**
 * Adds a table operation to delete the specified entity to the batch operation.
 *
 * @param entity
 *     The {@link TableEntity} to delete.
 */
public void delete(final TableEntity entity) {
  this.lockToPartitionKey(entity.getPartitionKey());
  this.add(TableOperation.delete(entity));
}
/**
 * Adds a table operation to insert or replace the specified entity to the batch operation.
 *
 * @param entity
 *     The {@link TableEntity} to insert if not found or to replace if it exists.
 */
public void insertOrReplace(final TableEntity entity) {
  this.lockToPartitionKey(entity.getPartitionKey());
  this.add(TableOperation.insertOrReplace(entity));
}
/**
 * Adds a table operation to merge the specified entity to the batch operation.
 *
 * @param entity
 *     The {@link TableEntity} to merge.
 */
public void merge(final TableEntity entity) {
  this.lockToPartitionKey(entity.getPartitionKey());
  this.add(TableOperation.merge(entity));
}
/**
 * Adds a table operation to insert or merge the specified entity to the batch operation.
 *
 * @param entity
 *     The {@link TableEntity} to insert if not found or to merge if it exists.
 */
public void insertOrMerge(final TableEntity entity) {
  this.lockToPartitionKey(entity.getPartitionKey());
  this.add(TableOperation.insertOrMerge(entity));
}
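// Illustrative usage sketch of the batch helpers above (not from the original source; the
// entity variables and the cloudTable reference are hypothetical). Every entity added to a
// single batch must share one partition key, which is what lockToPartitionKey enforces, and
// the whole batch executes as one entity-group transaction.
TableBatchOperation batch = new TableBatchOperation();
batch.insertOrReplace(newOrUpdatedEntity);
batch.insertOrMerge(partiallyUpdatedEntity);
batch.merge(existingEntity);
batch.delete(obsoleteEntity);
cloudTable.execute(batch);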
EntityResolver<HashMap<String, ByteIterator>> resolver =
    new EntityResolver<HashMap<String, ByteIterator>>() {
      public HashMap<String, ByteIterator> resolve(String partitionkey, String rowKey,
          Date timeStamp, HashMap<String, EntityProperty> properties, String etag) {
        HashMap<String, ByteIterator> tmp = new HashMap<String, ByteIterator>();
        for (Entry<String, EntityProperty> entry : properties.entrySet()) {
          String key = entry.getKey();
          ByteIterator val = new ByteArrayByteIterator(entry.getValue().getValueAsByteArray());
          tmp.put(key, val);
        }
        return tmp;
      }
    };
private void remove(UUID jobId, String tableName) throws IOException {
  try {
    CloudTable table = tableClient.getTableReference(tableName);
    TableOperation retrieve = TableOperation.retrieve(
        configuration.getPartitionKey(), jobId.toString(), DataWrapper.class);
    TableResult result = table.execute(retrieve);
    DataWrapper wrapper = result.getResultAsType();
    TableOperation delete = TableOperation.delete(wrapper);
    table.execute(delete);
  } catch (StorageException | URISyntaxException e) {
    throw new IOException("Error removing data for job: " + jobId, e);
  }
}
public void init() {
  try {
    String endpoint = String.format(ENDPOINT_TEMPLATE, configuration.getAccountName());
    CloudStorageAccount cosmosAccount = CloudStorageAccount.parse(
        String.format(
            COSMOS_CONNECTION_TEMPLATE,
            configuration.getAccountName(),
            configuration.getAccountKey(),
            endpoint));
    tableClient = cosmosAccount.createCloudTableClient();

    // Create the tables if they do not exist
    tableClient.getTableReference(JOB_TABLE).createIfNotExists();
    tableClient.getTableReference(JOB_DATA_TABLE).createIfNotExists();

    CloudStorageAccount blobAccount = CloudStorageAccount.parse(
        String.format(
            BLOB_CONNECTION_TEMPLATE,
            configuration.getAccountName(),
            configuration.getBlobKey()));
    blobClient = blobAccount.createCloudBlobClient();
    blobClient.getContainerReference(BLOB_CONTAINER).createIfNotExists();
  } catch (StorageException | URISyntaxException | InvalidKeyException e) {
    throw new MicrosoftStorageException(e);
  }
}
private <T> T find(Class<T> type, String rowKey, String tableName) {
  try {
    CloudTable table = tableClient.getTableReference(tableName);
    TableOperation retrieve = TableOperation.retrieve(
        configuration.getPartitionKey(), rowKey, DataWrapper.class);
    TableResult result = table.execute(retrieve);
    DataWrapper wrapper = result.getResultAsType();
    return configuration.getMapper().readValue(wrapper.getSerialized(), type);
  } catch (StorageException | IOException | URISyntaxException e) {
    throw new MicrosoftStorageException("Error finding data for rowKey: " + rowKey, e);
  }
}
@Override
public PortabilityJob findJob(UUID jobId) {
  Preconditions.checkNotNull(jobId, "Job id is null");
  try {
    CloudTable table = tableClient.getTableReference(JOB_TABLE);
    TableOperation retrieve = TableOperation.retrieve(
        configuration.getPartitionKey(), jobId.toString(), DataWrapper.class);
    TableResult result = table.execute(retrieve);
    DataWrapper wrapper = result.getResultAsType();
    return configuration.getMapper().readValue(wrapper.getSerialized(), PortabilityJob.class);
  } catch (StorageException | URISyntaxException | IOException e) {
    throw new MicrosoftStorageException("Error finding job: " + jobId, e);
  }
}