writeRequests.add(new WriteRequest()
        .withPutRequest(new PutRequest()
                .withItem(toAttributeValues(item))));
writeRequests.add(new WriteRequest()
        .withDeleteRequest(new DeleteRequest()
                .withKey(toAttributeValueMap(pkToDelete))));

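For context, a minimal sketch of how a list like writeRequests above is typically submitted with the v1 SDK; the client and tableName variables are assumptions. A single batch may mix puts and deletes, as long as no two requests target the same item.

// Sketch (assumed names: client, tableName): wrap the accumulated
// WriteRequests into a BatchWriteItemRequest and submit them.
Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();
requestItems.put(tableName, writeRequests);
BatchWriteItemResult result = client.batchWriteItem(
        new BatchWriteItemRequest().withRequestItems(requestItems));
// Throttled writes come back in UnprocessedItems and must be resubmitted.
if (!result.getUnprocessedItems().isEmpty()) {
    result = client.batchWriteItem(
            new BatchWriteItemRequest().withRequestItems(result.getUnprocessedItems()));
}
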
AttributeTransformer.Parameters<?> parameters =
        toParameters(attributeValues, clazz, tableName, config);
requestItems.add(tableName,
        new WriteRequest(new PutRequest(transformAttributes(parameters))));
requestItems.add(tableName,
        new WriteRequest(new DeleteRequest(key)));

public WriteRequest unmarshall(JsonUnmarshallerContext context) throws Exception {
    WriteRequest writeRequest = new WriteRequest();
    // ... (snippet truncated in the source)

@Override
public List<Object> batchCreate(RequestContext context, List<User> users) {
    List<WriteRequest> collect = users.stream()
            .map(user -> new WriteRequest(new PutRequest(
                    generatePutRequest(context.project, user.id, user.properties))))
            .collect(Collectors.toList());
    dynamoDBClient.batchWriteItem(new BatchWriteItemRequest()
            .withRequestItems(ImmutableMap.of(context.project, collect)));
    return null;
}

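Note that the snippet above sends the whole list in one call and discards the result. BatchWriteItem accepts at most 25 write requests per call, so larger lists need to be chunked first; a minimal sketch, assuming Guava's Lists is available alongside the ImmutableMap already in use:

// Sketch: split the requests into DynamoDB-sized batches of 25
// (Guava Lists.partition) before submitting.
for (List<WriteRequest> chunk : Lists.partition(collect, 25)) {
    dynamoDBClient.batchWriteItem(new BatchWriteItemRequest()
            .withRequestItems(ImmutableMap.of(context.project, chunk)));
}
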
requestItems.add(tableName, new WriteRequest()
        .withPutRequest(new PutRequest()
                .withItem(transformAttributes(parameters))));
requestItems.add(tableName, new WriteRequest()
        .withDeleteRequest(new DeleteRequest().withKey(key)));

@Test
public void testWriteOneBatchWithEntityTooLarge() {
    Map<String, List<WriteRequest>> batchMap = new HashMap<String, List<WriteRequest>>();
    List<WriteRequest> batchList = new ArrayList<WriteRequest>();
    WriteRequest wr1 = new WriteRequest();
    WriteRequest wr2 = new WriteRequest();
    WriteRequest wr3 = new WriteRequest();
    batchList.add(wr1);
    batchList.add(wr2);
    batchList.add(wr3);
    batchMap.put("testTable", batchList);

    EasyMock.reset(mockClient);
    AmazonServiceException ase = new AmazonServiceException("TestException");
    ase.setErrorCode("Request entity too large");
    BatchWriteItemResult mockResult = EasyMock.createMock(BatchWriteItemResult.class);
    EasyMock.reset(mockResult);
    EasyMock.expect(mockResult.getUnprocessedItems())
            .andReturn(new HashMap<String, List<WriteRequest>>()).times(2);

    // Will cause batches to be split and re-tried
    EasyMock.expect(mockClient.batchWriteItem(anyObject(BatchWriteItemRequest.class)))
            .andThrow(ase);
    EasyMock.expect(mockClient.batchWriteItem(anyObject(BatchWriteItemRequest.class)))
            .andReturn(mockResult);
    EasyMock.expect(mockClient.batchWriteItem(anyObject(BatchWriteItemRequest.class)))
            .andReturn(mockResult);
    EasyMock.replay(mockClient, mockResult);

    List<FailedBatch> result = mapper.writeOneBatch(batchMap);
    assertEquals(0, result.size());
    EasyMock.verify(mockClient);
}

public WriteRequest unmarshall(JsonUnmarshallerContext context) throws Exception {
    AwsJsonReader reader = context.getReader();
    if (!reader.isContainer()) {
        reader.skipValue();
        return null;
    }
    WriteRequest writeRequest = new WriteRequest();
    reader.beginObject();
    while (reader.hasNext()) {
        String name = reader.nextName();
        if (name.equals("PutRequest")) {
            writeRequest.setPutRequest(
                    PutRequestJsonUnmarshaller.getInstance().unmarshall(context));
        } else if (name.equals("DeleteRequest")) {
            writeRequest.setDeleteRequest(
                    DeleteRequestJsonUnmarshaller.getInstance().unmarshall(context));
        } else {
            reader.skipValue();
        }
    }
    reader.endObject();
    return writeRequest;
}

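The unmarshaller above mirrors the wire format of a single WriteRequest in a BatchWriteItem payload, which is why it switches on exactly those two keys:

// Wire shape the unmarshaller consumes: one WriteRequest object inside
// the "RequestItems" map of a BatchWriteItem payload, carrying exactly
// one of PutRequest or DeleteRequest (example item/key shown):
//
//   { "PutRequest":    { "Item": { "id": { "S": "42" } } } }
//   { "DeleteRequest": { "Key":  { "id": { "S": "42" } } } }
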
private List<WriteRequest> generateWriteRequests(List<String> keys) {
    return keys.stream()
            .map(key -> ImmutableMap.of(
                    partitionKeyName, new AttributeValue(key),
                    ATTRIBUTE_NAME, new AttributeValue(this.dataGenerator.getRandomValue())))
            .map(item -> new PutRequest().withItem(item))
            .map(put -> new WriteRequest().withPutRequest(put))
            .collect(Collectors.toList());
}

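One caveat when requests like these are batched: DynamoDB rejects a BatchWriteItem call containing two operations on the same item, so if the incoming keys list can contain repeats, de-duplicating first avoids a ValidationException. A minimal sketch:

// Sketch: de-duplicate keys before building requests, since a single
// BatchWriteItem batch must not contain two operations on the same item.
List<WriteRequest> requests = generateWriteRequests(
        keys.stream().distinct().collect(Collectors.toList()));
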
@Override
public <P extends ParaObject> void deleteAll(String appid, List<P> objects) {
    if (objects == null || objects.isEmpty() || StringUtils.isBlank(appid)) {
        return;
    }
    List<WriteRequest> reqs = new ArrayList<WriteRequest>(objects.size());
    for (ParaObject object : objects) {
        if (object != null) {
            reqs.add(new WriteRequest().withDeleteRequest(new DeleteRequest().
                    withKey(Collections.singletonMap(Config._KEY,
                            new AttributeValue(object.getId())))));
        }
    }
    batchWrite(Collections.singletonMap(getTableNameForAppid(appid), reqs));
    logger.debug("DAO.deleteAll() {}", objects.size());
}

@Override
public <P extends ParaObject> void deleteAll(String appid, List<P> objects) {
    if (objects == null || objects.isEmpty() || StringUtils.isBlank(appid)) {
        return;
    }
    List<WriteRequest> reqs = new ArrayList<>(objects.size());
    for (ParaObject object : objects) {
        if (object != null) {
            reqs.add(new WriteRequest().withDeleteRequest(new DeleteRequest().
                    withKey(Collections.singletonMap(Config._KEY,
                            new AttributeValue(getKeyForAppid(object.getId(), appid))))));
        }
    }
    batchWrite(Collections.singletonMap(getTableNameForAppid(appid), reqs), 1);
    logger.debug("DAO.deleteAll() {}", objects.size());
}

public void deleteRowsFromDataIndex(final byte[][] dataIds, final short adapterId) {
    final String tableName = getQualifiedTableName(DataIndexUtils.DATA_ID_INDEX.getName());
    final Iterator<byte[]> dataIdIterator = Arrays.stream(dataIds).iterator();
    while (dataIdIterator.hasNext()) {
        final List<WriteRequest> deleteRequests = new ArrayList<>();
        int i = 0;
        while (dataIdIterator.hasNext() && (i < MAX_ROWS_FOR_BATCHWRITER)) {
            deleteRequests.add(new WriteRequest(new DeleteRequest(
                    Collections.singletonMap(
                            DynamoDBRow.GW_PARTITION_ID_KEY,
                            new AttributeValue().withB(ByteBuffer.wrap(dataIdIterator.next()))))));
            i++;
        }
        client.batchWriteItem(Collections.singletonMap(tableName, deleteRequests));
    }
}

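The call above discards the BatchWriteItemResult, so throttled deletes would be dropped silently. A minimal retry sketch with capped exponential backoff, reusing the snippet's client and tableName; the delay bounds are assumptions:

// Sketch: resubmit unprocessed delete requests with capped exponential
// backoff instead of discarding the BatchWriteItemResult.
Map<String, List<WriteRequest>> pending = Collections.singletonMap(tableName, deleteRequests);
long delayMillis = 100;
while (!pending.isEmpty() && delayMillis <= 3200) {
    pending = client.batchWriteItem(pending).getUnprocessedItems();
    if (!pending.isEmpty()) {
        try {
            Thread.sleep(delayMillis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            break;
        }
        delayMillis *= 2;
    }
}
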
writeBatchList = writeBatchMap.get(tableName);
writeBatchList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(item)));
writeBatchMapSizeBytes += itemSizeBytes;

@Override
public List<Map<String, AttributeValue>> emit(final UnmodifiableBuffer<Map<String, AttributeValue>> buffer)
        throws IOException {
    // Map of WriteRequests to records for reference
    Map<WriteRequest, Map<String, AttributeValue>> requestMap =
            new HashMap<WriteRequest, Map<String, AttributeValue>>();
    List<Map<String, AttributeValue>> unproc = new ArrayList<Map<String, AttributeValue>>();
    // Build a batch request with a record list
    List<WriteRequest> rList = new ArrayList<WriteRequest>();
    List<Map<String, AttributeValue>> resultList;
    // Amazon DynamoDB only allows one operation per item in a bulk insertion (no duplicate items)
    Set<Map<String, AttributeValue>> uniqueItems = uniqueItems(buffer.getRecords());
    for (Map<String, AttributeValue> item : uniqueItems) {
        WriteRequest wr = new WriteRequest().withPutRequest(new PutRequest().withItem(item));
        // add to the map
        requestMap.put(wr, item);
        // add to the list of requests
        rList.add(wr);
        // Max of sixteen not to exceed maximum request size
        if (rList.size() == 16) {
            resultList = performBatchRequest(rList, requestMap);
            unproc.addAll(resultList);
            rList.clear();
        }
    }
    resultList = performBatchRequest(rList, requestMap);
    unproc.addAll(resultList);
    LOG.info("Successfully emitted " + (buffer.getRecords().size() - unproc.size())
            + " records into DynamoDB.");
    return unproc;
}

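performBatchRequest is not shown in the snippet above; the following is a hypothetical sketch of what it plausibly does, using requestMap to translate unprocessed WriteRequests back into their source records. The dynamoDBClient and tableName fields are assumptions; the lookup relies on the SDK model classes implementing value-based equals/hashCode.

// Hypothetical sketch of performBatchRequest: submit one batch and use
// requestMap to map any unprocessed WriteRequests back to their records.
private List<Map<String, AttributeValue>> performBatchRequest(
        List<WriteRequest> requests,
        Map<WriteRequest, Map<String, AttributeValue>> requestMap) {
    List<Map<String, AttributeValue>> unprocessed = new ArrayList<Map<String, AttributeValue>>();
    if (requests.isEmpty()) {
        return unprocessed;
    }
    BatchWriteItemResult result = dynamoDBClient.batchWriteItem(new BatchWriteItemRequest()
            .withRequestItems(Collections.singletonMap(tableName, requests)));
    for (List<WriteRequest> leftover : result.getUnprocessedItems().values()) {
        for (WriteRequest wr : leftover) {
            unprocessed.add(requestMap.get(wr));
        }
    }
    return unprocessed;
}
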
Map<String, AttributeValue> row = toRow(object, null);
setRowKey(object.getId(), row);
reqs.add(new WriteRequest().withPutRequest(new PutRequest().withItem(row)));
j++;

logger.debug("Preparing to delete '{}' from shared table, appid: '{}'.", key, appid); pager.setLastKey(item.getString(Config._ID)); deletePage.add(new WriteRequest().withDeleteRequest(new DeleteRequest(). withKey(Collections.singletonMap(Config._KEY, new AttributeValue(key)))));
public BatchWritePointResult batchWritePoints(List<PutPointRequest> putPointRequests) {
    BatchWriteItemRequest batchItemRequest = new BatchWriteItemRequest();
    List<WriteRequest> writeRequests = new ArrayList<WriteRequest>();
    for (PutPointRequest putPointRequest : putPointRequests) {
        long geohash = S2Manager.generateGeohash(putPointRequest.getGeoPoint());
        long hashKey = S2Manager.generateHashKey(geohash, config.getHashKeyLength());
        String geoJson = GeoJsonMapper.stringFromGeoObject(putPointRequest.getGeoPoint());

        PutRequest putRequest = putPointRequest.getPutRequest();
        AttributeValue hashKeyValue = new AttributeValue().withN(String.valueOf(hashKey));
        putRequest.getItem().put(config.getHashKeyAttributeName(), hashKeyValue);
        putRequest.getItem().put(config.getRangeKeyAttributeName(), putPointRequest.getRangeKeyValue());

        AttributeValue geohashValue = new AttributeValue().withN(Long.toString(geohash));
        putRequest.getItem().put(config.getGeohashAttributeName(), geohashValue);
        AttributeValue geoJsonValue = new AttributeValue().withS(geoJson);
        putRequest.getItem().put(config.getGeoJsonAttributeName(), geoJsonValue);

        WriteRequest writeRequest = new WriteRequest(putRequest);
        writeRequests.add(writeRequest);
    }
    Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();
    requestItems.put(config.getTableName(), writeRequests);
    batchItemRequest.setRequestItems(requestItems);
    BatchWriteItemResult batchWriteItemResult = config.getDynamoDBClient().batchWriteItem(batchItemRequest);
    BatchWritePointResult batchWritePointResult = new BatchWritePointResult(batchWriteItemResult);
    return batchWritePointResult;
}

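A plausible call site for batchWritePoints, modeled on the dynamodb-geo samples; the two-argument PutPointRequest constructor and the geoDataManager variable are assumptions, not confirmed by the snippet:

// Sketch of a call site (assumed: geoDataManager exposes batchWritePoints,
// and PutPointRequest has a (GeoPoint, rangeKey) constructor as in the
// dynamodb-geo samples).
List<PutPointRequest> points = new ArrayList<PutPointRequest>();
GeoPoint geoPoint = new GeoPoint(47.6062, -122.3321);            // latitude, longitude
AttributeValue rangeKey = new AttributeValue().withS("point-1"); // unique per point
points.add(new PutPointRequest(geoPoint, rangeKey));
BatchWritePointResult result = geoDataManager.batchWritePoints(points);
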
if (value.length == 0) {
    value = EMPTY_VALUE;
}
item.put("value", new AttributeValue().withB(ByteBuffer.wrap(value)));
list.add(new WriteRequest().withPutRequest(new PutRequest(item)));
list.add(new WriteRequest().withDeleteRequest(new DeleteRequest(item)));
for (DColumn c : getColumnSlice(mutation.getStoreName(), mutation.getRowKey(), null, null)) {
    Map<String, AttributeValue> rowKey = getPrimaryKey(
            mutation.getStoreName() + "_" + mutation.getRowKey(), c.getName());
    list.add(new WriteRequest().withDeleteRequest(new DeleteRequest(rowKey)));
}
