/**
 * Computes the URL-safe Base64 encoded rowkey string for an alert entity.
 *
 * @param entity the alert entity whose rowkey is requested
 * @return the entity's rowkey, Base64-encoded with a URL-safe alphabet
 * @throws Exception if the entity definition cannot be resolved or conversion fails
 */
public static String getEncodedRowkey(AlertAPIEntity entity) throws Exception {
    // Convert the API entity into the internal write representation first;
    // the rowkey is only defined on InternalLog.
    final InternalLog internalLog = HBaseInternalLogHelper.convertToInternalLog(
            entity, EntityDefinitionManager.getEntityDefinitionByEntityClass(entity.getClass()));
    final byte[] rowkey = RowkeyBuilder.buildRowkey(internalLog);
    return EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey);
}
/** * build rowkey from InternalLog object * * @param log internal log entity to write * @return the rowkey of the entity */ public static byte[] buildRowkey(InternalLog log) { final String[] partitions = log.getPartitions(); final Map<String, String> tags = log.getTags(); final SortedMap<Integer, Integer> tagHashMap = generateSortedTagMap(partitions, tags); // reverse timestamp long ts = Long.MAX_VALUE - log.getTimestamp(); List<Integer> partitionHashValues = new ArrayList<Integer>(); if (partitions != null) { for (String partition : partitions) { final String tagValue = tags.get(partition); if (tagValue != null) { partitionHashValues.add(tagValue.hashCode()); } else { partitionHashValues.add(EMPTY_PARTITION_DEFAULT_HASH_CODE); } } } return buildRowkey(log.getPrefix().hashCode(), partitionHashValues, ts, tagHashMap); }
/** * Generate the internal sorted hashmap for tags. Please note the partition tags should not be included in * the result map. * * @param partitions array of partition tags in order * @param tags tags of the entity * @return the sorted hash map of the tags */ public static SortedMap<Integer, Integer> generateSortedTagMap(String[] partitions, Map<String, String> tags) { final SortedMap<Integer, Integer> tagHashMap = new TreeMap<Integer, Integer>(); for (Map.Entry<String, String> entry : tags.entrySet()) { final String tagName = entry.getKey(); final String tagValue = entry.getValue(); // If it's a partition tag, we need to remove it from tag hash list. It need to // put to the fix partition hash slot in rowkey. if (tagValue == null || isPartitionTag(partitions, tagName)) { continue; } tagHashMap.put(tagName.hashCode(), tagValue.hashCode()); } return tagHashMap; }
// Advance the scan counter, then recover the timestamp encoded in the first
// result's rowkey. NOTE(review): assumes `results` is non-empty at this point
// — confirm the caller guarantees at least one result.
counter++; byte[] row = results.get(0).getRow(); long timestamp = RowkeyBuilder.getTimestamp(row, ed);
public byte[] generateIndexRowkey(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { if (entity.getClass() != entityDef.getEntityClass()) { throw new IllegalArgumentException("Expected entity class: " + entityDef.getEntityClass().getName() + ", but got class " + entity.getClass().getName()); } final byte[][] indexValues = generateIndexValues(entity); final int[] partitionHashCodes = generatePartitionHashCodes(entity); SortedMap<Integer, Integer> tagMap = null; if (!index.unique()) { // non cluster index tagMap = RowkeyBuilder.generateSortedTagMap(entityDef.getPartitions(), entity.getTags()); } return generateUniqueIndexRowkey(indexValues, partitionHashCodes, tagMap); }
// Recover the timestamp encoded in the first result's rowkey.
// NOTE(review): assumes `results` is non-empty — confirm with the caller.
byte[] row = results.get(0).getRow(); long timestamp = RowkeyBuilder.getTimestamp(row,ed);
public byte[] generateIndexRowkey(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { if (entity.getClass() != entityDef.getEntityClass()) { throw new IllegalArgumentException("Expected entity class: " + entityDef.getEntityClass().getName() + ", but got class " + entity.getClass().getName()); } final byte[][] indexValues = generateIndexValues(entity); final int[] partitionHashCodes = generatePartitionHashCodes(entity); SortedMap<Integer, Integer> tagMap = null; if (!index.unique()) { // non cluster index tagMap = RowkeyBuilder.generateSortedTagMap(entityDef.getPartitions(), entity.getTags()); } return generateUniqueIndexRowkey(indexValues, partitionHashCodes, tagMap); }
/**
 * Writes a batch of internal logs as a single multi-Put, including any
 * associated index rows.
 *
 * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
 *
 * @param logs the log entries to persist
 * @return the rowkeys written, in the same order as {@code logs}
 * @throws IOException if the underlying HBase put fails
 */
public List<byte[]> write(List<InternalLog> logs) throws IOException {
    final int count = logs.size();
    final List<Put> puts = new ArrayList<Put>(count);
    final List<byte[]> rowkeys = new ArrayList<byte[]>(count);
    for (InternalLog entry : logs) {
        final byte[] rowkey = RowkeyBuilder.buildRowkey(entry);
        final Put put = new Put(rowkey);
        populateColumnValues(put, entry);
        puts.add(put);
        // Index rows piggyback on the same batch so data and index land together.
        final List<byte[]> indexRowkeys = entry.getIndexRowkeys();
        if (indexRowkeys != null) {
            writeIndexes(rowkey, indexRowkeys, puts);
        }
        rowkeys.add(rowkey);
    }
    tbl.put(puts);
    return rowkeys;
}
// Advance the scan counter, then recover the timestamp encoded in the first
// result's rowkey. NOTE(review): assumes `results` is non-empty at this point
// — confirm the caller guarantees at least one result.
counter++; byte[] row = results.get(0).getRow(); long timestamp = RowkeyBuilder.getTimestamp(row, ed);
/** * build rowkey from InternalLog object * @param log internal log entity to write * @return the rowkey of the entity */ public static byte[] buildRowkey(InternalLog log) { final String[] partitions = log.getPartitions(); final Map<String, String> tags = log.getTags(); final SortedMap<Integer, Integer> tagHashMap = generateSortedTagMap(partitions, tags); // reverse timestamp long ts = Long.MAX_VALUE - log.getTimestamp(); List<Integer> partitionHashValues = new ArrayList<Integer>(); if (partitions != null) { for (String partition : partitions) { final String tagValue = tags.get(partition); if (tagValue != null) { partitionHashValues.add(tagValue.hashCode()); } else { partitionHashValues.add(EMPTY_PARTITION_DEFAULT_HASH_CODE); } } } return buildRowkey(log.getPrefix().hashCode(), partitionHashValues, ts, tagHashMap); }
/** * Generate the internal sorted hashmap for tags. Please note the partition tags should not be included in the result map. * @param partitions array of partition tags in order * @param tags tags of the entity * @return the sorted hash map of the tags */ public static SortedMap<Integer, Integer> generateSortedTagMap(String[] partitions, Map<String, String> tags) { final SortedMap<Integer, Integer> tagHashMap = new TreeMap<Integer, Integer>(); for (Map.Entry<String, String> entry: tags.entrySet()) { final String tagName = entry.getKey(); final String tagValue = entry.getValue(); // If it's a partition tag, we need to remove it from tag hash list. It need to // put to the fix partition hash slot in rowkey. if (tagValue == null || isPartitionTag(partitions, tagName)) continue; tagHashMap.put(tagName.hashCode(), tagValue.hashCode()); } return tagHashMap; }
/**
 * Persists a batch of internal logs with one multi-Put, carrying any index
 * rows along in the same batch.
 *
 * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
 *
 * @param logs the log entries to persist
 * @return the rowkeys written, in the same order as {@code logs}
 * @throws IOException if the underlying HBase put fails
 */
public List<byte[]> write(List<InternalLog> logs) throws IOException {
    final List<Put> puts = new ArrayList<Put>(logs.size());
    final List<byte[]> result = new ArrayList<byte[]>(logs.size());
    for (InternalLog log : logs) {
        final byte[] rowkey = RowkeyBuilder.buildRowkey(log);
        final Put p = new Put(rowkey);
        populateColumnValues(p, log);
        puts.add(p);
        // Add index puts into the same batch so data and index stay together.
        final List<byte[]> indexRowkeys = log.getIndexRowkeys();
        if (indexRowkeys != null) {
            writeIndexes(rowkey, indexRowkeys, puts);
        }
        result.add(rowkey);
    }
    tbl.put(puts);
    return result;
}
// Advance the scan counter, then recover the timestamp encoded in the first
// result's rowkey. NOTE(review): assumes `results` is non-empty at this point
// — confirm the caller guarantees at least one result.
counter ++; byte[] row = results.get(0).getRow(); long timestamp = RowkeyBuilder.getTimestamp(row, ed);
/**
 * Resolves the rowkey for the given log: decodes the pre-encoded rowkey when
 * one is present, otherwise builds the rowkey from the log's fields.
 *
 * @param log the internal log entry
 * @return the rowkey bytes
 */
public static byte[] getRowkey(InternalLog log) {
    final String encoded = log.getEncodedRowkey();
    if (encoded == null || encoded.isEmpty()) {
        return RowkeyBuilder.buildRowkey(log);
    }
    return EagleBase64Wrapper.decode(encoded);
}
// When a paging start rowkey is supplied, decode it and extract the timestamp
// it encodes so the scan can resume from that point.
if (startRowkey != null) { final byte[] lastRowkey = EagleBase64Wrapper.decode(startRowkey); lastTimestamp = RowkeyBuilder.getTimestamp(lastRowkey, entityDef);
/**
 * Resolves the rowkey for the given log: prefers the pre-encoded rowkey if
 * present, otherwise derives one from the log's fields.
 *
 * @param log the internal log entry
 * @return the rowkey bytes
 */
public static byte[] getRowkey(InternalLog log) {
    final String encodedRowkey = log.getEncodedRowkey();
    final boolean hasEncoded = encodedRowkey != null && !encodedRowkey.isEmpty();
    return hasEncoded
            ? EagleBase64Wrapper.decode(encodedRowkey)
            : RowkeyBuilder.buildRowkey(log);
}
// When a paging start rowkey is supplied, decode it and extract the timestamp
// it encodes so the scan can resume from that point.
if (startRowkey != null) { final byte[] lastRowkey = EagleBase64Wrapper.decode(startRowkey); lastTimestamp = RowkeyBuilder.getTimestamp(lastRowkey, entityDef);
/**
 * Writes a single internal log entry, then writes its index rows if any.
 *
 * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
 *
 * @param log the log entry to persist
 * @return the rowkey written
 * @throws IOException if the underlying HBase put fails
 */
@Override
public byte[] write(InternalLog log) throws IOException {
    final byte[] rowkey = RowkeyBuilder.buildRowkey(log);
    final Put put = new Put(rowkey);
    populateColumnValues(put, log);
    tbl.put(put);
    // Index rows are written after the data row, in a separate call.
    final List<byte[]> indexRowkeys = log.getIndexRowkeys();
    if (indexRowkeys != null) {
        writeIndexes(rowkey, indexRowkeys);
    }
    return rowkey;
}
/**
 * Persists one internal log entry and then any associated index rows.
 *
 * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
 *
 * @param log the log entry to persist
 * @return the rowkey written
 * @throws IOException if the underlying HBase put fails
 */
@Override
public byte[] write(InternalLog log) throws IOException {
    final byte[] rowkey = RowkeyBuilder.buildRowkey(log);
    final Put dataPut = new Put(rowkey);
    populateColumnValues(dataPut, log);
    tbl.put(dataPut);
    final List<byte[]> indexRowkeys = log.getIndexRowkeys();
    // Index rows follow the data row; skip when the log carries no indexes.
    if (indexRowkeys != null) {
        writeIndexes(rowkey, indexRowkeys);
    }
    return rowkey;
}
/**
 * Resolves the rowkey for an API entity: decodes the pre-encoded rowkey when
 * present, otherwise converts the entity to an internal log and builds one.
 *
 * @param entity the entity whose rowkey is requested
 * @param entityDef definition describing the entity's storage layout
 * @return the rowkey bytes
 * @throws Exception if the conversion to an internal log fails
 */
public static byte[] getRowkey(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception {
    final String encoded = entity.getEncodedRowkey();
    if (encoded != null && !encoded.isEmpty()) {
        return EagleBase64Wrapper.decode(encoded);
    }
    final InternalLog log = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef);
    return RowkeyBuilder.buildRowkey(log);
}