/**
 * Subdivides the token range [startToken, endToken] by repeatedly inserting the
 * midpoint between every adjacent pair of tokens, {@code numResplits} times.
 * The returned list therefore contains {@code 2^numResplits + 1} boundary tokens.
 */
@VisibleForTesting
public List<Token> resplitLocally(String startToken, String endToken, int numResplits) {
    List<Token> boundaries = ImmutableList.of(
            _tokenFactory.fromString(startToken), _tokenFactory.fromString(endToken));
    for (int pass = 0; pass < numResplits; pass++) {
        // Each pass doubles the number of ranges: old boundary, midpoint, old boundary, ...
        List<Token> refined = new ArrayList<>(boundaries.size() * 2 - 1);
        for (int k = 0; k + 1 < boundaries.size(); k++) {
            refined.add(boundaries.get(k));
            refined.add(ByteOrderedPartitioner.instance.midpoint(boundaries.get(k), boundaries.get(k + 1)));
        }
        refined.add(boundaries.get(boundaries.size() - 1));
        boundaries = refined;
    }
    return boundaries;
}
/**
 * Reads a sequence of length-prefixed tokens from {@code in}, decoding each with
 * the partitioner's token factory. A length prefix smaller than 1 terminates the
 * stream.
 *
 * @throws IOException if the underlying stream fails or ends prematurely
 */
public static Collection<Token> deserialize(IPartitioner partitioner, DataInput in) throws IOException {
    Collection<Token> tokens = new ArrayList<Token>();
    for (int size = in.readInt(); size >= 1; size = in.readInt()) {
        logger.trace("Reading token of {}", FBUtilities.prettyPrintMemory(size));
        byte[] raw = new byte[size];
        in.readFully(raw);
        tokens.add(partitioner.getTokenFactory().fromByteArray(ByteBuffer.wrap(raw)));
    }
    return tokens;
}
}
/** Converts each token to its string form via the node's token factory. */
private static Set<String> tokensAsSet(Collection<Token> tokens) {
    // Common fast path: no tokens at all.
    if (tokens.isEmpty())
        return Collections.emptySet();
    Token.TokenFactory factory = StorageService.instance.getTokenFactory();
    Set<String> result = new HashSet<>(tokens.size());
    for (Token token : tokens)
        result.add(factory.toString(token));
    return result;
}
// Reject token strings that are not well-formed for the active partitioner.
partitioner.getTokenFactory().validate(token);
// Length-prefixed token: read the vint size, then the raw token bytes, and decode
// them with the configured partitioner's token factory. Any other type tag is an error.
byte[] buf = new byte[readVInt()]; readFully(buf); return DatabaseDescriptor.getPartitioner().getTokenFactory().fromByteArray(ByteBuffer.wrap(buf)); default: throw new IOException("Can't read unknown type [" + type + "]");
/**
 * Recursive function that splits a given token range to a given number of token ranges.
 *
 * @param range the token range to be splitted.
 * @param partitioner the cassandra partitioner.
 * @param bisectFactor the actual number of pieces the original token range will be splitted to
 *                     (halved on each recursive step; presumably expected to be a power of
 *                     two — TODO confirm with callers).
 * @param accumulator a token range accumulator that collects the resulting sub-ranges.
 */
private static void bisectTokeRange(
        DeepTokenRange range, final IPartitioner partitioner, final int bisectFactor,
        final List<DeepTokenRange> accumulator) {
    final AbstractType tkValidator = partitioner.getTokenValidator();
    // Convert both range bounds into partitioner tokens so the partitioner can compute a midpoint.
    Token leftToken = partitioner.getTokenFactory().fromByteArray(tkValidator.decompose(range.getStartToken()));
    Token rightToken = partitioner.getTokenFactory().fromByteArray(tkValidator.decompose(range.getEndToken()));
    Token midToken = partitioner.midpoint(leftToken, rightToken);
    // Round-trip the midpoint through its string form to recover a Comparable range bound.
    Comparable midpoint = (Comparable) tkValidator.compose(tkValidator.fromString(midToken.toString()));
    DeepTokenRange left = new DeepTokenRange(range.getStartToken(), midpoint, range.getReplicas());
    DeepTokenRange right = new DeepTokenRange(midpoint, range.getEndToken(), range.getReplicas());
    if (bisectFactor / 2 <= 1) {
        // Base case: no further bisection requested; emit both halves.
        accumulator.add(left);
        accumulator.add(right);
    } else {
        bisectTokeRange(left, partitioner, bisectFactor / 2, accumulator);
        bisectTokeRange(right, partitioner, bisectFactor / 2, accumulator);
    }
}
@Timed (name = "bv.emodb.sor.AstyanaxDataReaderDAO.getSplits", absolute = true) @Override public List<String> getSplits(Table tbl, int recordsPerSplit, int localResplits) throws TimeoutException { checkNotNull(tbl, "table"); checkArgument(recordsPerSplit > 0); checkArgument(localResplits >= 0); try { List<String> splits = new ArrayList<>(); List<CfSplit> cfSplits = getCfSplits(tbl, recordsPerSplit); for (CfSplit split : cfSplits) { List<Token> splitTokens = resplitLocally(split.getStartToken(), split.getEndToken(), localResplits); for (int i = 0; i < splitTokens.size() -1; i++) { splits.add(SplitFormat.encode(new ByteBufferRangeImpl(_tokenFactory.toByteArray(splitTokens.get(i)), _tokenFactory.toByteArray(splitTokens.get(i + 1)), -1, false))); } } // Randomize the splits so, if processed somewhat in parallel, requests distribute around the ring. Collections.shuffle(splits); return splits; } catch (Exception e) { if (isTimeoutException(e)) { throw new TimeoutException(); } else { throw Throwables.propagate(e); } } }
@Timed (name = "bv.emodb.sor.AstyanaxDataReaderDAO.getSplits", absolute = true) @Override public List<String> getSplits(Table tbl, int recordsPerSplit, int localResplits) throws TimeoutException { checkNotNull(tbl, "table"); checkArgument(recordsPerSplit > 0); checkArgument(localResplits >= 0); try { List<String> splits = new ArrayList<>(); List<CfSplit> cfSplits = getCfSplits(tbl, recordsPerSplit); for (CfSplit split : cfSplits) { List<Token> splitTokens = resplitLocally(split.getStartToken(), split.getEndToken(), localResplits); for (int i = 0; i < splitTokens.size() -1; i++) { splits.add(SplitFormat.encode(new ByteBufferRangeImpl(_tokenFactory.toByteArray(splitTokens.get(i)), _tokenFactory.toByteArray(splitTokens.get(i + 1)), -1, false))); } } // Randomize the splits so, if processed somewhat in parallel, requests distribute around the ring. Collections.shuffle(splits); return splits; } catch (Exception e) { if (isTimeoutException(e)) { throw new TimeoutException(); } else { throw Throwables.propagate(e); } } }
@Timed (name = "bv.emodb.sor.AstyanaxDataReaderDAO.getSplits", absolute = true) @Override public List<String> getSplits(Table tbl, int recordsPerSplit, int localResplits) throws TimeoutException { checkNotNull(tbl, "table"); checkArgument(recordsPerSplit > 0); checkArgument(localResplits >= 0); try { List<String> splits = new ArrayList<>(); List<CfSplit> cfSplits = getCfSplits(tbl, recordsPerSplit); for (CfSplit split : cfSplits) { List<Token> splitTokens = resplitLocally(split.getStartToken(), split.getEndToken(), localResplits); for (int i = 0; i < splitTokens.size() -1; i++) { splits.add(SplitFormat.encode(new ByteBufferRangeImpl(_tokenFactory.toByteArray(splitTokens.get(i)), _tokenFactory.toByteArray(splitTokens.get(i + 1)), -1, false))); } } // Randomize the splits so, if processed somewhat in parallel, requests distribute around the ring. Collections.shuffle(splits); return splits; } catch (Exception e) { if (isTimeoutException(e)) { throw new TimeoutException(); } else { throw Throwables.propagate(e); } } }
/**
 * Resolves the token for one bound of a token-restricted range query.
 * Returns the partitioner's minimum token when the slice has no bound on
 * the requested side.
 *
 * @throws InvalidRequestException if the bound evaluates to a null value
 */
private Token getTokenBound(Bound b, QueryOptions options, IPartitioner p) throws InvalidRequestException {
    assert onToken;
    Restriction restriction = keyRestrictions[0];
    assert !restriction.isMultiColumn() : "Unexpectedly got a multi-column restriction on a partition key for a range query";
    SingleColumnRestriction keyRestriction = (SingleColumnRestriction)restriction;
    ByteBuffer value;
    if (!keyRestriction.isEQ()) {
        // Slice restriction: use the requested bound, or the ring minimum if unbounded.
        SingleColumnRestriction.Slice slice = (SingleColumnRestriction.Slice)keyRestriction;
        if (!slice.hasBound(b))
            return p.getMinimumToken();
        value = slice.bound(b, options);
    } else {
        value = keyRestriction.values(options).get(0);
    }
    if (value == null)
        throw new InvalidRequestException("Invalid null token value");
    return p.getTokenFactory().fromByteArray(value);
}
/**
 * Reads a sequence of length-prefixed tokens from {@code in}, decoding each with
 * the partitioner's token factory. A length prefix smaller than 1 terminates the
 * stream.
 *
 * @throws IOException if the underlying stream fails or ends prematurely
 */
public static Collection<Token> deserialize(IPartitioner partitioner, DataInput in) throws IOException {
    Collection<Token> tokens = new ArrayList<Token>();
    for (int size = in.readInt(); size >= 1; size = in.readInt()) {
        logger.trace("Reading token of {} bytes", size);
        byte[] raw = new byte[size];
        in.readFully(raw);
        tokens.add(partitioner.getTokenFactory().fromByteArray(ByteBuffer.wrap(raw)));
    }
    return tokens;
}
}
/**
 * Reads a sequence of length-prefixed tokens from {@code in}, decoding each with
 * the partitioner's token factory. A length prefix smaller than 1 terminates the
 * stream.
 *
 * @throws IOException if the underlying stream fails or ends prematurely
 */
public static Collection<Token> deserialize(IPartitioner partitioner, DataInput in) throws IOException {
    Collection<Token> tokens = new ArrayList<Token>();
    for (int size = in.readInt(); size >= 1; size = in.readInt()) {
        logger.trace("Reading token of {}", FBUtilities.prettyPrintMemory(size));
        byte[] raw = new byte[size];
        in.readFully(raw);
        tokens.add(partitioner.getTokenFactory().fromByteArray(ByteBuffer.wrap(raw)));
    }
    return tokens;
}
}
/**
 * Reads a sequence of length-prefixed tokens from {@code in}, decoding each with
 * the partitioner's token factory. A length prefix smaller than 1 terminates the
 * stream.
 *
 * @throws IOException if the underlying stream fails or ends prematurely
 */
public static Collection<Token> deserialize(IPartitioner partitioner, DataInput in) throws IOException {
    Collection<Token> tokens = new ArrayList<Token>();
    for (int size = in.readInt(); size >= 1; size = in.readInt()) {
        logger.trace("Reading token of {}", FBUtilities.prettyPrintMemory(size));
        byte[] raw = new byte[size];
        in.readFully(raw);
        tokens.add(partitioner.getTokenFactory().fromByteArray(ByteBuffer.wrap(raw)));
    }
    return tokens;
}
}
/**
 * Handle node moving inside the ring.
 *
 * @param endpoint moving endpoint address
 * @param pieces STATE_MOVING, token
 */
private void handleStateMoving(InetAddress endpoint, String[] pieces) {
    assert pieces.length >= 2;
    // pieces[1] carries the token the endpoint is moving to.
    Token target = getPartitioner().getTokenFactory().fromString(pieces[1]);
    if (logger.isDebugEnabled()) {
        logger.debug("Node {} state moving, new token {}", endpoint, target);
    }
    // Record the move and recompute pending ranges accordingly.
    tokenMetadata.addMovingEndpoint(target, endpoint);
    PendingRangeCalculatorService.instance.update();
}
/**
 * Subdivides the token range [startToken, endToken] by repeatedly inserting the
 * midpoint between every adjacent pair of tokens, {@code numResplits} times.
 * The returned list therefore contains {@code 2^numResplits + 1} boundary tokens.
 */
@VisibleForTesting
public List<Token> resplitLocally(String startToken, String endToken, int numResplits) {
    List<Token> boundaries = ImmutableList.of(
            _tokenFactory.fromString(startToken), _tokenFactory.fromString(endToken));
    for (int pass = 0; pass < numResplits; pass++) {
        // Each pass doubles the number of ranges: old boundary, midpoint, old boundary, ...
        List<Token> refined = new ArrayList<>(boundaries.size() * 2 - 1);
        for (int k = 0; k + 1 < boundaries.size(); k++) {
            refined.add(boundaries.get(k));
            refined.add(ByteOrderedPartitioner.instance.midpoint(boundaries.get(k), boundaries.get(k + 1)));
        }
        refined.add(boundaries.get(boundaries.size() - 1));
        boundaries = refined;
    }
    return boundaries;
}
/**
 * Parses the parameter {@code key} as a delimited list of token range bounds,
 * e.g. {@code "[(t1,t2),(t3,t4)]"}; bounds are consumed in left/right pairs.
 *
 * @param key the parameter name to look up.
 * @return the parsed token ranges, or {@code null} if the parameter is absent.
 */
public Collection<Range<Token>> paramsAsTokenRanges(String key) {
    String value = param(key);
    if (value == null)
        return null;
    Collection<Range<Token>> tokenRanges = new ArrayList<Range<Token>>();
    Token.TokenFactory tokenFactory = DatabaseDescriptor.getPartitioner().getTokenFactory();
    // Delimiters strip the surrounding {[( ... )]} punctuation, leaving bare token strings.
    StringTokenizer stk = new StringTokenizer(value, "{[(,)]}");
    while (stk.hasMoreTokens()) {
        Token leftToken = tokenFactory.fromString(stk.nextToken());
        Token rightToken = tokenFactory.fromString(stk.nextToken());
        // Fix: use the diamond operator instead of the raw Range type, which
        // produced an unchecked-conversion warning.
        tokenRanges.add(new Range<>(leftToken, rightToken));
    }
    return tokenRanges;
}
}
/** Decodes a token's string form into its serialized byte representation. */
private ByteBuffer parseTokenString(String string) {
    Token token = _tokenFactory.fromString(string);
    return _tokenFactory.toByteArray(token);
}
/** Encodes a serialized token into its canonical string representation. */
private String toTokenString(ByteBuffer bytes) {
    Token token = _tokenFactory.fromByteArray(bytes);
    return _tokenFactory.toString(token);
}
/** Encodes a serialized token into its canonical string representation. */
private String toTokenString(ByteBuffer bytes) {
    Token token = _tokenFactory.fromByteArray(bytes);
    return _tokenFactory.toString(token);
}
/**
 * Handle node moving inside the ring.
 *
 * @param endpoint moving endpoint address
 * @param pieces STATE_MOVING, token
 */
private void handleStateMoving(InetAddress endpoint, String[] pieces) {
    assert pieces.length >= 2;
    // pieces[1] carries the token the endpoint is moving to.
    Token target = getTokenFactory().fromString(pieces[1]);
    if (logger.isDebugEnabled()) {
        logger.debug("Node {} state moving, new token {}", endpoint, target);
    }
    // Record the move and recompute pending ranges accordingly.
    tokenMetadata.addMovingEndpoint(target, endpoint);
    PendingRangeCalculatorService.instance.update();
}