/**
 * Derives a consumer token by SHA-1 hashing the app id, the formatted generation
 * time, and the per-consumer salt, joined with {@code KEY_JOINER}.
 *
 * NOTE(review): SHA-1 is cryptographically weak — confirm this is a compatibility
 * requirement rather than a new security decision. Also, if {@code TIMESTAMP_FORMAT}
 * is a shared {@code SimpleDateFormat} it is not thread-safe — verify at the declaration.
 */
String generateToken(String consumerAppId, Date generationTime, String consumerTokenSalt) {
  final String material =
      KEY_JOINER.join(consumerAppId, TIMESTAMP_FORMAT.format(generationTime), consumerTokenSalt);
  return Hashing.sha1().hashString(material, Charsets.UTF_8).toString();
}
/**
 * Returns the first 8 hex characters (lowercase) of the SHA-1 digest of
 * {@code source}, encoded with the instance's configured {@code charset}.
 * Used as a short, stable fingerprint — not for security purposes.
 */
private String sha1(final CharSequence source) {
  final byte[] digest = Hashing.sha1().hashString(source, charset).asBytes();
  final String hex = BaseEncoding.base16().encode(digest);
  return hex.substring(0, 8).toLowerCase();
}
/**
 * Builds a deterministic processing id of the form
 * {@code <dataSource>_<sha1 of the sorted segment identifiers>}.
 * Segments are sorted by their natural ordering so the id is independent of
 * the input collection's iteration order; each segment contributes
 * {@code start_end_version_partitionNum}, and segments are joined with '_'.
 */
private static String computeProcessingID(final String dataSource, final List<DataSegment> segments) {
  final StringBuilder segmentIDs = new StringBuilder();
  for (DataSegment segment : Ordering.natural().sortedCopy(segments)) {
    if (segmentIDs.length() > 0) {
      segmentIDs.append('_');
    }
    segmentIDs.append(
        StringUtils.format(
            "%s_%s_%s_%s",
            segment.getInterval().getStart(),
            segment.getInterval().getEnd(),
            segment.getVersion(),
            segment.getShardSpec().getPartitionNum()
        )
    );
  }
  return StringUtils.format(
      "%s_%s",
      dataSource,
      Hashing.sha1().hashString(segmentIDs.toString(), StandardCharsets.UTF_8).toString()
  );
}
Hashing.sha1().hashBytes(newCommitMetadataBytes).asBytes() );
/**
 * Computes a lowercase hex SHA-1 fingerprint over every regular file under
 * {@code dir}, followed by the sprite and css files when they exist.
 *
 * Fix: the original hashed files in {@link Files#walk} encounter order, which is
 * filesystem-dependent and therefore not reproducible across machines; paths are
 * now sorted so identical content always yields the same digest.
 *
 * NOTE(review): {@code Try.run} silently swallows per-file read failures, so an
 * unreadable file is simply skipped from the digest — confirm this best-effort
 * behavior is intended before tightening it.
 *
 * @throws IOException if the directory walk itself fails
 */
private String sha1(final File dir, final File sprite, final File css) throws IOException {
  try (Stream<Path> stream = Files.walk(dir.toPath())) {
    Hasher sha1 = Hashing.sha1().newHasher();
    stream.filter(p -> !Files.isDirectory(p))
        // Deterministic order: Path implements Comparable.
        .sorted()
        .forEach(p -> Try.run(() -> sha1.putBytes(Files.readAllBytes(p))));
    if (sprite.exists()) {
      sha1.putBytes(Files.readAllBytes(sprite.toPath()));
    }
    if (css.exists()) {
      sha1.putBytes(Files.readAllBytes(css.toPath()));
    }
    return BaseEncoding.base16().encode(sha1.hash().asBytes()).toLowerCase();
  }
}
/** HashCode.fromString only accepts lowercase hex; an uppercase digest string must be rejected. */
public void testFromStringFailsWithUpperCaseString() {
  String upperHex = Hashing.sha1().hashString("foo", Charsets.US_ASCII).toString().toUpperCase();
  try {
    HashCode.fromString(upperHex);
    fail();
  } catch (IllegalArgumentException expected) {
    // expected: uppercase hex is not a valid HashCode string
  }
}
/** A HashCode survives a round trip through its hex string form via fromString. */
public void testRoundTripHashCodeUsingFromString() {
  HashCode original = Hashing.sha1().hashString("foo", Charsets.US_ASCII);
  String hex = original.toString();
  HashCode parsed = HashCode.fromString(hex);
  assertEquals(original, parsed);
}
/** Decoding toString() with lowercase base16 yields the original digest bytes. */
public void testRoundTripHashCodeUsingBaseEncoding() {
  HashCode original = Hashing.sha1().hashString("foo", Charsets.US_ASCII);
  byte[] decoded = BaseEncoding.base16().lowerCase().decode(original.toString());
  assertEquals(original, HashCode.fromBytes(decoded));
}
/** hash() may be called once; a second call on the same Hasher must throw IllegalStateException. */
public void testHashTwice() {
  Hasher hasher = Hashing.sha1().newHasher();
  hasher.putString("The quick brown fox jumps over the lazy dog", Charsets.UTF_8);
  // Known SHA-1 test vector for the pangram.
  assertEquals("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", hasher.hash().toString());
  try {
    hasher.hash();
    fail();
  } catch (IllegalStateException expected) {
    // expected: the hasher is consumed after the first hash()
  }
}
@Description("compute sha1 hash")
@ScalarFunction
@SqlType(StandardTypes.VARBINARY)
public static Slice sha1(@SqlType(StandardTypes.VARBINARY) Slice slice) {
  // SQL sha1(varbinary): digest the raw bytes and wrap the result as a varbinary slice.
  byte[] digest = Hashing.sha1().hashBytes(slice.getBytes()).asBytes();
  return Slices.wrappedBuffer(digest);
}
/** After hash() is called, further put* calls on the same Hasher must throw IllegalStateException. */
public void testPutAfterHash() {
  Hasher hasher = Hashing.sha1().newHasher();
  hasher.putString("The quick brown fox jumps over the lazy dog", Charsets.UTF_8);
  // Known SHA-1 test vector for the pangram.
  assertEquals("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", hasher.hash().toString());
  try {
    hasher.putInt(42);
    fail();
  } catch (IllegalStateException expected) {
    // expected: the hasher is consumed after hash()
  }
}
/** toString of each digest-backed HashFunction names its Hashing factory method. */
public void testToString() {
  assertEquals("Hashing.sha512()", Hashing.sha512().toString());
  assertEquals("Hashing.sha256()", Hashing.sha256().toString());
  assertEquals("Hashing.sha1()", Hashing.sha1().toString());
  assertEquals("Hashing.md5()", Hashing.md5().toString());
}
/**
 * Inserts a new datasource metadata row (payload plus its SHA-1 digest) and
 * returns true iff exactly one row was inserted.
 *
 * Fix: the original called {@code jsonMapper.writeValueAsBytes(metadata)} twice —
 * once for the payload column and once for the digest — doing the serialization
 * work twice and risking a payload/digest mismatch if serialization were ever
 * non-deterministic. The metadata is now serialized exactly once and reused.
 */
@Override
public boolean insertDataSourceMetadata(String dataSource, DataSourceMetadata metadata) {
  return 1 == connector.getDBI().inTransaction(
      (handle, status) -> {
        final byte[] payload = jsonMapper.writeValueAsBytes(metadata);
        final String payloadSha1 =
            BaseEncoding.base16().encode(Hashing.sha1().hashBytes(payload).asBytes());
        return handle
            .createStatement(
                StringUtils.format(
                    "INSERT INTO %s (dataSource, created_date, commit_metadata_payload, commit_metadata_sha1) VALUES"
                    + " (:dataSource, :created_date, :commit_metadata_payload, :commit_metadata_sha1)",
                    dbTables.getDataSourceTable()
                )
            )
            .bind("dataSource", dataSource)
            .bind("created_date", DateTimes.nowUtc().toString())
            .bind("commit_metadata_payload", payload)
            .bind("commit_metadata_sha1", payloadSha1)
            .execute();
      }
  );
}
}
hf = Hashing.sha1(); break;
/**
 * Computes an ETag for this query over the given segments, stores it in the
 * response context, and returns it. Returns null when any serving server is
 * not segment-replicatable (i.e. not a historical), since such results are
 * not stable enough to tag.
 *
 * The ETag is base64(SHA-1(sorted-iteration segment ids || cache key)), using
 * {@code queryCacheKey} when provided and otherwise recomputing it from the
 * strategy.
 */
@Nullable
private String computeCurrentEtag(final Set<ServerToSegment> segments, @Nullable byte[] queryCacheKey) {
  final Hasher hasher = Hashing.sha1().newHasher();
  for (ServerToSegment serverToSegment : segments) {
    if (!serverToSegment.getServer().pick().getServer().segmentReplicatable()) {
      // Early exit: a non-replicatable server is involved, so skip ETag generation.
      return null;
    }
    hasher.putString(
        serverToSegment.getServer().getSegment().getId().toString(),
        StandardCharsets.UTF_8
    );
  }
  final byte[] cacheKey = queryCacheKey == null ? strategy.computeCacheKey(query) : queryCacheKey;
  hasher.putBytes(cacheKey);
  final String currEtag = StringUtils.encodeBase64String(hasher.hash().asBytes());
  responseContext.put(QueryResource.HEADER_ETAG, currEtag);
  return currEtag;
}
/** The merge task id must begin with "merge_foo_" + sha1(concatenated segment identifiers) + "_". */
@Test
public void testID() {
  final String segmentIdentifiers =
      "2012-01-03T00:00:00.000Z_2012-01-05T00:00:00.000Z_V1_0"
      + "_2012-01-04T00:00:00.000Z_2012-01-06T00:00:00.000Z_V1_0"
      + "_2012-01-05T00:00:00.000Z_2012-01-07T00:00:00.000Z_V1_0";
  final String desiredPrefix =
      "merge_foo_" + Hashing.sha1().hashString(segmentIdentifiers, StandardCharsets.UTF_8) + "_";
  Assert.assertEquals(desiredPrefix, testMergeTaskBase.getId().substring(0, desiredPrefix.length()));
}
}
Hashing.sha1() .newHasher() .putBytes(StringUtils.toUtf8(sequenceName))
} else { oldCommitMetadataSha1FromDb = BaseEncoding.base16().encode( Hashing.sha1().hashBytes(oldCommitMetadataBytesFromDb).asBytes() ); oldCommitMetadataFromDb = jsonMapper.readValue(oldCommitMetadataBytesFromDb, DataSourceMetadata.class); final byte[] newCommitMetadataBytes = jsonMapper.writeValueAsBytes(newCommitMetadata); final String newCommitMetadataSha1 = BaseEncoding.base16().encode( Hashing.sha1().hashBytes(newCommitMetadataBytes).asBytes() );
Hashing.sha1() .newHasher() .putBytes(StringUtils.toUtf8(sequenceName))