@Override
public byte[] getBytes() {
  return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
}
/**
 * Convert a string of lines that look like:
 *   "68 72 70 63 02 00 00 00 82 00 1d 6f 72 67 2e 61  hrpc.... ...org.a"
 * .. into an array of bytes.
 */
private static byte[] hexDumpToBytes(String hexdump) {
  final int LAST_HEX_COL = 3 * 16;

  StringBuilder hexString = new StringBuilder();
  for (String line : hexdump.toUpperCase().split("\n")) {
    // guard against a short final line before slicing off the ASCII column
    hexString.append(
        line.substring(0, Math.min(LAST_HEX_COL, line.length())).replace(" ", ""));
  }
  return StringUtils.hexStringToByte(hexString.toString());
}
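The helper above only strips out the hex columns; the decoding itself is the byteToHexString/hexStringToByte pair in org.apache.hadoop.util.StringUtils. A minimal round-trip sketch (not from the original source) showing how the two fit together:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.util.StringUtils;

public class HexRoundTripDemo {
  public static void main(String[] args) {
    byte[] original = {0x68, 0x72, 0x70, 0x63}; // the "hrpc" magic from the sample dump
    String hex = StringUtils.byteToHexString(original); // "68727063"
    byte[] decoded = StringUtils.hexStringToByte(hex);
    System.out.println(hex + " -> "
        + new String(decoded, StandardCharsets.US_ASCII)); // 68727063 -> hrpc
  }
}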
public static Object getObject(JobConf conf, String key) {
  String s = conf.get(key);
  if (s == null) {
    return null;
  }
  byte[] val = StringUtils.hexStringToByte(s);
  return deserialize(val);
}
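getObject assumes the value was stored as a hex string. A hedged sketch of the matching setter (setObject and serialize are assumed names, not confirmed by the snippet):

public static void setObject(JobConf conf, String key, Object value) {
  byte[] val = serialize(value); // assumed inverse of the deserialize() used above
  conf.set(key, StringUtils.byteToHexString(val)); // hex is safe to embed in XML config
}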
/**
 * Returns a disk id (0-based) index from the Hdfs VolumeId object. There is
 * currently no public API to get at the volume id. We'll have to get it by
 * accessing the internals.
 */
public static int getDiskId(VolumeId hdfsVolumeId) {
  // Initialize the diskId as -1 to indicate it is unknown
  int diskId = -1;

  if (hdfsVolumeId != null) {
    String volumeIdString = hdfsVolumeId.toString();
    byte[] volumeIdBytes = StringUtils.hexStringToByte(volumeIdString);
    if (volumeIdBytes != null && volumeIdBytes.length == 4) {
      diskId = Utils.toInt(volumeIdBytes);
    } else if (volumeIdBytes != null && volumeIdBytes.length == 1) {
      // null check repeated here to avoid an NPE when the hex string fails to parse
      diskId = (int) volumeIdBytes[0]; // support hadoop-2.0.2
    }
  }
  return diskId;
}
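Utils.toInt is not shown in the snippet; for a 4-byte volume id, the conventional big-endian conversion (an assumption about its behavior, not the original code) would be:

static int toInt(byte[] b) {
  // combine four bytes into one int, most significant byte first
  return ((b[0] & 0xff) << 24) | ((b[1] & 0xff) << 16)
       | ((b[2] & 0xff) << 8)  |  (b[3] & 0xff);
}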
pos = Long.parseLong(st[0]);
int cap = Integer.parseInt(st[1]);
byte[] hash = StringUtils.hexStringToByte(st[2]);
byte[] b = new byte[cap];
r.seek(pos);
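This reader implies index lines of the form "<pos> <cap> <hex-of-hash>". A hedged sketch of a writer that would produce such lines (method and stream names assumed, not from the original source):

void writeIndexEntry(java.io.PrintWriter out, long pos, int cap, byte[] hash) {
  // pos and cap as decimal text, hash as the hex string the reader decodes
  out.println(pos + " " + cap + " " + StringUtils.byteToHexString(hash));
}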
/**
 * Convert a string of lines that look like:
 *   "68 72 70 63 02 00 00 00 82 00 1d 6f 72 67 2e 61  hrpc.... ...org.a"
 * .. into an array of bytes.
 */
private static byte[] hexDumpToBytes(String hexdump) {
  final int LAST_HEX_COL = 3 * 16;

  StringBuilder hexString = new StringBuilder();
  for (String line : StringUtils.toUpperCase(hexdump).split("\n")) {
    // guard against a short final line before slicing off the ASCII column
    hexString.append(
        line.substring(0, Math.min(LAST_HEX_COL, line.length())).replace(" ", ""));
  }
  return StringUtils.hexStringToByte(hexString.toString());
}
final String algorithm = (String) m.get("algorithm");
final int length = ((Number) m.get("length")).intValue();
final byte[] bytes = StringUtils.hexStringToByte((String) m.get("bytes"));
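For reference, the inverse direction packs the same three fields back into such a map, re-encoding the bytes as hex (a sketch, not the original code):

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.util.StringUtils;

class ChecksumJson {
  static Map<String, Object> toJsonMap(String algorithm, int length, byte[] bytes) {
    Map<String, Object> m = new HashMap<>();
    m.put("algorithm", algorithm);
    m.put("length", length);
    m.put("bytes", StringUtils.byteToHexString(bytes)); // hex keeps the payload JSON-safe
    return m;
  }
}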
byte[] imageBytes = StringUtils.hexStringToByte(
    "fffffffee17c0d2700000000");
FileOutputStream fos = new FileOutputStream(imageFile);
/**
 * Test case for an empty edit log from a prior version of Hadoop.
 */
@Test
public void testPreTxIdEditLogNoEdits() throws Exception {
  FSNamesystem namesys = Mockito.mock(FSNamesystem.class);
  namesys.dir = Mockito.mock(FSDirectory.class);
  long numEdits = testLoad(
      StringUtils.hexStringToByte("ffffffed"), // just version number
      namesys);
  assertEquals(0, numEdits);
}
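The magic string is just a negative version number, which is why zero edits are expected: "ffffffed" read back as a big-endian int is -19, a pre-transaction-ID layout version header with nothing after it. A quick check:

import java.nio.ByteBuffer;
import org.apache.hadoop.util.StringUtils;

public class LayoutVersionDemo {
  public static void main(String[] args) {
    byte[] header = StringUtils.hexStringToByte("ffffffed");
    // ByteBuffer reads big-endian by default, matching the on-disk format
    System.out.println(ByteBuffer.wrap(header).getInt()); // prints -19
  }
}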