/**
 * Reads HEIF metadata from the given input stream.
 *
 * @param inputStream stream positioned at the start of the HEIF data
 * @return the extracted {@link Metadata}; never {@code null}
 * @throws IOException if the stream cannot be read or the HEIF data is malformed
 */
@NotNull
public static Metadata readMetadata(@NotNull InputStream inputStream) throws IOException
{
    Metadata metadata = new Metadata();
    try {
        new HeifReader().extract(metadata, inputStream, new HeifBoxHandler(metadata));
    } catch (DataFormatException e) {
        // The method is annotated @NotNull, so returning null on a decode
        // failure (the previous behavior) violated the contract. Wrap the
        // cause in the already-declared IOException instead.
        throw new IOException("Malformed HEIF data", e);
    }
    return metadata;
}
}
/**
 * Inflate the given byte buffer into this VEXBlock's data field.
 *
 * <p>On success {@code nBytes} holds the number of inflated bytes; on corrupt
 * input it is reset to 0 (original behavior preserved).
 *
 * @param input zlib/deflate-compressed bytes
 */
private void inflate (byte[] input) {
    data = new byte[BUFFER_SIZE];
    int pos = 0;
    Inflater inflater = new Inflater();
    inflater.setInput(input, 0, input.length);
    try {
        while (!inflater.finished()) {
            int produced = inflater.inflate(data, pos, data.length - pos);
            if (produced == 0) {
                // inflate() returns 0 when the output buffer is full or more
                // input is needed. Either way no further progress is possible,
                // so bail out — the previous code looped here forever whenever
                // the inflated size reached BUFFER_SIZE before finished().
                break;
            }
            pos += produced;
        }
    } catch (DataFormatException e) {
        // Corrupt input: report and discard any partial output.
        e.printStackTrace();
        pos = 0;
    }
    inflater.end();
    nBytes = pos;
}
/**
 * Decompresses a zlib/deflate-compressed byte array.
 *
 * @param input the compressed bytes
 * @return the fully decompressed bytes
 * @throws IllegalStateException if {@code input} is not valid compressed data
 */
public static byte[] uncompress(byte[] input) {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Inflater decompressor = new Inflater();
    try {
        decompressor.setInput(input);
        final byte[] buf = new byte[2048];
        while (!decompressor.finished()) {
            int count;
            try {
                count = decompressor.inflate(buf);
            } catch (DataFormatException e) {
                // The old code swallowed this with printStackTrace() and kept
                // looping; since finished() never becomes true on corrupt
                // input, that spun forever. Fail fast and keep the cause.
                throw new IllegalStateException("Invalid compressed data", e);
            }
            if (count == 0 && decompressor.needsInput()) {
                // Truncated stream: no more input, no more progress possible.
                break;
            }
            bos.write(buf, 0, count);
        }
    } finally {
        decompressor.end();
    }
    return bos.toByteArray();
}
/**
 * Deserializes a compressed attribute payload.
 *
 * <p>The first byte of {@code data} is skipped before decompression — it is a
 * format marker consumed by the caller's dispatch. NOTE(review): assumes
 * {@code data.length >= 1}; confirm callers guarantee a non-empty payload.
 *
 * @throws IOException if the payload cannot be decompressed or deserialized
 */
@Override
public void deserializeAttribute(byte[] data, CloudioAttribute.InternalAttribute attribute)
        throws CloudioAttributeConstraintException, NumberFormatException, IOException {
    try {
        super.deserializeAttribute(decompress(Arrays.copyOfRange(data, 1, data.length)), attribute);
    } catch (DataFormatException exception) {
        log.error("Exception: " + exception.getMessage());
        // Previously the exception was swallowed after logging, silently
        // leaving the attribute undeserialized. Rethrow as the already
        // declared IOException, preserving the cause chain.
        throw new IOException("Failed to decompress attribute payload", exception);
    }
}
/** Demo driver: reads a file, round-trips it through deflate/inflate, prints results. */
public static void main(String[] args) {
    try {
        byte[][] filesToBytes = filesToBytes(new String[] {"/export/abhishek1/work/aspenmm/GFTryouts/test.json"});
        // NOTE(review): one path is passed but index 1 is read — confirm the
        // return layout of filesToBytes (index 0 may be unused or metadata).
        System.out.println(filesToBytes[1].length);
        DeflaterInflaterData compressBytes = compressBytes(filesToBytes[1]);
        System.out.println(compressBytes);
        DeflaterInflaterData uncompressBytes = uncompressBytes(compressBytes.data, compressBytes.dataLength);
        System.out.println(uncompressBytes);
        System.out.println(new String(uncompressBytes.getData()));
    } catch (IOException | DataFormatException e) {
        // FileNotFoundException is an IOException, so a single multi-catch
        // replaces the three identical handlers of the original.
        e.printStackTrace();
    }
}
/** Demo driver: reads a file, round-trips it through deflate/inflate, prints results. */
public static void main(String[] args) {
    try {
        byte[][] filesToBytes = filesToBytes(new String[] {"/export/abhishek1/work/aspenmm/GFTryouts/test.json"});
        // NOTE(review): one path is passed but index 1 is read — confirm the
        // return layout of filesToBytes (index 0 may be unused or metadata).
        System.out.println(filesToBytes[1].length);
        DeflaterInflaterData compressBytes = compressBytes(filesToBytes[1]);
        System.out.println(compressBytes);
        DeflaterInflaterData uncompressBytes = uncompressBytes(compressBytes.data, compressBytes.dataLength);
        System.out.println(uncompressBytes);
        System.out.println(new String(uncompressBytes.getData()));
    } catch (IOException | DataFormatException e) {
        // FileNotFoundException is an IOException, so a single multi-catch
        // replaces the three identical handlers of the original.
        e.printStackTrace();
    }
}
public void readCompressed(int plen, int dlen) throws IOException{ if(dlen >= c.threshold){ //if the data length is less than we set in login packet 3, throw an error byte[] data = new byte[plen]; c.in.readFully(data, 0, plen); Inflater inflater = new Inflater(); inflater.setInput(data); byte[] uncompressed = new byte[dlen]; try{ inflater.inflate(uncompressed); }catch(DataFormatException dataformatexception){ dataformatexception.printStackTrace(); throw new IOException("Bad compressed data format"); }finally{ inflater.end(); } ByteArrayDataInputWrapper buf = new ByteArrayDataInputWrapper(uncompressed); //the ONLY reason we do this is because stupid minecraft made packets compress and changing it any other way means re-doing 68 packets int type = Packet.readVarInt(buf); forwardPacket(dlen, type, buf); }else{ throw new IOException("Data was smaller than threshold!"); } }
e.printStackTrace(); return -1;
/** Parse out and decompress the data part of a fileblock helper function. */ FileBlock parseData(byte buf[]) throws InvalidProtocolBufferException { FileBlock out = FileBlock.newInstance(type, null, indexdata); Fileformat.Blob blob = Fileformat.Blob.parseFrom(buf); if (blob.hasRaw()) { out.data = blob.getRaw(); } else if (blob.hasZlibData()) { byte buf2[] = new byte[blob.getRawSize()]; Inflater decompresser = new Inflater(); decompresser.setInput(blob.getZlibData().toByteArray()); // decompresser.getRemaining(); try { decompresser.inflate(buf2); } catch (DataFormatException e) { e.printStackTrace(); throw new Error(e); } assert (decompresser.finished()); decompresser.end(); out.data = ByteString.copyFrom(buf2); } return out; }
} catch (DataFormatException ex) { log.error("ERROR on inflation " + ex.getMessage()); ex.printStackTrace(); throw new IOException(ex.getMessage());
@Override public void onReceived(double timestamp_utc, Object payload) { try { // This will replace all the internal members of the tree! unpack((byte[]) payload); // After unpacking, we must ensure this notify handler is re-attached ConfigNode tree_bin = getNode("ADMIN:TREE"); tree_bin.addNotifyHandler(this); code_list = getShortCodeMap(); enumerate(); CRC32 crc = new CRC32(); crc.update((byte[]) payload); final int crcvalue = (int)crc.getValue(); Log.d(TAG, "CALC CRC: " + Integer.toHexString(crcvalue)); getNode("ADMIN:CRC32").value = crcvalue; Util.setPreference(crcToPrefKey(crcvalue),(byte[])payload); } catch (DataFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } });
ex.printStackTrace(); throw new IOException( ex.getMessage());
ex.printStackTrace(); throw new IOException(ex.getMessage());
ex.printStackTrace(); throw new IOException(ex.getMessage());
e.printStackTrace(); debug.writeException(e); throw new Error(e.getMessage());
ex.printStackTrace(); throw new IOException(ex.getMessage());
} catch (DataFormatException ex) { System.out.println("ERROR on inflation " + ex.getMessage()); ex.printStackTrace(); throw new IOException(ex.getMessage());
e.printStackTrace(); debug.writeException(e); throw new Error(e.getMessage());