private int writePostScript(int footerLength, int metadataLength) throws IOException { OrcProto.PostScript.Builder builder = OrcProto.PostScript.newBuilder() .setCompression(writeCompressionKind(compress)) .setFooterLength(footerLength) .setMetadataLength(metadataLength) .setMagic(OrcFile.MAGIC) .addVersion(version.getMajor()) .addVersion(version.getMinor()) .setWriterVersion(OrcFile.WriterVersion.HIVE_8732.getId()); if (compress != CompressionKind.NONE) { builder.setCompressionBlockSize(bufferSize); } OrcProto.PostScript ps = builder.build(); // need to write this uncompressed long startPosn = rawWriter.getBytesWritten(); ps.writeTo(rawWriter); long length = rawWriter.getBytesWritten() - startPosn; if (length > 255) { throw new IllegalArgumentException("PostScript too large at " + length); } return (int) length; }
/**
 * Resolves the version recorded in the file's PostScript to a known
 * {@link OrcFile.Version}, returning {@code V_0_11} when the recorded
 * version is unknown or absent.
 *
 * <p>A file may record only a major version; in that case the minor
 * version defaults to 0, consistent with {@code checkOrcVersion}. The
 * original implementation read {@code versionList.get(1)} unconditionally
 * and threw {@link IndexOutOfBoundsException} for such files.
 *
 * @return the matching file version, or {@code V_0_11} as a fallback
 */
@Override
public OrcFile.Version getFileVersion() {
  // Guard against files with no version entries at all.
  if (versionList == null || versionList.isEmpty()) {
    return OrcFile.Version.V_0_11;
  }
  int major = versionList.get(0);
  // Minor version is optional; default to 0 when it was not written.
  int minor = versionList.size() > 1 ? versionList.get(1) : 0;
  for (OrcFile.Version version : OrcFile.Version.values()) {
    if (version.getMajor() == major && version.getMinor() == minor) {
      return version;
    }
  }
  return OrcFile.Version.V_0_11;
}
/**
 * Check to see if this ORC file is from a future version and if so,
 * warn the user that we may not be able to read all of the column encodings.
 *
 * @param log the logger to write any error message to
 * @param path the filename for error messages
 * @param version the version of hive that wrote the file.
 */
static void checkOrcVersion(Log log, Path path, List<Integer> version) {
  // Nothing to compare against if the file recorded no version at all.
  if (version.isEmpty()) {
    return;
  }
  int fileMajor = version.get(0);
  // Minor version is optional in the file; treat a missing minor as 0.
  int fileMinor = version.size() > 1 ? version.get(1) : 0;
  int currentMajor = OrcFile.Version.CURRENT.getMajor();
  int currentMinor = OrcFile.Version.CURRENT.getMinor();
  boolean writtenByFutureVersion =
      fileMajor > currentMajor ||
      (fileMajor == currentMajor && fileMinor > currentMinor);
  if (writtenByFutureVersion) {
    log.warn("ORC file " + path +
        " was written by a future Hive version " +
        versionString(version) +
        ". This file may not be readable by this version of Hive.");
  }
}
private int writePostScript(int footerLength, int metadataLength) throws IOException { OrcProto.PostScript.Builder builder = OrcProto.PostScript.newBuilder() .setCompression(writeCompressionKind(compress)) .setFooterLength(footerLength) .setMetadataLength(metadataLength) .setMagic(OrcFile.MAGIC) .addVersion(version.getMajor()) .addVersion(version.getMinor()) .setWriterVersion(OrcFile.WriterVersion.HIVE_8732.getId()); if (compress != CompressionKind.NONE) { builder.setCompressionBlockSize(bufferSize); } OrcProto.PostScript ps = builder.build(); // need to write this uncompressed long startPosn = rawWriter.getBytesWritten(); ps.writeTo(rawWriter); long length = rawWriter.getBytesWritten() - startPosn; if (length > 255) { throw new IllegalArgumentException("PostScript too large at " + length); } return (int) length; }
private int writePostScript(int footerLength, int metadataLength) throws IOException { OrcProto.PostScript.Builder builder = OrcProto.PostScript.newBuilder() .setCompression(writeCompressionKind(compress)) .setFooterLength(footerLength) .setMetadataLength(metadataLength) .setMagic(OrcFile.MAGIC) .addVersion(version.getMajor()) .addVersion(version.getMinor()) .setWriterVersion(OrcFile.WriterVersion.HIVE_8732.getId()); if (compress != CompressionKind.NONE) { builder.setCompressionBlockSize(bufferSize); } OrcProto.PostScript ps = builder.build(); // need to write this uncompressed long startPosn = rawWriter.getPos(); ps.writeTo(rawWriter); long length = rawWriter.getPos() - startPosn; if (length > 255) { throw new IllegalArgumentException("PostScript too large at " + length); } return (int) length; }