/**
 * Get the current size of the buffered content, in bytes.
 *
 * @return size of the on-disk overflow file when one exists, otherwise
 *         the in-memory length reported by the superclass.
 */
@Override
public long length() {
	return onDiskFile != null ? onDiskFile.length() : super.length();
}
/**
 * Copy the buffered content to the given output stream.
 *
 * @param os
 *            destination stream; not closed by this method.
 * @param pm
 *            progress monitor to update in KiB increments; may be
 *            {@code null}, in which case no progress is reported.
 * @throws IOException
 *             the source file or destination stream failed.
 */
@Override
public void writeTo(OutputStream os, ProgressMonitor pm) throws IOException {
	if (onDiskFile == null) {
		// Content never overflowed to disk; stream from memory.
		super.writeTo(os, pm);
		return;
	}
	ProgressMonitor monitor = pm != null ? pm : NullProgressMonitor.INSTANCE;
	try (FileInputStream src = new FileInputStream(onDiskFile)) {
		final byte[] chunk = new byte[Block.SZ];
		for (int n = src.read(chunk); n >= 0; n = src.read(chunk)) {
			os.write(chunk, 0, n);
			// Progress is reported in whole KiB, matching beginTask units.
			monitor.update(n / 1024);
		}
	}
}
/**
 * Insert the merged content into the object database, applying the LFS
 * clean filter when one is configured for the merge attribute.
 *
 * @param buf
 *            buffer holding the raw merge result.
 * @param attributes
 *            attributes of the path being merged; the {@code merge}
 *            attribute selects the clean filter.
 * @return id of the inserted blob.
 * @throws IOException
 *             reading the buffer or inserting the blob failed.
 */
private ObjectId insertMergeResult(TemporaryBuffer buf,
		Attributes attributes) throws IOException {
	// Open the buffer stream inside try-with-resources: the original
	// opened it before the try, leaking it if applyCleanFilter threw.
	// Resources close in reverse order (is first, then in).
	try (InputStream in = buf.openInputStream();
			LfsInputStream is = LfsFactory.getInstance().applyCleanFilter(
					getRepository(), in, buf.length(),
					attributes.get(Constants.ATTR_MERGE))) {
		return getObjectInserter().insert(OBJ_BLOB, is.getLength(), is);
	}
}
/**
 * Check whether the in-memory size limit has been reached, switching the
 * buffer to its overflow stream if so.
 *
 * @return {@code true} if the limit was reached and the buffer now writes
 *         to {@link #overflow}; {@code false} to keep buffering in memory.
 * @throws IOException
 *             creating or writing the overflow stream failed.
 */
private boolean reachedInCoreLimit() throws IOException {
	final boolean atLimit = inCoreLength() >= inCoreLimit;
	if (atLimit) {
		switchToOverflow();
	}
	return atLimit;
}
/**
 * Wrap a temporary buffer as an LFS input stream.
 * <p>
 * The stream is opened with auto-destroy semantics, so the buffer's
 * backing storage is destroyed when {@link #close()} is called.
 *
 * @param buffer
 *            source buffer; supplies both the stream content and the
 *            reported length.
 * @throws IOException
 *             the buffer's stream could not be opened.
 */
public LfsInputStream(TemporaryBuffer buffer) throws IOException {
	this.length = buffer.length();
	this.stream = buffer.openInputStreamWithAutoDestroy();
}
/**
 * Drain an input stream completely into a byte array.
 *
 * @param is
 *            stream to read; read to EOF but not closed.
 * @return all bytes remaining on the stream.
 * @throws IOException
 *             reading the stream failed.
 */
private static byte[] readFully(InputStream is) throws IOException {
	// Heap buffer with an effectively unbounded limit; released by
	// try-with-resources even if copy() fails.
	try (TemporaryBuffer heap = new TemporaryBuffer.Heap(Integer.MAX_VALUE)) {
		heap.copy(is);
		return heap.toByteArray();
	}
}
final long len = buf.length(); for (int curAttempt = 0; curAttempt < maxAttempts; curAttempt++) { final HttpURLConnection c = open("PUT", bucket, key); //$NON-NLS-1$ monitor.beginTask(monitorTask, (int) (len / 1024)); try (OutputStream os = c.getOutputStream()) { buf.writeTo(os, monitor); } finally { monitor.endTask();
public void close() throws IOException { super.close(); if (256 < out.length() && acceptsGzipEncoding(req)) { TemporaryBuffer gzbuf = new TemporaryBuffer.Heap(LIMIT); try { GZIPOutputStream gzip = new GZIPOutputStream(gzbuf); try { out.writeTo(gzip, null); } finally { gzip.close(); if (gzbuf.length() < out.length()) { out = gzbuf; rsp.setHeader(HDR_CONTENT_ENCODING, ENCODING_GZIP); rsp.setContentLength((int) out.length()); final OutputStream os = rsp.getOutputStream(); try { out.writeTo(os, null); os.flush(); } finally {
rc = result.getRc(); if (rc == 0) { result.getStdout().writeTo(channel, NullProgressMonitor.INSTANCE); checkoutMetadata.smudgeFilterCommand, entry.getPathString(), result.getStdout().toByteArray(MAX_EXCEPTION_TEXT_SIZE), RawParseUtils.decode(result.getStderr() .toByteArray(MAX_EXCEPTION_TEXT_SIZE))));
/**
 * Drain an input stream completely into a byte array.
 *
 * @param is
 *            stream to read; read to EOF but not closed.
 * @return all bytes remaining on the stream.
 * @throws IOException
 *             reading the stream failed.
 */
private static byte[] readFully(final InputStream is) throws IOException {
	// try-with-resources releases the buffer's backing storage even when
	// copy() throws; the previous version only closed on the success path,
	// leaking the buffer on error. Also matches the sibling readFully()
	// implementation elsewhere in the codebase.
	try (TemporaryBuffer b = new TemporaryBuffer.Heap(Integer.MAX_VALUE)) {
		b.copy(is);
		return b.toByteArray();
	}
}
/**
 * {@inheritDoc}
 * <p>
 * Appends one byte, spilling to the overflow stream once the in-core
 * limit is reached.
 */
@Override
public void write(int b) throws IOException {
	if (overflow != null) {
		// Already spilled to overflow; bypass the in-memory blocks.
		overflow.write(b);
		return;
	}
	Block tail = last();
	if (tail.isFull()) {
		if (reachedInCoreLimit()) {
			// switchToOverflow() just populated overflow; write there.
			overflow.write(b);
			return;
		}
		tail = new Block();
		blocks.add(tail);
	}
	tail.buffer[tail.count] = (byte) b;
	tail.count++;
}
/**
 * Destroy the buffer, deleting any temporary file backing it.
 */
@Override
public void destroy() {
	super.destroy();
	if (onDiskFile == null) {
		return;
	}
	try {
		// Best effort: if the file cannot be removed now (e.g. still
		// open on Windows), ask the JVM to delete it at exit.
		if (!onDiskFile.delete()) {
			onDiskFile.deleteOnExit();
		}
	} finally {
		// Always drop the reference so destroy() is idempotent.
		onDiskFile = null;
	}
}
}
/**
 * Open a stream over the buffered content.
 *
 * @return stream reading from the overflow file when content spilled to
 *         disk, otherwise the in-memory stream from the superclass.
 * @throws IOException
 *             the overflow file could not be opened.
 */
@Override
public InputStream openInputStream() throws IOException {
	if (onDiskFile != null) {
		return new FileInputStream(onDiskFile);
	}
	return super.openInputStream();
}
/**
 * Execute the pending HTTP request exactly once, attaching the buffered
 * request entity when present.
 *
 * @throws IOException
 *             network or protocol failure while executing the request.
 * @throws ClientProtocolException
 *             the HTTP client rejected the request.
 */
private void execute() throws IOException, ClientProtocolException {
	if (resp != null) {
		// Already executed; keep the first response.
		return;
	}
	if (entity == null) {
		resp = getClient().execute(req);
		return;
	}
	if (req instanceof HttpEntityEnclosingRequest) {
		HttpEntityEnclosingRequest enclosing = (HttpEntityEnclosingRequest) req;
		enclosing.setEntity(entity);
	}
	resp = getClient().execute(req);
	// Release the entity's buffer only after a successful execute.
	entity.getBuffer().close();
	entity = null;
}
/**
 * Apply line-ending normalization and the configured git "clean" filter
 * to a content stream before it is stored in the object database.
 * <p>
 * If no clean filter is configured, the (possibly CRLF-normalized)
 * input stream is returned unchanged.
 *
 * @param in
 *            raw worktree content.
 * @param opType
 *            operation type, forwarded to auto-CRLF handling.
 * @return stream of filtered content; the filter's stdout when a filter
 *         ran, otherwise {@code in} after auto-CRLF wrapping.
 * @throws IOException
 *             the filter process could not be run or exited non-zero; in
 *             both cases a {@link FilterFailedException} carrying the
 *             command, path and captured output is wrapped as the cause.
 */
private InputStream filterClean(InputStream in, OperationType opType)
		throws IOException {
	// Normalize line endings first; the filter sees normalized input.
	in = handleAutoCRLF(in, opType);
	String filterCommand = getCleanFilterCommand();
	if (filterCommand != null) {
		FS fs = repository.getFS();
		// Run through the shell so pipelines/arguments in the configured
		// command work as git expects.
		ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand,
				new String[0]);
		filterProcessBuilder.directory(repository.getWorkTree());
		// Filters may invoke git themselves; point them at this repo.
		filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
				repository.getDirectory().getAbsolutePath());
		ExecutionResult result;
		try {
			result = fs.execute(filterProcessBuilder, in);
		} catch (IOException | InterruptedException e) {
			// Wrap as IOException so callers only need to handle one
			// exception type; FilterFailedException keeps the detail.
			throw new IOException(new FilterFailedException(e,
					filterCommand, getEntryPathString()));
		}
		int rc = result.getRc();
		if (rc != 0) {
			// Non-zero exit: report truncated stdout/stderr for diagnosis.
			throw new IOException(new FilterFailedException(rc,
					filterCommand, getEntryPathString(),
					result.getStdout().toByteArray(MAX_EXCEPTION_TEXT_SIZE),
					RawParseUtils.decode(result.getStderr()
							.toByteArray(MAX_EXCEPTION_TEXT_SIZE))));
		}
		return result.getStdout().openInputStream();
	}
	return in;
}
// Spill all in-memory blocks to the overflow stream, then continue
// buffering writes through it. Statement order matters here:
// 1) the last (partially filled) block is removed BEFORE the loop so the
//    raw copy only writes completely full blocks;
// 2) the raw overflow() stream is used for the bulk copy (each block is
//    already a large chunk, buffering would only add copies);
// 3) only then is the stream wrapped in a BufferedOutputStream, so the
//    tail block and subsequent single-byte write() calls are batched.
// After this method returns, blocks == null signals overflow mode.
private void switchToOverflow() throws IOException {
	overflow = overflow();
	final Block last = blocks.remove(blocks.size() - 1);
	for (Block b : blocks)
		overflow.write(b.buffer, 0, b.count);
	blocks = null;
	overflow = new BufferedOutputStream(overflow, Block.SZ);
	overflow.write(last.buffer, 0, last.count);
}
/**
 * Get the number of bytes currently held by this buffer.
 * <p>
 * The value is only guaranteed to be accurate once {@link #close()}
 * has been invoked.
 *
 * @return total length of the buffer, in bytes.
 */
public long length() {
	return inCoreLength();
}
/**
 * Compute the number of bytes held in the in-memory block list.
 *
 * @return full-block bytes plus the used portion of the last block.
 */
private long inCoreLength() {
	final Block tail = last();
	// Widen to long before multiplying so large buffers cannot
	// overflow 32-bit arithmetic.
	final long fullBlocks = blocks.size() - 1;
	return fullBlocks * Block.SZ + tail.count;
}