@Override
public void writeBytes(String s) {
    // Guard against exceeding the configured limit before delegating;
    // the string contributes one byte per char via the parent's writeBytes.
    final int byteCount = s.length();
    checkLimit(this, byteCount);
    super.writeBytes(s);
}
@Override
public void writeBytes(String s) {
    // Check the remaining capacity first, then hand off to the superclass
    // which performs the actual byte write.
    final int byteCount = s.length();
    checkLimit(this, byteCount);
    super.writeBytes(s);
}
private int writeByteData(int localSum, Pipe<RawDataSchema> localInput2, int roomAvail) { //we were told there was content. assert (Pipe.hasContentToRead(localInput2)); int msgIdx2 = Pipe.takeMsgIdx(localInput2); int length = input2Reader.openLowLevelAPIField(); localSum += (length>>3); writer.writePackedInt(ESCAPE_VALUE); writer.writePackedInt(length); if (length > roomAvail) { DataOutputBlobWriter.writeBytes(writer,input2Reader,roomAvail); } else { DataOutputBlobWriter.writeBytes(writer,input2Reader,length); //only release when we are fully done Pipe.releaseReadLock(localInput2); } Pipe.confirmLowLevelRead(localInput2, Pipe.sizeOf(localInput2, msgIdx2)); return localSum; }
// Fragment: interior of an enclosing method not visible in this chunk
// (the else-branch below is not closed here).
// Copies whatever remains in input2Reader into localWriter, bounded by the
// output pipe's max variable-length field size.
int rem = DataInputBlobReader.bytesRemaining(input2Reader);
if (rem <= localOutput.maxVarLen) {
    // Everything left fits in one field: copy it all and release the source.
    DataOutputBlobWriter.writeBytes(localWriter, input2Reader, rem);
    Pipe.releaseReadLock(localInput2);
} else {
    // Too large for one field: copy only maxVarLen bytes now; the read lock
    // is presumably released later once the remainder is consumed — TODO
    // confirm against the enclosing method (not visible here).
    DataOutputBlobWriter.writeBytes(localWriter, input2Reader, localOutput.maxVarLen);
/**
 * Flushes the remaining {@code bytesRemainingToCopy} bytes from {@code reader}
 * out as one BLOBCHUNK message on {@code output1} paired with one
 * CHUNKEDSTREAM message on {@code output2}, then publishes both pipes.
 * Resets {@code bytesRemainingToCopy} to zero.
 *
 * The call sequence below is side-effecting and order-sensitive; it is kept
 * exactly as in the original (output2 is published before output1).
 */
private void writeBytesToOutput(Pipe<PhastCodecSchema> output1,
                                Pipe<RawDataSchema> output2,
                                DataOutputBlobWriter<RawDataSchema> output2Writer,
                                DataInputBlobReader<RawDataSchema> reader) {

    // Announce the blob-chunk message on the codec pipe and confirm its size.
    Pipe.addMsgIdx(output1, PhastCodecSchema.MSG_BLOBCHUNK_1000);
    Pipe.confirmLowLevelWrite(output1, Pipe.sizeOf(output1, PhastCodecSchema.MSG_BLOBCHUNK_1000));

    // Write the actual bytes as a chunked-stream message on the raw-data pipe.
    Pipe.addMsgIdx(output2, RawDataSchema.MSG_CHUNKEDSTREAM_1);
    output2Writer.openField();
    DataOutputBlobWriter.writeBytes(output2Writer, reader, bytesRemainingToCopy);
    output2Writer.closeLowLevelField();
    bytesRemainingToCopy = 0;
    Pipe.confirmLowLevelWrite(output2, Pipe.sizeOf(output2, RawDataSchema.MSG_CHUNKEDSTREAM_1));

    // Publish the payload pipe first, then the codec pipe.
    Pipe.publishWrites(output2);
    Pipe.publishWrites(output1);
}