/**
 * Returns the size of the given message as defined in the FROM (field
 * metadata) of the schema bound to this pipe.
 *
 * Delegates to the schema-level overload using {@code pipe.schema}.
 *
 * @param pipe   Pipe whose schema defines the message layout
 * @param msgIdx message index within the schema
 * @return size of the message fragment, in slab units, as defined by the FROM
 */
public static <S extends MessageSchema<S>> int sizeOf(Pipe<S> pipe, int msgIdx) {
    return sizeOf(pipe.schema, msgIdx);
}
/**
 * Returns the size of the given message as defined in the FROM (field
 * metadata) of the schema bound to this pipe.
 *
 * Delegates to the schema-level overload using {@code pipe.schema}.
 * NOTE(review): duplicate of an identical method elsewhere in this file;
 * consider consolidating.
 *
 * @param pipe   Pipe whose schema defines the message layout
 * @param msgIdx message index within the schema
 * @return size of the message fragment, in slab units, as defined by the FROM
 */
public static <S extends MessageSchema<S>> int sizeOf(Pipe<S> pipe, int msgIdx) {
    return sizeOf(pipe.schema, msgIdx);
}
/**
 * Assert-only sanity check: verifies that {@code size} equals the schema-defined
 * size of the fragment whose message id sits at the current low-level
 * write-confirmation position in the slab ring.
 *
 * Always returns {@code true} so it can be invoked from inside an
 * {@code assert} statement without affecting production behavior.
 *
 * @param output pipe whose slab ring is inspected
 * @param size   fragment size the caller claims to have written
 * @return always true (failures surface via AssertionError when -ea is on)
 */
private static <S extends MessageSchema<S>> boolean verifySize(Pipe<S> output, int size) {
    try {
        // slabMask & llwConfirmedPosition locates the msgIdx of the fragment
        // currently being confirmed; its schema size must match the passed size.
        assert(Pipe.sizeOf(output, output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition]) == size) :
            "Did not write the same size fragment as expected, double check message. expected:"
            +Pipe.sizeOf(output, output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition])
            +" but was passed "+size+" for schema "+Pipe.schemaName(output)
            +" and assumed MsgId of "+output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition];
    } catch (ArrayIndexOutOfBoundsException aiex) {
        //ignore, caused by some poor unit tests which need to be re-written.
    }
    return true;
}
/**
 * Assert-only sanity check: verifies that {@code size} equals the schema-defined
 * size of the fragment whose message id sits at the current low-level
 * write-confirmation position in the slab ring.
 *
 * Always returns {@code true} so it can be invoked from inside an
 * {@code assert} statement without affecting production behavior.
 * NOTE(review): duplicate of an identical helper elsewhere in this file.
 *
 * @param output pipe whose slab ring is inspected
 * @param size   fragment size the caller claims to have written
 * @return always true (failures surface via AssertionError when -ea is on)
 */
private static <S extends MessageSchema<S>> boolean verifySize(Pipe<S> output, int size) {
    try {
        assert(Pipe.sizeOf(output, output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition]) == size) :
            "Did not write the same size fragment as expected, double check message. expected:"
            +Pipe.sizeOf(output, output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition])
            +" but was passed "+size+" for schema "+Pipe.schemaName(output)
            +" and assumed MsgId of "+output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition];
    } catch (ArrayIndexOutOfBoundsException aiex) {
        //ignore, caused by some poor unit tests which need to be re-written.
    }
    return true;
}
/**
 * Assert-only sanity check: verifies that {@code size} equals the schema-defined
 * size of the fragment whose message id sits at the current low-level
 * write-confirmation position in the slab ring.
 *
 * Always returns {@code true} so it can be invoked from inside an
 * {@code assert} statement without affecting production behavior.
 * NOTE(review): duplicate of an identical helper elsewhere in this file.
 *
 * @param output pipe whose slab ring is inspected
 * @param size   fragment size the caller claims to have written
 * @return always true (failures surface via AssertionError when -ea is on)
 */
private static <S extends MessageSchema<S>> boolean verifySize(Pipe<S> output, int size) {
    try {
        assert(Pipe.sizeOf(output, output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition]) == size) :
            "Did not write the same size fragment as expected, double check message. expected:"
            +Pipe.sizeOf(output, output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition])
            +" but was passed "+size+" for schema "+Pipe.schemaName(output)
            +" and assumed MsgId of "+output.slabRing[output.slabMask&(int)output.llRead.llwConfirmedPosition];
    } catch (ArrayIndexOutOfBoundsException aiex) {
        //ignore, caused by some poor unit tests which need to be re-written.
    }
    return true;
}
/**
 * Validates that the sequence length read from the expected-input pipe matches
 * the length reported by the visitor, then confirms and releases the active
 * fragment on that pipe.
 *
 * @param name   sequence field name (unused here)
 * @param id     sequence field id (unused here)
 * @param length observed sequence length to compare against the expected value
 * @throws AssertionError when the observed length differs from the expected one
 */
@Override
public void visitSequenceOpen(String name, long id, int length) {
    // Read the expected length first; takeInt advances the read position.
    final int expectedLength = Pipe.takeInt(expectedInput);
    if (expectedLength != length) {
        throw new AssertionError("expected length: " + Long.toHexString(expectedLength)
                + " but got " + Long.toHexString(length));
    }
    needsClose = false;
    Pipe.confirmLowLevelRead(expectedInput, Pipe.sizeOf(expectedInput, activeCursor));
    Pipe.releaseReadLock(expectedInput);
}
/**
 * Validates that the sequence length read from the expected-input pipe matches
 * the length reported by the visitor, then confirms and releases the active
 * fragment on that pipe.
 * NOTE(review): duplicate of an identical method elsewhere in this file.
 *
 * @param name   sequence field name (unused here)
 * @param id     sequence field id (unused here)
 * @param length observed sequence length to compare against the expected value
 * @throws AssertionError when the observed length differs from the expected one
 */
@Override
public void visitSequenceOpen(String name, long id, int length) {
    int tempLen;
    // takeInt both reads the expected value and advances the read position.
    if ((tempLen=Pipe.takeInt(expectedInput))!=length) {
        throw new AssertionError("expected length: "+Long.toHexString(tempLen)+" but got "+Long.toHexString(length));
    }
    needsClose = false;
    Pipe.confirmLowLevelRead(expectedInput, Pipe.sizeOf(expectedInput, activeCursor));
    Pipe.releaseReadLock(expectedInput);
}
/**
 * Moves the read position forward past the current chunk. After this call the
 * ChannelReader is invalid and beginRead() must be called again before reading.
 *
 * No-op on the pipe when no read is in progress; in every case the reading
 * flag ends up cleared.
 */
public void commitRead() {
    if (!isReading) {
        isReading = false;
        return;
    }
    // Confirm consumption of the chunked-stream fragment, then release the lock.
    Pipe.confirmLowLevelRead(pipe,
            Pipe.sizeOf(RawDataSchema.instance, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    Pipe.releaseReadLock(pipe);
    isReading = false;
}
/**
 * Closes out the current template fragment if one is still open: clears the
 * pending-close flag, confirms the low-level read of the active fragment on
 * the expected-input pipe, and releases its read lock.
 *
 * @param name template name (unused here)
 * @param id   template id (unused here)
 */
@Override
public void visitTemplateClose(String name, long id) {
    if (!needsClose) {
        return; // nothing pending — fragment was already confirmed/released
    }
    needsClose = false;
    Pipe.confirmLowLevelRead(expectedInput, Pipe.sizeOf(expectedInput, activeCursor));
    Pipe.releaseReadLock(expectedInput);
}
/**
 * Closes out the current template fragment if one is still open: clears the
 * pending-close flag, confirms the low-level read of the active fragment on
 * the expected-input pipe, and releases its read lock.
 * NOTE(review): duplicate of an identical method elsewhere in this file.
 *
 * @param name template name (unused here)
 * @param id   template id (unused here)
 */
@Override
public void visitTemplateClose(String name, long id) {
    if (needsClose) {
        needsClose = false;
        Pipe.confirmLowLevelRead(expectedInput, Pipe.sizeOf(expectedInput, activeCursor));
        Pipe.releaseReadLock(expectedInput);
    }
}
/**
 * Closes out the current fragment if one is still open: clears the
 * pending-close flag, confirms the low-level read of the active fragment on
 * the expected-input pipe, and releases its read lock.
 *
 * @param name fragment name (unused here)
 * @param id   fragment id (unused here)
 */
@Override
public void visitFragmentClose(String name, long id) {
    if (!needsClose) {
        return; // nothing pending — fragment was already confirmed/released
    }
    needsClose = false;
    Pipe.confirmLowLevelRead(expectedInput, Pipe.sizeOf(expectedInput, activeCursor));
    Pipe.releaseReadLock(expectedInput);
}
/**
 * Closes out the current fragment if one is still open: clears the
 * pending-close flag, confirms the low-level read of the active fragment on
 * the expected-input pipe, and releases its read lock.
 * NOTE(review): duplicate of an identical method elsewhere in this file.
 *
 * @param name fragment name (unused here)
 * @param id   fragment id (unused here)
 */
@Override
public void visitFragmentClose(String name, long id) {
    if (needsClose) {
        needsClose = false;
        Pipe.confirmLowLevelRead(expectedInput, Pipe.sizeOf(expectedInput, activeCursor));
        Pipe.releaseReadLock(expectedInput);
    }
}
/**
 * Closes out the current template fragment if one is still open: clears the
 * pending-close flag, confirms the low-level read of the active fragment on
 * the expected-input pipe, and releases its read lock.
 * NOTE(review): duplicate of an identical method elsewhere in this file.
 *
 * @param name template name (unused here)
 * @param id   template id (unused here)
 */
@Override
public void visitTemplateClose(String name, long id) {
    if (needsClose) {
        needsClose = false;
        Pipe.confirmLowLevelRead(expectedInput, Pipe.sizeOf(expectedInput, activeCursor));
        Pipe.releaseReadLock(expectedInput);
    }
}
private void publishOpenWrite() { //log.trace("write block"); totalBytesWritten = totalBytesWritten + (outputStreamFlyweight.closeLowLevelField()); Pipe.confirmLowLevelWrite(pipe, Pipe.sizeOf(pipe, RawDataSchema.MSG_CHUNKEDSTREAM_1)); Pipe.publishWrites(pipe); hasOpenWrite = false; }
/**
 * Finalizes the currently open write: closes the low-level blob field
 * (accumulating its byte count into the running total), confirms the
 * chunked-stream fragment on the pipe, publishes it, and clears the
 * open-write flag.
 * NOTE(review): duplicate of an identical method elsewhere in this file.
 */
private void publishOpenWrite() {
    //log.trace("write block");
    // closeLowLevelField returns the byte count written for this block.
    totalBytesWritten = totalBytesWritten + (outputStreamFlyweight.closeLowLevelField());
    Pipe.confirmLowLevelWrite(pipe, Pipe.sizeOf(pipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    Pipe.publishWrites(pipe);
    hasOpenWrite = false;
}
/**
 * One-time initialization: snapshots the current head position of every
 * output pipe and caches the maximum fragment size for the MAX_FIELDS
 * message of the codec schema.
 *
 * Assumes outputPipe is non-empty (outputPipe[0] is dereferenced).
 */
@Override
public void startup() {
    final int pipeCount = outputPipe.length;
    this.head = new long[pipeCount];
    for (int p = 0; p < pipeCount; p++) {
        this.head[p] = Pipe.headPosition(outputPipe[p]);
    }
    maxMsgSize = Pipe.sizeOf(outputPipe[0], PhastCodecSchema.MSG_MAX_FIELDS);
}
/**
 * Finishes the blob currently being written to the reader's working pipe,
 * runs the trie query against that freshly published chunk, and then
 * confirms/releases the chunk so the working pipe can be reused.
 *
 * NOTE(review): confirmLowLevelWrite is invoked after publishWrites here,
 * which is the reverse of the usual confirm-then-publish order seen elsewhere
 * in this file — presumably tolerated because the working pipe is private to
 * this reader; confirm against the Pipe API contract.
 *
 * @param reader reader owning the working pipe that holds the query bytes
 * @param trie   trie to query against the written blob
 * @return query result value, or the trie's not-found sentinel
 */
public static long blobQuery(TrieParserReader reader, TrieParser trie) {
    Pipe.outputStream(reader.workingPipe).closeLowLevelField();
    Pipe.publishWrites(reader.workingPipe);
    Pipe.confirmLowLevelWrite(reader.workingPipe, Pipe.sizeOf(reader.workingPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    ///
    Pipe.takeMsgIdx(reader.workingPipe);
    // -1 length: query consumes the full available blob payload.
    long result = TrieParserReader.query(reader,trie,reader.workingPipe,-1);
    Pipe.confirmLowLevelRead(reader.workingPipe, Pipe.sizeOf(reader.workingPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    Pipe.releaseReadLock(reader.workingPipe);
    return result;
}
/**
 * Finishes the blob currently being written to the reader's working pipe,
 * runs the trie query against that freshly published chunk, and then
 * confirms/releases the chunk so the working pipe can be reused.
 *
 * NOTE(review): duplicate of an identical method elsewhere in this file.
 * Also, confirmLowLevelWrite is invoked after publishWrites — the reverse of
 * the usual confirm-then-publish order; confirm against the Pipe API contract.
 *
 * @param reader reader owning the working pipe that holds the query bytes
 * @param trie   trie to query against the written blob
 * @return query result value, or the trie's not-found sentinel
 */
public static long blobQuery(TrieParserReader reader, TrieParser trie) {
    Pipe.outputStream(reader.workingPipe).closeLowLevelField();
    Pipe.publishWrites(reader.workingPipe);
    Pipe.confirmLowLevelWrite(reader.workingPipe, Pipe.sizeOf(reader.workingPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    ///
    Pipe.takeMsgIdx(reader.workingPipe);
    // -1 length: query consumes the full available blob payload.
    long result = TrieParserReader.query(reader,trie,reader.workingPipe,-1);
    Pipe.confirmLowLevelRead(reader.workingPipe, Pipe.sizeOf(reader.workingPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    Pipe.releaseReadLock(reader.workingPipe);
    return result;
}
/**
 * Consumes a BEGIN (MSG_BEGIN_208) fragment from the pipe and releases the
 * read lock. The message id and sequence fields are taken only to advance the
 * read position; their values are intentionally discarded.
 *
 * @param pipe pipe positioned at a MSG_BEGIN_208 fragment
 */
private void writeBegin(Pipe<NetPayloadSchema> pipe) {
    // Both take* calls have the side effect of advancing the read cursor;
    // the previously assigned locals (msgIdx, seq) were unused and removed.
    Pipe.takeMsgIdx(pipe);
    Pipe.takeInt(pipe); // sequence field, value unused
    Pipe.confirmLowLevelRead(pipe, Pipe.sizeOf(pipe, NetPayloadSchema.MSG_BEGIN_208));
    Pipe.releaseReadLock(pipe);
}
/**
 * Drains consecutive chunked-stream fragments from the target pipe into the
 * reader. Stops when the pipe has no more content or the next message id
 * peeked is negative (end-of-stream marker). Read locks are retained
 * (readNextWithoutReleasingReadLock) while each fragment is confirmed.
 *
 * @param reader     accumulator that receives each blob field
 * @param targetPipe raw-data pipe supplying the fragments
 */
public static void appendNextFieldToReader(LittleEndianDataInputBlobReader reader, Pipe<RawDataSchema> targetPipe) {
    for (;;) {
        // Same short-circuit order as hasContentToRead && peekInt >= 0.
        if (!Pipe.hasContentToRead(targetPipe) || Pipe.peekInt(targetPipe) < 0) {
            break;
        }
        Pipe.takeMsgIdx(targetPipe);
        accumLowLevelAPIField(reader);
        Pipe.readNextWithoutReleasingReadLock(targetPipe);
        Pipe.confirmLowLevelRead(targetPipe,
                Pipe.sizeOf(targetPipe, RawDataSchema.MSG_CHUNKEDSTREAM_1));
    }
}