/** Factory hook: builds a blob writer flyweight bound to this pipe instance. */
protected DataOutputBlobWriter<T> createNewBlobWriter() {
    DataOutputBlobWriter<T> blobWriter = new DataOutputBlobWriter<T>(this);
    return blobWriter;
}
/** Creates the writer flyweight used for streaming bytes into this pipe's blob. */
protected DataOutputBlobWriter<T> createNewBlobWriter() {
    final DataOutputBlobWriter<T> result = new DataOutputBlobWriter<T>(this);
    return result;
}
/**
 * Override point for subclasses that need a specialized blob writer;
 * the default simply wraps this pipe.
 */
protected DataOutputBlobWriter<T> createNewBlobWriter() {
    return new DataOutputBlobWriter<T>(this);
}
/**
 * Stage startup: builds the request writer and positions the path index
 * one past the last test file (the run loop presumably decrements it —
 * TODO confirm against the stage's run() method).
 */
public void startup() {
    writer = new DataOutputBlobWriter<HTTPRequestSchema>(output);
    pathIdx = testDataFiles.testFilePaths.length;
}
/**
 * Stage startup: records the start time, builds the reader/writer flyweights,
 * and precomputes the per-message lookup tables from the schema's FROM metadata.
 */
@Override
public void startup() {
    startup = System.currentTimeMillis();
    input2Reader = new DataInputBlobReader<RawDataSchema>(input2);
    writer = new DataOutputBlobWriter<RawDataSchema>(output);

    final int fieldCount = PhastCodecSchema.FROM.fieldIdScript.length;
    lengthLookup = new short[fieldCount];
    sizeLookup = new int[fieldCount];
    for (int idx = 0; idx < fieldCount; idx++) {
        // field ids are offset by 10000 in the script; store the raw length
        lengthLookup[idx] = (short) (PhastCodecSchema.FROM.fieldIdScript[idx] - 10000);
        sizeLookup[idx] = PhastCodecSchema.FROM.fragDataSize[idx];
    }

    // worst case output: every field of a message costs at most 10 bytes
    int maxFieldsPerMessage = PhastCodecSchema.FROM.messageStarts.length;
    maxBytesPerMessage = maxFieldsPerMessage * 10;
}
/**
 * Stage startup: builds the reader/writer flyweights and a reverse lookup
 * from (fieldId - 10000) back to the field's index in the schema script.
 */
@Override
public void startup() {
    reader = new DataInputBlobReader<RawDataSchema>(input);
    output2Writer = new DataOutputBlobWriter<RawDataSchema>(output2);

    // one slot per message start, plus one
    idxReverseLookup = new int[PhastCodecSchema.FROM.messageStarts.length + 1];

    for (int idx = PhastCodecSchema.FROM.fieldIdScript.length - 1; idx >= 0; idx--) {
        long fieldId = PhastCodecSchema.FROM.fieldIdScript[idx];
        if (fieldId > 10000) {
            // ids above 10000 encode a reverse-lookup slot
            idxReverseLookup[(int) (fieldId - 10000)] = idx;
        }
    }
}
/**
 * Builds the regulator's backing pipe plus the reader/writer flyweights.
 *
 * @param bitPerSecond               throughput cap applied by this regulator
 * @param maxWrittenChunksInFlight   ring capacity in chunks
 * @param maxWrittenChunkSizeInBytes maximum payload per chunk
 * @throws UnsupportedOperationException if the configured pipe has no blob space
 */
public StreamRegulator(long bitPerSecond, int maxWrittenChunksInFlight, int maxWrittenChunkSizeInBytes) {
    PipeConfig<RawDataSchema> config =
            new PipeConfig<RawDataSchema>(RawDataSchema.instance, maxWrittenChunksInFlight, maxWrittenChunkSizeInBytes);
    this.pipe = new Pipe<RawDataSchema>(config);
    this.pipe.initBuffers();
    // publish immediately; release in batches of one third of the in-flight count
    Pipe.setPublishBatchSize(pipe, 0);
    Pipe.setReleaseBatchSize(pipe, maxWrittenChunksInFlight / 3);
    if (this.pipe.blobMask <= 0) {
        throw new UnsupportedOperationException(
                "Pipe must have room to send blob data. Found size:" + this.pipe.sizeOfBlobRing + " config: " + config);
    }
    this.inputStreamFlyweight = new DataInputBlobReader<RawDataSchema>(pipe);
    this.outputStreamFlyweight = new DataOutputBlobWriter<RawDataSchema>(pipe);
    this.bitPerSecond = bitPerSecond;
    // TODO: may want to add latency per chunk, per startup, or per N bytes.
}
public StreamRegulator(long bitPerSecond, int maxWrittenChunksInFlight, int maxWrittenChunkSizeInBytes) { PipeConfig<RawDataSchema> pipeConfig = new PipeConfig<RawDataSchema>(RawDataSchema.instance, maxWrittenChunksInFlight, maxWrittenChunkSizeInBytes); this.pipe = new Pipe<RawDataSchema>(pipeConfig); this.pipe.initBuffers(); Pipe.setPublishBatchSize(pipe, 0); Pipe.setReleaseBatchSize(pipe, maxWrittenChunksInFlight/3); if (this.pipe.blobMask<=0) { throw new UnsupportedOperationException("Pipe must have room to send blob data. Found size:"+ this.pipe.sizeOfBlobRing+" config: "+pipeConfig); } this.inputStreamFlyweight = new DataInputBlobReader<RawDataSchema>(pipe); this.outputStreamFlyweight = new DataOutputBlobWriter<RawDataSchema>(pipe); this.bitPerSecond = bitPerSecond; //TODO: may want to add latency per chunk, per startup, or per N bytes. }
/**
 * Creates a stream regulator backed by a freshly initialized RawDataSchema pipe.
 * Batching is disabled for publish and set to a third of the in-flight chunk
 * count for release.
 */
public StreamRegulator(long bitPerSecond, int maxWrittenChunksInFlight, int maxWrittenChunkSizeInBytes) {
    PipeConfig<RawDataSchema> pipeConfig = new PipeConfig<RawDataSchema>(
            RawDataSchema.instance,
            maxWrittenChunksInFlight,
            maxWrittenChunkSizeInBytes);

    this.pipe = new Pipe<RawDataSchema>(pipeConfig);
    this.pipe.initBuffers();

    Pipe.setPublishBatchSize(pipe, 0);
    Pipe.setReleaseBatchSize(pipe, maxWrittenChunksInFlight / 3);

    // a pipe with no blob space cannot carry stream payloads at all
    if (this.pipe.blobMask <= 0) {
        throw new UnsupportedOperationException(
                "Pipe must have room to send blob data. Found size:" + this.pipe.sizeOfBlobRing + " config: " + pipeConfig);
    }

    this.inputStreamFlyweight = new DataInputBlobReader<RawDataSchema>(pipe);
    this.outputStreamFlyweight = new DataOutputBlobWriter<RawDataSchema>(pipe);
    this.bitPerSecond = bitPerSecond;
    // TODO: may want to add latency per chunk, per startup, or per N bytes.
}
@Test public void copyIntTest() throws IOException{ //create blob for test Pipe<RawDataSchema> encodedValuesToValidate = new Pipe<RawDataSchema>(new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000)); encodedValuesToValidate.initBuffers(); DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(encodedValuesToValidate); //create int dictionary int[] intDictionary = new int[5]; Arrays.fill(intDictionary, 0); intDictionary[2] = 5; //make it increment 2 values 0 and 5 PhastEncoder.copyInt(intDictionary, writer, 0, 0, 2, 0, false); writer.close(); }
@Test public void testEncodeString() throws IOException{ //create a new blob pipe to put a string on Pipe<RawDataSchema> pipe = new Pipe<RawDataSchema>(new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000)); pipe.initBuffers(); DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(pipe); //encode a string on blob using the static method StringBuilder testString = new StringBuilder("This is a test"); PhastEncoder.encodeString(writer, testString , 0, 0, false); writer.close(); //check what is on the pipe DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(pipe); //should be -63 int test = reader.readPackedInt(); //the string String value = reader.readUTF(); reader.close(); String s = value.toString(); assertTrue((test==-63) && (s.compareTo("This is a test")==0)); }
/**
 * Writes the wire form of an encoded string (packed int -63 + UTF payload)
 * and checks PhastDecoder.decodeString recovers the original text.
 */
@Test
public void decodeStringTest() throws IOException {
    Pipe<RawDataSchema> pipe = new Pipe<RawDataSchema>(
            new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000));
    pipe.initBuffers();

    // hand-build the encoded form the decoder expects
    DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(pipe);
    DataOutputBlobWriter.writePackedInt(writer, -63);
    writer.writeUTF("This is a test");
    writer.close();

    DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(pipe);
    String stest = PhastDecoder.decodeString(reader, false);
    reader.close();

    // fixed: use equals() for string equality instead of compareTo(...) == 0
    assertTrue("This is a test".equals(stest));
}
@Test public void defaultIntTest() throws IOException{ //create a blob to test Pipe<RawDataSchema> encodedValuesToValidate = new Pipe<RawDataSchema>(new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000)); encodedValuesToValidate.initBuffers(); DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(encodedValuesToValidate); //make int array int[] defaultInt = new int[5]; defaultInt[3] = 4; //should encode 16 PhastEncoder.encodeDefaultInt(defaultInt, writer, 1, 1, 3, 16, false); //should encode 4 PhastEncoder.encodeDefaultInt(defaultInt, writer, 0, 1, 3, 16, false); writer.close(); DataInputBlobReader<RawDataSchema> reader = new DataInputBlobReader<RawDataSchema>(encodedValuesToValidate); int test1 = reader.readPackedInt(); //shouldnt encode anything reader.close(); }
// flyweight writer targeting the blob of the validation pipe; presumably reused for the encode calls below — TODO confirm against surrounding test body
DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(encodedValuesToValidate);
// writer flyweight over the validation pipe's blob; consumed by code outside this view
DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(encodedValuesToValidate);
// raw-data pipe (100 chunks x 4000 bytes) backing the writer under test
PipeConfig<RawDataSchema> rawConfig = new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000);
Pipe<RawDataSchema> pipe = new Pipe<RawDataSchema>(rawConfig);
pipe.initBuffers();
DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(pipe);
// allocate and initialize the test pipe, then bind a blob writer to it
final PipeConfig<RawDataSchema> testConfig =
        new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000);
Pipe<RawDataSchema> pipe = new Pipe<RawDataSchema>(testConfig);
pipe.initBuffers();
DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(pipe);
// test fixture: a 100 x 4000-byte raw-data pipe and a writer over its blob
PipeConfig<RawDataSchema> cfg = new PipeConfig<RawDataSchema>(RawDataSchema.instance, 100, 4000);
Pipe<RawDataSchema> pipe = new Pipe<RawDataSchema>(cfg);
pipe.initBuffers();
DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(pipe);
// writer flyweight used to lay down the bytes that the decoder under test will read back
DataOutputBlobWriter<RawDataSchema> writer = new DataOutputBlobWriter<RawDataSchema>(testDataToDecode);