@Override
public void open() throws HyracksDataException {
    // Only the writer for the first input (index 0) propagates open downstream;
    // the remaining inputs share the already-opened writer.
    if (index != 0) {
        return;
    }
    writer.open();
}
@Override
public void open() throws HyracksDataException {
    // Mark each partition writer open *before* invoking open() so that a
    // failure part-way through still lets close() clean up this writer.
    for (int p = 0; p < pWriters.length; p++) {
        isWriterOpen[p] = true;
        pWriters[p].open();
    }
}
@Override
public void open() throws HyracksDataException {
    // Only the first input (idx 0) opens the shared downstream writer.
    if (idx != 0) {
        return;
    }
    writer.open();
}
@Override
public void open() throws HyracksDataException {
    // Record the open flag before calling open() so close() can still
    // release a writer whose open() threw mid-loop.
    for (int w = 0; w < epWriters.length; w++) {
        isOpen[w] = true;
        epWriters[w].open();
    }
}
@Override
public void open() throws HyracksDataException {
    // Open only the non-materialized outputs here; materialized ones are
    // handled separately. Flag is set first so close() can clean up on failure.
    for (int out = 0; out < numberOfNonMaterializedOutputs; out++) {
        isOpen[out] = true;
        writers[out].open();
    }
}
@Override
public void open() throws HyracksDataException {
    // Union inputs share one downstream writer: only the first arriving
    // input actually opens it. Guarded by the operator monitor because
    // inputs may open concurrently.
    synchronized (UnionOperator.this) {
        nOpened++;
        if (nOpened == 1) {
            writer.open();
        }
    }
}
@Override
public void open() throws HyracksDataException {
    // Set the flag before open() so close() still runs if open() throws.
    isOpen = true;
    writer.open();
}
@Override
public void open() throws HyracksDataException {
    // Open the downstream writer first, then start the consumer thread
    // that will feed it.
    writer.open();
    consumerThread.start();
}
@Override
public void open() throws HyracksDataException {
    // Forward the open and bump the open-call counter (profiling/accounting).
    writer.open();
    openCounter.update(1);
}
@Override
public void open() throws HyracksDataException {
    // Initialize the tuple-partition computer before any writer accepts data.
    tpc.initialize();
    // Flag each writer open before calling open() so close() can clean up
    // even if a later open() in the loop throws.
    for (int w = 0; w < pWriters.length; w++) {
        isOpen[w] = true;
        pWriters[w].open();
    }
}
@Override
public void open() throws HyracksDataException {
    // Reset per-run state, then build a fresh accessor over the output
    // record descriptor and open the downstream writer.
    currentSize = 0;
    finished = false;
    fta = new FrameTupleAccessor(outRecDescs[0]);
    writer.open();
}
@Override
public void open() throws HyracksDataException {
    // Clear the appender's frame, open the downstream writer, then set up
    // the HDFS helper used while processing.
    appender.reset(frame, true);
    writer.open();
    hdfs = new HDFSFunctions(nodeControllerInfos, hdfsConf);
}
@Override
public void open() throws HyracksDataException {
    // Fresh accessor for the incoming record layout.
    accessor = new FrameTupleAccessor(inputRecDesc);
    try {
        writer.open();
    } catch (Exception e) {
        // Normalize any failure into a HyracksDataException, preserving the cause.
        throw HyracksDataException.create(e);
    }
}
@Override
public void open() throws HyracksDataException {
    try {
        // Create and open the result-partition writer for this partition,
        // then prime the serializer that will feed it.
        resultPartitionWriter = resultPartitionManager.createResultPartitionWriter(ctx, rsId, ordered, asyncMode,
                partition, nPartitions, maxReads);
        resultPartitionWriter.open();
        resultSerializer.init();
    } catch (HyracksException e) {
        // Re-wrap as a data exception, keeping the original as the cause.
        throw HyracksDataException.create(e);
    }
}
@Override
public void open() throws HyracksDataException {
    // Identify this feed runtime instance by feed id, runtime type, and partition.
    ActiveRuntimeId runtimeId = new ActiveRuntimeId(connectionId.getFeedId(), runtimeType.toString(), partition);
    try {
        initializeNewFeedRuntime(runtimeId);
        // Flag set before open() so teardown logic can still run if open() throws.
        opened = true;
        writer.open();
    } catch (Exception e) {
        // Fix: dropped e.printStackTrace() — raw stderr dumps bypass the
        // logging framework and duplicate the report; the cause is fully
        // preserved in the wrapped exception below.
        throw HyracksDataException.create(e);
    }
}
@Override
public void open() throws HyracksDataException {
    // Open the downstream writer, then fetch the join cache built by the
    // preceding activity for this partition.
    writer.open();
    TaskId cacheTaskId = new TaskId(new ActivityId(getOperatorId(), JOIN_CACHE_ACTIVITY_ID), partition);
    state = (JoinCacheTaskState) ctx.getStateObject(cacheTaskId);
}
@Override public void open() throws HyracksDataException { // retrieve the range map from the state object (previous activity should have already stored it) // then deposit it into the ctx so that MToN-partition can pick it up Object stateObjKey = new TaskId(new ActivityId(odId, RANGEMAP_READER_ACTIVITY_ID), partition); RangeMapState rangeMapState = (RangeMapState) ctx.getStateObject(stateObjKey); TaskUtil.put(rangeMapKeyInContext, rangeMapState.rangeMap, ctx); writer.open(); }
@Override
public void open() throws HyracksDataException {
    // Prepare the input accessor and obtain the index instance via its helper.
    accessor = new FrameTupleAccessor(recDesc);
    indexHelper.open();
    index = indexHelper.getIndexInstance();
    try {
        writer.open();
        initializeBulkLoader();
    } catch (Exception e) {
        // Wrap any failure, preserving the cause.
        throw HyracksDataException.create(e);
    }
}
@Override
public void open() throws HyracksDataException {
    // Open downstream first, then set up the per-run tuple machinery:
    // accessor over the input layout, builder sized to the output layout,
    // and a frame appender backed by a fresh variable-size frame.
    writer.open();
    accessor = new FrameTupleAccessor(inputRecDesc);
    int outFieldCount = outputRecDesc.getFieldCount();
    builder = new ArrayTupleBuilder(outFieldCount);
    builderData = builder.getFieldData();
    appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
}
@Override
public void open() throws HyracksDataException {
    // When some outputs must be materialized, create and open the
    // materializer task state for this partition first.
    if (requiresMaterialization) {
        TaskId taskId = new TaskId(getActivityId(), partition);
        state = new MaterializerTaskState(ctx.getJobletContext().getJobId(), taskId,
                numberOfMaterializedOutputs);
        state.open(ctx);
    }
    // Open the pass-through outputs; flag each before open() so close()
    // can still clean up if an open() throws mid-loop.
    for (int out = 0; out < numberOfNonMaterializedOutputs; out++) {
        isOpen[out] = true;
        writers[out].open();
    }
}