/**
 * Constructs an empty job specification with the given frame size.
 * All operator/connector bookkeeping structures start empty, the id
 * counters start at zero, and scheduling defaults are conservative
 * (no reattempts, connector-policy scheduling disabled).
 *
 * @param frameSize the frame size (in bytes) to use for this job
 */
public JobSpecification(int frameSize) {
    // DAG structure: roots and result sets.
    roots = new ArrayList<>();
    resultSetIds = new ArrayList<>();
    // Operator/connector registries and their wiring maps.
    opMap = new HashMap<>();
    connMap = new HashMap<>();
    opInputMap = new HashMap<>();
    opOutputMap = new HashMap<>();
    connectorOpMap = new HashMap<>();
    // Job-level metadata.
    properties = new HashMap<>();
    userConstraints = new HashSet<>();
    // Id generation starts from zero for both operators and connectors.
    operatorIdCounter = 0;
    connectorIdCounter = 0;
    // Scheduling defaults.
    maxReattempts = 0;
    useConnectorPolicyForScheduling = false;
    requiredClusterCapacity = new ClusterCapacity();
    setFrameSize(frameSize);
}
/**
 * Submits the given job specification to the cluster controller with
 * runtime profiling enabled and blocks until it completes.
 *
 * @param spec    the job to run; its frame size is overwritten with {@code FRAME_SIZE}
 * @param appName application name (currently unused by this method — TODO confirm callers rely on it)
 * @throws Exception if job submission or completion fails
 */
public static void runJob(JobSpecification spec, String appName) throws Exception {
    spec.setFrameSize(FRAME_SIZE);
    final JobId startedJob = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
    hcc.waitForCompletion(startedJob);
}
/**
 * Builds the Hyracks job that ingests data for the given feed.
 * The resulting job wires the feed intake operator into a null sink
 * (both constrained to the same partitions), so the job's only purpose
 * is to drive the intake side effects.
 *
 * @param metadataProvider provides the application context and the intake runtime
 * @param policyAccessor   feed policy used when building the intake runtime
 * @return the assembled job specification paired with the adapter factory
 *         produced while building the intake runtime
 * @throws Exception if the intake runtime cannot be built
 */
private static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed feed,
        MetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
    JobSpecification intakeJob = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    // Frame size comes from the compiler properties of the application context.
    intakeJob.setFrameSize(
            metadataProvider.getApplicationContext().getCompilerProperties().getFrameSize());

    // The intake runtime yields (operator, partition constraint, adapter factory).
    Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> intake =
            metadataProvider.buildFeedIntakeRuntime(intakeJob, feed, policyAccessor);
    IOperatorDescriptor ingestOp = intake.first;
    AlgebricksPartitionConstraint partitions = intake.second;
    IAdapterFactory adapterFactory = intake.third;

    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(intakeJob, ingestOp, partitions);

    // Terminate the pipeline with a null sink on the same partitions.
    NullSinkOperatorDescriptor sink = new NullSinkOperatorDescriptor(intakeJob);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(intakeJob, sink, partitions);
    intakeJob.connect(new OneToOneConnectorDescriptor(intakeJob), ingestOp, 0, sink, 0);
    intakeJob.addRoot(sink);

    return Pair.of(intakeJob, adapterFactory);
}
JobSpecification spec = new JobSpecification(); spec.setFrameSize(frameSize); IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits); FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
/**
 * Compiles an XQuery module end to end: parse, translate, type-check,
 * optimize, and generate the Hyracks job. Each stage's result is reported
 * to the registered listener. The generated job specification is stored on
 * the module.
 *
 * @param name              display name for the query (used by the parser)
 * @param query             source of the query text
 * @param ccb               compiler control block supplying context and source-file map
 * @param optimizationLevel currently unused by this method — TODO confirm intended use
 * @throws SystemException wrapping any {@link AlgebricksException} raised
 *                         during optimization or job creation (code SYSE0001)
 */
public void compile(String name, Reader query, CompilerControlBlock ccb, int optimizationLevel)
        throws SystemException {
    // Parse and translate the module.
    moduleNode = XMLQueryParser.parse(name, query);
    listener.notifyParseResult(moduleNode);
    module = new XMLQueryTranslator(ccb).translateModule(moduleNode);
    pprinter = new LogicalOperatorPrettyPrintVisitor(
            new VXQueryLogicalExpressionPrettyPrintVisitor(module.getModuleContext()));

    // Build the compiler against the module body.
    VXQueryMetadataProvider metadata = new VXQueryMetadataProvider(nodeList, ccb.getSourceFileMap(),
            module.getModuleContext(), this.hdfsConf, nodeControllerInfos);
    compiler = cFactory.createCompiler(module.getBody(), metadata, 0);
    listener.notifyTranslationResult(module);

    // Type-check before optimizing.
    XMLQueryTypeChecker.typeCheckModule(module);
    listener.notifyTypecheckResult(module);

    try {
        compiler.optimize();
    } catch (AlgebricksException e) {
        throw new SystemException(ErrorCode.SYSE0001, e);
    }
    listener.notifyOptimizedResult(module);

    // Generate the Hyracks job and record it on the module.
    JobSpecification job;
    try {
        job = compiler.createJob(null, null);
        job.setFrameSize(frameSize);
    } catch (AlgebricksException e) {
        throw new SystemException(ErrorCode.SYSE0001, e);
    }
    module.setHyracksJobSpecification(job);
    listener.notifyCodegenResult(module);
}