/**
 * Adds the supplied input-path to mapper-generator pairs to this operation.
 * If no backing map has been set yet, the supplied map is stored as-is;
 * otherwise its entries are merged into the existing map.
 * NOTE(review): in the "not yet set" case the caller's map reference is
 * stored directly (no defensive copy) — confirm callers do not mutate it afterwards.
 *
 * @param inputMapperPairs map of input path to mapper generator class name
 */
default void addInputMapperPairs(final Map<String, String> inputMapperPairs) {
    final Map<String, String> existing = getInputMapperPairs();
    if (existing != null) {
        existing.putAll(inputMapperPairs);
    } else {
        setInputMapperPairs(inputMapperPairs);
    }
}
/**
 * Sets the exact number of reduce tasks to use.
 * Mutually exclusive with the min/max reducer settings.
 *
 * @param numReduceTasks the number of reducers, or null to leave unset
 * @return this builder
 * @throws IllegalArgumentException if a min or max reducer count is already configured
 */
default B reducers(final Integer numReduceTasks) {
    if (numReduceTasks != null) {
        // An exact count cannot be combined with a min/max range.
        final boolean rangeConfigured = _getOp().getMinReduceTasks() != null
                || _getOp().getMaxReduceTasks() != null;
        if (rangeConfigured) {
            throw new IllegalArgumentException("Invalid combination of fields. "
                    + "Either provide the number of reducers to use or provide a min and max value.");
        }
    }
    _getOp().setNumReduceTasks(numReduceTasks);
    return _self();
}
/**
 * Sets the exact number of map tasks to use.
 * Mutually exclusive with the min/max mapper settings.
 *
 * @param numMapTasks the number of mappers, or null to leave unset
 * @return this builder
 * @throws IllegalArgumentException if a min or max mapper count is already configured
 */
default B mappers(final Integer numMapTasks) {
    if (numMapTasks != null) {
        // An exact count cannot be combined with a min/max range.
        final boolean rangeConfigured = _getOp().getMinMapTasks() != null
                || _getOp().getMaxMapTasks() != null;
        if (rangeConfigured) {
            throw new IllegalArgumentException("Invalid combination of fields. "
                    + "Either provide the number of mappers to use or provide a min and max value.");
        }
    }
    _getOp().setNumMapTasks(numMapTasks);
    return _self();
}
/**
 * Sets the minimum number of reduce tasks.
 * Mutually exclusive with an exact reducer count.
 *
 * @param minReduceTasks the minimum number of reducers, or null to leave unset
 * @return this builder
 * @throws IllegalArgumentException if an exact reducer count is already configured
 */
default B minReducers(final Integer minReduceTasks) {
    final boolean conflicts = minReduceTasks != null && _getOp().getNumReduceTasks() != null;
    if (conflicts) {
        throw new IllegalArgumentException("Invalid combination of fields. "
                + "Either provide the number of reducers to use or provide a min and max value.");
    }
    _getOp().setMinReduceTasks(minReduceTasks);
    return _self();
}
/**
 * Sets the minimum number of map tasks.
 * Mutually exclusive with an exact mapper count.
 *
 * @param minMapTasks the minimum number of mappers, or null to leave unset
 * @return this builder
 * @throws IllegalArgumentException if an exact mapper count is already configured
 */
default B minMappers(final Integer minMapTasks) {
    final boolean conflicts = minMapTasks != null && _getOp().getNumMapTasks() != null;
    if (conflicts) {
        throw new IllegalArgumentException("Invalid combination of fields. "
                + "Either provide the number of mappers to use or provide a min and max value.");
    }
    _getOp().setMinMapTasks(minMapTasks);
    return _self();
}
/**
 * Sets the maximum number of reduce tasks.
 * Mutually exclusive with an exact reducer count.
 *
 * @param maxReduceTasks the maximum number of reducers, or null to leave unset
 * @return this builder
 * @throws IllegalArgumentException if an exact reducer count is already configured
 */
default B maxReducers(final Integer maxReduceTasks) {
    final boolean conflicts = maxReduceTasks != null && _getOp().getNumReduceTasks() != null;
    if (conflicts) {
        throw new IllegalArgumentException("Invalid combination of fields. "
                + "Either provide the number of reducers to use or provide a min and max value.");
    }
    _getOp().setMaxReduceTasks(maxReduceTasks);
    return _self();
}
/**
 * Sets the maximum number of map tasks.
 * Mutually exclusive with an exact mapper count.
 *
 * @param maxMapTasks the maximum number of mappers, or null to leave unset
 * @return this builder
 * @throws IllegalArgumentException if an exact mapper count is already configured
 */
default B maxMappers(final Integer maxMapTasks) {
    final boolean conflicts = maxMapTasks != null && _getOp().getNumMapTasks() != null;
    if (conflicts) {
        throw new IllegalArgumentException("Invalid combination of fields. "
                + "Either provide the number of mappers to use or provide a min and max value.");
    }
    _getOp().setMaxMapTasks(maxMapTasks);
    return _self();
}
/**
 * Creates a job with the store specific job initialisation and then applies the operation specific
 * {@link uk.gov.gchq.gaffer.hdfs.operation.handler.job.initialiser.JobInitialiser}.
 * <p>
 * One job is created per distinct mapper generator class; all input paths that map to the
 * same generator are grouped into that single job.
 *
 * @param operation The operation.
 * @param store The store executing the operation.
 * @return The created jobs, one per mapper generator class.
 * @throws IOException for IO issues.
 */
default List<Job> createJobs(final O operation, final Store store) throws IOException {
    // Group input paths by their mapper generator class name.
    // computeIfAbsent replaces the containsKey/get/put dance and removes the
    // need for Guava's Lists.newArrayList.
    final Map<String, List<String>> mapperGeneratorsToInputPathsList = new HashMap<>();
    for (final Map.Entry<String, String> entry : operation.getInputMapperPairs().entrySet()) {
        mapperGeneratorsToInputPathsList
                .computeIfAbsent(entry.getValue(), k -> new ArrayList<>())
                .add(entry.getKey());
    }

    final List<Job> jobs = new ArrayList<>();
    for (final String mapperGeneratorClassName : mapperGeneratorsToInputPathsList.keySet()) {
        final JobConf jobConf = createJobConf(operation, mapperGeneratorClassName, store);
        final Job job = Job.getInstance(jobConf);
        setupJob(job, operation, mapperGeneratorClassName, store);

        // Operation-specific initialisation runs after the store-specific setup.
        if (null != operation.getJobInitialiser()) {
            operation.getJobInitialiser().initialiseJob(job, operation, store);
        }
        jobs.add(job);
    }
    return jobs;
}
/**
 * Configures the job for text input and registers every input path whose
 * mapper generator matches the generator configured for this job.
 *
 * @param job the job to configure
 * @param operation the operation supplying the input-path to mapper-generator pairs
 * @throws IOException if an input path cannot be added to the job
 */
private void initialiseInput(final Job job, final MapReduce operation) throws IOException {
    job.setInputFormatClass(TextInputFormat.class);
    final String configuredGenerator = job.getConfiguration().get(MAPPER_GENERATOR);
    for (final Map.Entry<String, String> entry : operation.getInputMapperPairs().entrySet()) {
        // NOTE(review): substring match, not equals — confirm partial
        // class-name matches are intended here.
        if (entry.getValue().contains(configuredGenerator)) {
            TextInputFormat.addInputPath(job, new Path(entry.getKey()));
        }
    }
}
}
/**
 * Adds a single input-path to mapper-generator pair to the operation being built.
 *
 * @param inputPath the HDFS input path
 * @param mapperGeneratorClassName the mapper generator class name for that path
 * @return this builder
 */
default B addInputMapperPair(final String inputPath, final String mapperGeneratorClassName) {
    _getOp().addInputMapperPair(inputPath, mapperGeneratorClassName);
    return _self();
}
/**
 * Adds multiple input-path to mapper-generator pairs to the operation being built.
 *
 * @param inputMapperPairs map of input path to mapper generator class name
 * @return this builder
 */
default B addInputMapperPairs(final Map<String, String> inputMapperPairs) {
    _getOp().addInputMapperPairs(inputMapperPairs);
    return _self();
}
/**
 * Configures the job for Avro input using the configured schema file and
 * registers every input path whose mapper generator matches this job's.
 *
 * @param job the job to configure
 * @param operation the operation supplying the input-path to mapper-generator pairs
 * @throws IOException if the schema file cannot be read or an input path cannot be added
 * @throws IllegalArgumentException if no Avro schema file path has been set
 */
private void initialiseInput(final Job job, final MapReduce operation) throws IOException {
    if (null == avroSchemaFilePath) {
        throw new IllegalArgumentException("Avro schema file path has not been set");
    }

    // Parse the Avro schema and wire it in as the job's input key schema.
    final Schema schema = new Parser().parse(new File(avroSchemaFilePath));
    AvroJob.setInputKeySchema(job, schema);
    job.setInputFormatClass(AvroKeyInputFormat.class);

    final String configuredGenerator = job.getConfiguration().get(MAPPER_GENERATOR);
    for (final Map.Entry<String, String> entry : operation.getInputMapperPairs().entrySet()) {
        // NOTE(review): substring match, not equals — confirm partial
        // class-name matches are intended here.
        if (entry.getValue().contains(configuredGenerator)) {
            AvroKeyInputFormat.addInputPath(job, new Path(entry.getKey()));
        }
    }
}
/**
 * Adds a single input-path to mapper-generator pair to this operation,
 * creating the backing map if none has been set yet.
 *
 * @param inputPath the HDFS input path
 * @param mapperGeneratorClassName the mapper generator class name for that path
 */
default void addInputMapperPair(final String inputPath, final String mapperGeneratorClassName) {
    final Map<String, String> pairs = getInputMapperPairs();
    if (pairs != null) {
        pairs.put(inputPath, mapperGeneratorClassName);
    } else {
        final Map<String, String> inputMapperMap = new HashMap<>();
        inputMapperMap.put(inputPath, mapperGeneratorClassName);
        setInputMapperPairs(inputMapperMap);
    }
}