/**
 * Serializes the given object into a compressed Base64 encoded string.
 * A null object is packed as the empty string.
 *
 * @param object the instance to serialize, may be null
 * @param conf   the current Configuration, used to locate the serializer
 * @return the Base64 encoded representation, or "" when {@code object} is null
 * @throws FlowException if serialization fails, wrapping the underlying IOException
 */
public static String pack( Object object, Configuration conf )
  {
  if( object == null )
    return "";

  try
    {
    return serializeBase64( object, conf, true );
    }
  catch( IOException exception )
    {
    throw new FlowException( "unable to pack object: " + object.getClass().getCanonicalName(), exception );
    }
  }
/**
 * Serializes the given object into a compressed Base64 encoded string.
 * A null object is packed as the empty string, mirroring the static
 * {@code pack( Object, Configuration )} utility.
 *
 * @param object the instance to serialize, may be null
 * @param conf   the current Configuration, used to locate the serializer
 * @return the Base64 encoded representation, or "" when {@code object} is null
 * @throws FlowException if serialization fails, wrapping the underlying IOException
 */
private String pack( Object object, Configuration conf )
  {
  // guard against null for consistency with the static pack() overload; without
  // it, serializeBase64 would throw an unwrapped NullPointerException from
  // object.getClass()
  if( object == null )
    return "";

  try
    {
    return serializeBase64( object, conf, true );
    }
  catch( IOException exception )
    {
    throw new FlowException( "unable to pack object: " + object.getClass().getCanonicalName(), exception );
    }
  }
/**
 * Serializes the given object into a compressed Base64 encoded string.
 * A null object is packed as the empty string, mirroring the static
 * {@code pack( Object, Configuration )} utility.
 *
 * @param object the instance to serialize, may be null
 * @param conf   the current Configuration, used to locate the serializer
 * @return the Base64 encoded representation, or "" when {@code object} is null
 * @throws FlowException if serialization fails, wrapping the underlying IOException
 */
private String pack( Object object, Configuration conf )
  {
  // guard against null for consistency with the static pack() overload; without
  // it, serializeBase64 would throw an unwrapped NullPointerException from
  // object.getClass()
  if( object == null )
    return "";

  try
    {
    return serializeBase64( object, conf, true );
    }
  catch( IOException exception )
    {
    throw new FlowException( "unable to pack object: " + object.getClass().getCanonicalName(), exception );
    }
  }
/**
 * Packs the given object into a compressed Base64 encoded string for transport
 * inside a Hadoop Configuration. Null objects pack as the empty string.
 *
 * @param object the instance to serialize, may be null
 * @param conf   the current Configuration, used to locate the serializer
 * @return the Base64 encoded representation, or "" when {@code object} is null
 * @throws FlowException if serialization fails, wrapping the underlying IOException
 */
public static String pack( Object object, Configuration conf )
  {
  if( object == null )
    return "";

  try
    {
    return serializeBase64( object, conf, true );
    }
  catch( IOException exception )
    {
    throw new FlowException( "unable to pack object: " + object.getClass().getCanonicalName(), exception );
    }
  }
/**
 * Serializes the given object to a Base64 encoded string, optionally
 * compressing the serialized bytes first.
 *
 * @param object   the instance to serialize, must not be null
 * @param conf     the current Configuration, used to resolve the ObjectSerializer
 * @param compress true to compress the serialized bytes before encoding
 * @return the Base64 encoded serialized form of {@code object}
 * @throws IOException if serialization fails, or if the serializer class cannot
 *                     be found (the ClassNotFoundException is wrapped)
 */
public static <T> String serializeBase64( T object, Configuration conf, boolean compress ) throws IOException
  {
  final ObjectSerializer serializer;

  try
    {
    serializer = instantiateSerializer( conf, object.getClass() );
    }
  catch( ClassNotFoundException exception )
    {
    // rewrap so callers only need to handle IOException
    throw new IOException( exception );
    }

  return encodeBytes( serializer.serialize( object, compress ) );
  }
/**
 * Serializes the given object to a Base64 encoded string, optionally
 * compressing the serialized bytes first.
 *
 * @param object   the instance to serialize, must not be null
 * @param conf     the current Configuration, used to resolve the ObjectSerializer
 * @param compress true to compress the serialized bytes before encoding
 * @return the Base64 encoded serialized form of {@code object}
 * @throws IOException if serialization fails, or if the serializer class cannot
 *                     be found (the ClassNotFoundException is wrapped)
 */
public static <T> String serializeBase64( T object, Configuration conf, boolean compress ) throws IOException
  {
  final ObjectSerializer serializer;

  try
    {
    serializer = instantiateSerializer( conf, object.getClass() );
    }
  catch( ClassNotFoundException exception )
    {
    // rewrap so callers only need to handle IOException
    throw new IOException( exception );
    }

  return encodeBytes( serializer.serialize( object, compress ) );
  }
// Stash the serialized (compressed Base64) input-format configs in the job conf;
// NOTE(review): presumably read back under the same key on the task side — confirm against the reader.
toJob.set( "cascading.multiinputformats", HadoopUtil.serializeBase64( configs, toJob, true ) );
// Stash the serialized (compressed Base64) input-format configs in the job conf;
// NOTE(review): presumably read back under the same key on the task side — confirm against the reader.
toJob.set( "cascading.multiinputformats", HadoopUtil.serializeBase64( configs, toJob, true ) );
@Override public void sinkConfInit(FlowProcess<JobConf> flowProcess, Tap<JobConf, RecordReader<Tuple, Tuple>, OutputCollector<Tuple, Tuple>> tap, JobConf conf) { // Pick temp location in HDFS for conf files. // TODO KKr - should I get rid of this temp directory when we're done? String coreDirname = _solrCoreDir.getName(); Path hdfsSolrCoreDir = new Path(Hfs.getTempPath(conf), "solr-core-" + Util.createUniqueID() + "/" + coreDirname); // Copy Solr core directory into HDFS. try { FileSystem fs = hdfsSolrCoreDir.getFileSystem(conf); fs.copyFromLocalFile(new Path(_solrCoreDir.getAbsolutePath()), hdfsSolrCoreDir); } catch (IOException e) { throw new TapException("Can't copy Solr core directory into HDFS", e); } conf.setOutputKeyClass(Tuple.class); conf.setOutputValueClass(Tuple.class); conf.setOutputFormat(SolrOutputFormat.class); try { conf.set(SolrOutputFormat.SINK_FIELDS_KEY, HadoopUtil.serializeBase64(getSinkFields(), conf)); } catch (IOException e) { throw new TapException("Can't serialize sink fields", e); } conf.set(SolrOutputFormat.SOLR_CORE_PATH_KEY, hdfsSolrCoreDir.toString()); conf.setInt(SolrOutputFormat.MAX_SEGMENTS_KEY, _maxSegments); conf.set(SolrOutputFormat.DATA_DIR_PROPERTY_NAME_KEY, _dataDirPropertyName); }