@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    String database;
    if (context != null) {
        // Inside a map/reduce task, the current database is only available via the job conf.
        database = context.getJobConf().get("hive.current.database");
    } else {
        // On the client side, ask the session state directly.
        database = SessionState.get().getCurrentDatabase();
    }
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        TypeInfoFactory.stringTypeInfo, new Text(database));
}
@Override
public void configure(MapredContext context) {
    // NOTE: the fetched configuration is never used; this override is effectively a stub.
    Configuration conf = context.getJobConf();
}
@Override
public void configure(MapredContext mapredContext) {
    JobConf jobconf = mapredContext.getJobConf();
    // An explicit seed is optional; amplification is only reproducible when it is set.
    String seed = jobconf.get(HivemallConstants.CONFKEY_RAND_AMPLIFY_SEED);
    this.useSeed = (seed != null);
    if (useSeed) {
        this.seed = Long.parseLong(seed);
    }
}
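For illustration, a minimal sketch of how such a seed could be injected through the job configuration; this setup code is hypothetical, and referencing the constant avoids hard-coding the literal key, which is not shown in these snippets:

// Hypothetical setup: supply the amplifier seed via the job configuration.
// The value must parse as a long, since configure(...) applies Long.parseLong to it.
JobConf jobconf = new JobConf();
jobconf.set(HivemallConstants.CONFKEY_RAND_AMPLIFY_SEED, "42");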
@Override
public void configure(MapredContext context) {
    super.configure(context);
    conf = context.getJobConf();
    soi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    doi = LazyPrimitiveObjectInspectorFactory.LAZY_DOUBLE_OBJECT_INSPECTOR;
    log.info("{} configured. Model base dir path: {}", UDF_NAME,
        conf.get(ModelLoader.MODEL_PATH_BASE_DIR));
}
@Override
public void configure(MapredContext context) {
    super.configure(context);
    if (context != null) {
        JobConf conf = context.getJobConf();
        String tdJarVersion = conf.get("td.jar.version");
        if (tdJarVersion != null) {
            // "td.jar.version" is only set on Treasure Data; disable JavaScript eval there.
            this.support_javascript_eval = false;
        }
    }
}
public static BufferedReader getBufferedReader(File file, MapredContext context)
        throws IOException {
    URI fileuri = file.toURI();
    Path path = new Path(fileuri);
    Configuration conf = context.getJobConf();
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(path);
    if (codec == null) {
        // No codec matched the file extension; read the file as plain text.
        return new BufferedReader(new FileReader(file));
    } else {
        // Decompress on the fly; BufferedReaderExt returns the pooled decompressor on close.
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        FileInputStream fis = new FileInputStream(file);
        CompressionInputStream cis = codec.createInputStream(fis, decompressor);
        return new BufferedReaderExt(new InputStreamReader(cis), decompressor);
    }
}
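A minimal usage sketch, assuming the helper above lives in HadoopUtils and that the caller captured the MapredContext in configure(MapredContext); the file name is hypothetical:

// Hypothetical caller: read a possibly-compressed side file line by line.
// `mapredContext` is assumed to have been saved in configure(MapredContext).
File file = new File("model.tsv.gz"); // hypothetical file name
try (BufferedReader reader = HadoopUtils.getBufferedReader(file, mapredContext)) {
    String line;
    while ((line = reader.readLine()) != null) {
        // each line is already decompressed at this point
    }
}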
public static int getTaskId() {
    MapredContext ctx = MapredContextAccessor.get();
    if (ctx == null) {
        throw new IllegalStateException("MapredContext is not set");
    }
    JobConf jobconf = ctx.getJobConf();
    if (jobconf == null) {
        throw new IllegalStateException("JobConf is not set");
    }
    // Try the old MRv1 property first, then fall back to the MRv2 name.
    int taskid = jobconf.getInt("mapred.task.partition", -1);
    if (taskid == -1) {
        taskid = jobconf.getInt("mapreduce.task.partition", -1);
        if (taskid == -1) {
            throw new IllegalStateException(
                "Both mapred.task.partition and mapreduce.task.partition are not set: "
                        + toString(jobconf));
        }
    }
    return taskid;
}
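A hedged sketch of a typical call site, assuming the method above is a static member of HadoopUtils:

// Hypothetical: derive a deterministic per-task random seed from the task partition id.
// getTaskId() throws IllegalStateException outside of a map/reduce task, so callers that
// may run client-side should prefer the getTaskId(int defaultValue) variant further below.
long baseSeed = 31L; // hypothetical base seed
int taskId = HadoopUtils.getTaskId();
java.util.Random rnd = new java.util.Random(baseSeed + taskId);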
public Text evaluate(@Nullable final String regexKey) throws HiveException {
    MapredContext ctx = MapredContextAccessor.get();
    if (ctx == null) {
        throw new HiveException("MapredContext is not set");
    }
    JobConf jobconf = ctx.getJobConf();
    if (jobconf == null) {
        throw new HiveException("JobConf is not set");
    }
    // Dump the configuration entries whose keys match the given regex.
    String dumped = HadoopUtils.toString(jobconf, regexKey);
    return val(dumped);
}
public static String getUniqueTaskIdString() {
    MapredContext ctx = MapredContextAccessor.get();
    if (ctx != null) {
        JobConf jobconf = ctx.getJobConf();
        if (jobconf != null) {
            int taskid = jobconf.getInt("mapred.task.partition", -1);
            if (taskid == -1) {
                taskid = jobconf.getInt("mapreduce.task.partition", -1);
            }
            if (taskid != -1) {
                return String.valueOf(taskid);
            }
        }
    }
    // No task id available (e.g., running client-side): fall back to a random UUID.
    return RandomUtils.getUUID();
}
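A small hedged example of why a unique per-task string is useful, again assuming the enclosing HadoopUtils class; the file naming scheme is hypothetical:

// Hypothetical: build a per-task temporary file name so concurrent tasks do not collide.
// Falls back to a UUID-based name when no task id is available.
String tmpName = "model_" + HadoopUtils.getUniqueTaskIdString() + ".tmp";
File tmpFile = new File(System.getProperty("java.io.tmpdir"), tmpName);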
public static int getTaskId(final int defaultValue) {
    MapredContext ctx = MapredContextAccessor.get();
    if (ctx == null) {
        return defaultValue;
    }
    JobConf jobconf = ctx.getJobConf();
    if (jobconf == null) {
        return defaultValue;
    }
    int taskid = jobconf.getInt("mapred.task.partition", -1);
    if (taskid == -1) {
        taskid = jobconf.getInt("mapreduce.task.partition", -1);
        if (taskid == -1) {
            return defaultValue;
        }
    }
    return taskid;
}