/**
 * Prints each entity's string form to the console in sorted order, one per
 * line, each prefixed with {@code prefix}. Output is never silenced
 * (isSilent is passed as false).
 */
static void printEntities(LogHelper console, Set<?> entities, String prefix) {
  List<String> sortedNames = new ArrayList<String>(entities.size());
  for (Object entity : entities) {
    sortedNames.add(entity.toString());
  }
  Collections.sort(sortedNames);
  sortedNames.forEach(name -> console.printInfo(prefix + name, false));
}
}
/**
 * Prints the number of rows loaded into each table for the given query.
 * No-op when the query id is unknown.
 *
 * @param queryId id of the query whose row counts should be printed
 */
@Override
public void printRowCount(String queryId) {
  QueryInfo ji = queryInfoMap.get(queryId);
  if (ji == null) {
    return;
  }
  // Iterate entries directly instead of keySet() + get(), avoiding a second
  // map lookup per table.
  ji.rowCountMap.forEach((tab, count) -> console.printInfo(count + " Rows loaded to " + tab));
}
/**
 * Renders a progress report: written to the log when HiveServer2 in-place
 * progress is enabled, otherwise echoed to the console.
 */
@Override
public void renderReport(String report) {
  if (!hiveServer2InPlaceProgressEnabled) {
    monitor.console.printInfo(report);
  } else {
    LOGGER.info(report);
  }
}
}
/**
 * Emits a warning-prefixed message on the console info stream; the false
 * flag ensures it is printed even when the session is silent.
 */
private void warn(String msg) {
  SessionState.getConsole().printInfo(String.format("Warning: %s", msg), false);
}
/**
 * Removes the given jars from the class path.
 *
 * @param jarsToUnregister jars to drop from the class path
 * @return true when removal succeeded, false when it failed (the error is
 *         reported on the console rather than rethrown)
 */
static boolean unregisterJar(List<String> jarsToUnregister) {
  LogHelper console = getConsole();
  String[] jars = jarsToUnregister.toArray(new String[0]);
  try {
    Utilities.removeFromClassPath(jars);
    console.printInfo("Deleted " + jarsToUnregister + " from class path");
    return true;
  } catch (IOException e) {
    console.printError(
        "Unable to unregister " + jarsToUnregister + "\nException: " + e.getMessage(),
        "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
    return false;
  }
}
/**
 * Adds the given jars to the class path and installs the resulting loader as
 * both the thread context class loader and the session conf's class loader.
 *
 * @param newJars jars to add to the class path
 * @throws IllegalArgumentException if registration fails, with the original
 *         exception preserved as the cause
 */
static void registerJars(List<String> newJars) throws IllegalArgumentException {
  LogHelper console = getConsole();
  try {
    ClassLoader current = Thread.currentThread().getContextClassLoader();
    ClassLoader augmented = Utilities.addToClassPath(current, newJars.toArray(new String[0]));
    Thread.currentThread().setContextClassLoader(augmented);
    SessionState.get().getConf().setClassLoader(augmented);
    console.printInfo("Added " + newJars + " to class path");
  } catch (Exception e) {
    throw new IllegalArgumentException("Unable to register " + newJars, e);
  }
}
/**
 * Emits a warning-prefixed message on the console info stream (subject to the
 * session's silent setting).
 */
private void warn(String msg) {
  SessionState.getConsole().printInfo("Warning: " + msg);
}
/**
 * Initializes input inspectors. With no arguments (the deprecated
 * {@code unix_timestamp()} form) it lazily captures the query's current
 * timestamp once and warns the user to migrate to {@code current_timestamp}.
 *
 * @param arguments the argument object inspectors
 * @throws UDFArgumentException if the superclass rejects the arguments
 */
@Override
protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length > 0) {
    super.initializeInput(arguments);
    return;
  }
  // Zero-argument form: capture the query start time only once per instance.
  if (currentTimestamp != null) {
    return;
  }
  currentTimestamp = new LongWritable(0);
  setValueFromTs(currentTimestamp,
      Timestamp.ofEpochMilli(SessionState.get().getQueryCurrentTimestamp().toEpochMilli()));
  String msg = "unix_timestamp(void) is deprecated. Use current_timestamp instead.";
  SessionState.getConsole().printInfo(msg, false);
}
/**
 * Prints the configuration knobs controlling the number of reducers, so the
 * user knows how to override the value that will be computed automatically.
 * (Note: this only prints hints; it does not set anything.)
 *
 * @throws IOException declared for interface compatibility with callers
 */
private void printConfigInfo() throws IOException {
  console.printInfo("In order to change the average load for a reducer (in bytes):");
  console.printInfo(" set " + HiveConf.ConfVars.BYTESPERREDUCER.varname + "=<number>");
  console.printInfo("In order to limit the maximum number of reducers:");
  console.printInfo(" set " + HiveConf.ConfVars.MAXREDUCERS.varname + "=<number>");
  console.printInfo("In order to set a constant number of reducers:");
  // Use .varname explicitly, consistent with the two lines above; relying on
  // ConfVars.toString() returning the variable name is implicit and fragile.
  console.printInfo(" set " + HiveConf.ConfVars.HADOOPNUMREDUCERS.varname + "=<number>");
}
/**
 * Builds a progress monitor for the given DAG status. On failure to gather
 * progress information the error is reported on the console and the NULL
 * monitor is returned instead of propagating the exception.
 */
ProgressMonitor progressMonitor(DAGStatus status, Map<String, Progress> progressMap) {
  try {
    return new TezProgressMonitor(dagClient, status, topSortedWorks, progressMap, console,
        executionStartTime);
  } catch (IOException | TezException e) {
    String details = "Getting Progress Information: " + e.getMessage()
        + " stack trace: " + ExceptionUtils.getStackTrace(e);
    console.printInfo(details);
    return TezProgressMonitor.NULL;
  }
}
}
private TezSessionState getNewTezSessionOnError( TezSessionState oldSession) throws Exception { // Note: we don't pass the config to reopen. If the session was already open, it would // have kept running with its current config - preserve that behavior. TezSessionState newSession = oldSession.reopen(); console.printInfo("Session re-established."); return newSession; }
/**
 * Renders a progress report: routed to the log when HiveServer2 in-place
 * progress is enabled, otherwise echoed to the console.
 */
@Override
void renderReport(String report) {
  if (hiveServer2InPlaceProgressEnabled) {
    LOGGER.info(report);
    return;
  }
  monitor.console.printInfo(report);
}
}
/**
 * Prints a short summary of the workload-management events for this query:
 * a title line followed by one line per event with its type, pool, and
 * cluster percentage.
 *
 * @param console destination for the summary
 * @throws ExecutionException if waiting for the return-session event fails
 * @throws InterruptedException if interrupted while waiting
 */
public void shortPrint(final SessionState.LogHelper console)
    throws ExecutionException, InterruptedException {
  waitForReturnSessionEvent();
  console.printInfo(WmContext.WM_EVENTS_TITLE, false);
  for (WmEvent wmEvent : getQueryWmEvents()) {
    StringBuilder line = new StringBuilder("Event: ").append(wmEvent.getEventType())
        .append(" Pool: ").append(wmEvent.getWmTezSessionInfo().getPoolName())
        .append(" Cluster %: ")
        .append(WmContext.DECIMAL_FORMAT.format(wmEvent.getWmTezSessionInfo().getClusterPercent()));
    console.printInfo(line.toString());
  }
}
/**
 * When running on YARN, prints the application id and the command needed to
 * kill the application. Silently does nothing for non-YARN masters or when
 * the application id is not yet available.
 */
private void printAppInfo() {
  String sparkMaster = hiveConf.get("spark.master");
  if (sparkMaster == null || !sparkMaster.startsWith("yarn")) {
    return;
  }
  String appID = sparkJobStatus.getAppID();
  if (appID == null) {
    return;
  }
  console.printInfo("Running with YARN Application = " + appID);
  console.printInfo("Kill Command = " + HiveConf.getVar(hiveConf, HiveConf.ConfVars.YARNBIN)
      + " application -kill " + appID);
}
}
/**
 * Routes a message either to the session console or, when console output is
 * disabled, to the logger.
 */
private void consoleMessage(String message) {
  if (!useConsole) {
    LOG.info(message);
    return;
  }
  SessionState.getConsole().printInfo(message);
}
/**
 * Warns on the console that a non-primitive column type was supplied where
 * only primitive types are accepted.
 */
private static void logTypeWarning(String colName, String colType) {
  String warning = "WARNING: Only primitive type arguments are accepted but "
      + colType + " is passed for " + colName + ".";
  CONSOLE.printInfo(warning);
}
/**
 * Logs info into the log file, and if the LogHelper is not silent then into the HiveServer2 or
 * HiveCli info stream too. Handles an extra detail which will not be printed if null.
 * BeeLine uses the operation log file to show the logs to the user, so depending on the
 * BeeLine settings it could be shown to the user.
 * @param info The log message
 * @param detail Extra detail to log which will be not printed if null
 */
public void printInfo(String info, String detail) {
  printInfo(info, detail, getIsSilent());
}
/**
 * Logs info into the log file, and if not silent then into the HiveServer2 or HiveCli info
 * stream too. The isSilent parameter is used instead of the LogHelper isSilent attribute.
 * BeeLine uses the operation log file to show the logs to the user, so depending on the
 * BeeLine settings it could be shown to the user.
 * @param info The log message
 * @param isSilent If true then the message will not be printed to the info stream
 */
public void printInfo(String info, boolean isSilent) {
  printInfo(info, null, isSilent);
}
/**
 * Logs info into the log file, and if the LogHelper is not silent then into the HiveServer2 or
 * HiveCli info stream too.
 * BeeLine uses the operation log file to show the logs to the user, so depending on the
 * BeeLine settings it could be shown to the user.
 * @param info The log message
 */
public void printInfo(String info) {
  printInfo(info, null);
}