/** * Uploads a local file to HDFS * This method is not thread safe * * @param source * @param conf * @return * @throws IOException */ public static URI uploadToHDFS(URI source, HiveConf conf) throws IOException { Path localFile = new Path(source.getPath()); Path remoteFile = new Path(SessionState.get().getSparkSession().getHDFSSessionDir(), getFileName(source)); FileSystem fileSystem = FileSystem.get(remoteFile.toUri(), conf); // Overwrite if the remote file already exists. Whether the file can be added // on executor is up to spark, i.e. spark.files.overwrite fileSystem.copyFromLocalFile(false, true, localFile, remoteFile); Path fullPath = fileSystem.getFileStatus(remoteFile).getPath(); return fullPath.toUri(); }
/** * Uploads a local file to HDFS * * @param source * @param conf * @return * @throws IOException */ public static URI uploadToHDFS(URI source, HiveConf conf) throws IOException { Path localFile = new Path(source.getPath()); Path remoteFile = new Path(SessionState.get().getSparkSession().getHDFSSessionDir(), getFileName(source)); FileSystem fileSystem = FileSystem.get(conf); // Overwrite if the remote file already exists. Whether the file can be added // on executor is up to spark, i.e. spark.files.overwrite fileSystem.copyFromLocalFile(false, true, localFile, remoteFile); Path fullPath = fileSystem.getFileStatus(remoteFile).getPath(); return fullPath.toUri(); }
// Stub getSparkSession() so each invocation re-reads the holder at call time;
// doAnswer (rather than thenReturn) keeps the stub live as the active session
// changes during the test. NOTE(review): presumably activeSparkSession is an
// atomic/reference holder updated elsewhere in the test — confirm.
doAnswer(invocationOnMock -> activeSparkSession.get()
).when(mockSessionState).getSparkSession();
public static SparkSession getSparkSession(HiveConf conf, SparkSessionManager sparkSessionManager) throws HiveException { SessionState sessionState = SessionState.get(); synchronized (sessionState) { SparkSession sparkSession = sessionState.getSparkSession(); HiveConf sessionConf = sessionState.getConf(); // Spark configurations are updated close the existing session // In case of async queries or confOverlay is not empty, // sessionConf and conf are different objects if (sessionConf.getSparkConfigUpdated() || conf.getSparkConfigUpdated()) { sparkSessionManager.closeSession(sparkSession); sparkSession = null; conf.setSparkConfigUpdated(false); sessionConf.setSparkConfigUpdated(false); } sparkSession = sparkSessionManager.getSession(sparkSession, conf, true); sessionState.setSparkSession(sparkSession); return sparkSession; } }
public static SparkSession getSparkSession(HiveConf conf, SparkSessionManager sparkSessionManager) throws HiveException { SparkSession sparkSession = SessionState.get().getSparkSession(); HiveConf sessionConf = SessionState.get().getConf(); // Spark configurations are updated close the existing session // In case of async queries or confOverlay is not empty, // sessionConf and conf are different objects if (sessionConf.getSparkConfigUpdated() || conf.getSparkConfigUpdated()) { sparkSessionManager.closeSession(sparkSession); sparkSession = null; conf.setSparkConfigUpdated(false); sessionConf.setSparkConfigUpdated(false); } sparkSession = sparkSessionManager.getSession(sparkSession, conf, true); SessionState.get().setSparkSession(sparkSession); return sparkSession; }
/**
 * Verifies that Spark session ids are derived from the owning Hive session id
 * with a per-Hive-session counter suffix that resets for a new Hive session.
 */
@Test
public void testGetSessionId() throws HiveException {
  SessionState sessionState = SessionState.start(SESSION_HIVE_CONF);
  SparkSessionManager manager = SparkSessionManagerImpl.getInstance();

  // First two Spark sessions within one Hive session get suffixes _0 and _1.
  sessionState.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(sessionState.getSessionId() + "_0",
      sessionState.getSparkSession().getSessionId());
  sessionState.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(sessionState.getSessionId() + "_1",
      sessionState.getSparkSession().getSessionId());

  // Starting a fresh Hive session resets the counter back to _0.
  sessionState = SessionState.start(SESSION_HIVE_CONF);
  sessionState.setSparkSession(manager.getSession(null, SESSION_HIVE_CONF, true));
  assertEquals(sessionState.getSessionId() + "_0",
      sessionState.getSparkSession().getSessionId());
}
console.printInfo("Total MapReduce CPU Time Spent: " + Utilities.formatMsecToStr(totalCpu));
// Notify the current Spark session (if any) that this query finished; the
// session may be null when no Spark session was ever created for this Hive session.
SparkSession ss = SessionState.get().getSparkSession();
if (ss != null) {
  ss.onQueryCompletion(queryId);
QueryPlan.makeQueryId() : queryState.getQueryId();
// Notify the current Spark session (if any) that a new query was submitted;
// null-check mirrors the completion path, since a Spark session may not exist yet.
SparkSession ss = SessionState.get().getSparkSession();
if (ss != null) {
  ss.onQuerySubmission(queryId);
// Hand the Spark session over from the old SessionState to the new one, then
// null it out on the old state so only one state owns it (presumably so the
// old state's teardown does not also close it — confirm with the caller).
sparkSession = oldSs.getSparkSession();
ss.setSparkSession(sparkSession);
oldSs.setSparkSession(null);
// Transfer the Spark session from the old SessionState to the new one and clear
// the old reference so a single state owns the session (presumably to avoid a
// double close during the old state's cleanup — confirm with the caller).
sparkSession = oldSs.getSparkSession();
ss.setSparkSession(sparkSession);
oldSs.setSparkSession(null);
public static SparkSession getSparkSession(HiveConf conf, SparkSessionManager sparkSessionManager) throws HiveException { SparkSession sparkSession = SessionState.get().getSparkSession(); // Spark configurations are updated close the existing session if (conf.getSparkConfigUpdated()) { sparkSessionManager.closeSession(sparkSession); sparkSession = null; conf.setSparkConfigUpdated(false); } sparkSession = sparkSessionManager.getSession(sparkSession, conf, true); SessionState.get().setSparkSession(sparkSession); return sparkSession; } }