/** Returns the engine type of the manager that owns this object. */
public DDFManager.EngineType getEngineType() {
  DDFManager manager = this.getManager();
  return manager.getEngineType();
}
/**
 * @return The engine name we are built on, e.g., "spark" or "java_collections"
 */
public String getEngine() {
  DDFManager manager = this.getManager();
  return manager.getEngine();
}
/**
 * Exports this DDF as a CSV file, delegating to the owning manager.
 *
 * @param fileURL        destination of the CSV output
 * @param fieldSeparator separator placed between fields
 * @param hasHead        whether to emit a header row
 * @throws DDFException if the export fails
 */
public void export2csv(String fileURL, String fieldSeparator, Boolean hasHead) throws DDFException {
  DDFManager manager = this.getManager();
  manager.export2csv(this, fileURL, fieldSeparator, hasHead);
}
/**
 * Validates a proposed DDF name.
 *
 * @param name the candidate name
 * @throws DDFException if a DDF with this name already exists, or if the name
 *         contains characters other than letters, digits, dash and underscore
 */
private void validateName(String name) throws DDFException {
  // getDDFByName throws when the name is unknown, so a normal return means the
  // name is already taken. Use a primitive flag; boxing buys nothing here.
  boolean nameExists;
  try {
    this.getManager().getDDFByName(name);
    nameExists = true;
  } catch (DDFException e) {
    nameExists = false;
  }
  if (nameExists) {
    throw new DDFException(String.format("DDF with name %s already exists", name));
  }
  // matches() tests the entire input, which is what the ^...$ anchors intend;
  // find() happened to work only because the pattern was anchored.
  Pattern p = Pattern.compile("^[a-zA-Z0-9_-]*$");
  if (!p.matcher(name).matches()) {
    throw new DDFException(String.format("Invalid name %s, only allow alphanumeric (uppercase and lowercase a-z, "
        + "numbers 0-9) and dash (\"-\") and underscore (\"_\")", name));
  }
}
/** Returns the manager of the underlying DDF, or null when no DDF is attached. */
public DDFManager getManager() {
  if (this.getDDF() == null) {
    return null;
  }
  return this.getDDF().getManager();
}
public DDF sql2ddf(String sqlCommand) throws DDFException { try { // sqlCommand = sqlCommand.replace("@this", this.getTableName()); sqlCommand = sqlCommand.replace("@this", "{1}"); sqlCommand = String.format(sqlCommand, "{1}"); SQLDataSourceDescriptor sqlDS = new SQLDataSourceDescriptor(sqlCommand, null, null,null, this.getUUID().toString()); return this.getManager().sql2ddf(sqlCommand, null, sqlDS); // return this.getManager().sql2ddf(sqlCommand); } catch (Exception e) { throw new DDFException(String.format("Error executing queries for ddf %s", this.getTableName()), e); } }
/**
 * Executes a SQL command and returns its typed result.
 *
 * The token "@this" is first replaced with this DDF's table name; the command is
 * then passed through String.format with the table name as the sole argument —
 * presumably to fill legacy "%s" placeholders. NOTE(review): any literal '%' in
 * the SQL (e.g. LIKE '%x%') will make String.format throw here; confirm whether
 * callers rely on the %s substitution before changing it.
 *
 * @param sqlCommand   SQL text; may reference this DDF as "@this"
 * @param errorMessage format string for the failure message; "%s" receives the table name
 * @return the typed SQL result
 * @throws DDFException wrapping any failure during execution
 */
public SqlTypedResult sqlTyped(String sqlCommand, String errorMessage) throws DDFException {
  try {
    sqlCommand = sqlCommand.replace("@this", this.getTableName());
    return this.getManager().sqlTyped(String.format(sqlCommand, this.getTableName()));
  } catch (Exception e) {
    throw new DDFException(String.format(errorMessage, this.getTableName()), e);
  }
}
public SqlResult sql(String sqlCommand, String errorMessage) throws DDFException { try { // sqlCommand = sqlCommand.replace("@this", this.getTableName()); // TODO: what is format? // return this.getManager().sql(String.format(sqlCommand, this.getTableName())); sqlCommand = sqlCommand.replace("@this", "{1}"); sqlCommand = String.format(sqlCommand, "{1}"); SQLDataSourceDescriptor sqlDS = new SQLDataSourceDescriptor(sqlCommand, null, null,null, this.getUUID().toString()); return this.getManager().sql(sqlCommand, null, sqlDS); } catch (Exception e) { throw new DDFException(String.format(errorMessage, this.getTableName()), e); } }
@Override public void duplicate(String fromNamespace, String fromName, String toNamespace, String toName, boolean doOverwrite) throws DDFException { IPersistible from = this.load(fromNamespace, fromName); if (from instanceof DDF) { DDF to = (DDF) from; // to.setNamespace(toNamespace); to.getManager().setDDFName(to, toName); to.persist(); } else { throw new DDFException("Can only duplicate DDFs"); } }
private DDF createDDFWrapper() throws DDFException { DDF ddf = this.newContainerDDFImpl(); if (ddf == null) throw new DDFException(String.format("Cannot create new container DDF for %s: %s/%s", this.getClass(), this.getNamespace(), this.getName())); // Make sure we have a namespace and name if (Strings.isNullOrEmpty(this.getName())) this.setName(ddf.getSchemaHandler().newTableName(this)); // Make sure the DDF's names match ours ddf.getManager().setDDFName(ddf, this.getName()); return ddf; }
/**
 * Computes residuals from the prediction DDF this handler is attached to and
 * wraps them in a new single-column ("residuals double") DDF.
 *
 * The heavy lifting happens in MetricsMapperResiduals, applied per element of
 * the prediction RDD. The intermediate null checks below only log diagnostics;
 * they do not alter control flow.
 *
 * @return the residuals DDF (may be null if newDDF failed; see the last log line)
 * @throws DDFException on DDF-creation failure
 */
@Override
public DDF residuals() throws DDFException {
  SparkDDF predictionDDF = (SparkDDF) this.getDDF();
  // Each prediction row is materialized as a double[]; the mapper turns it into a residual.
  JavaRDD<double[]> predictionRDD = predictionDDF.getJavaRDD(double[].class);
  JavaRDD<double[]> result = predictionRDD.map(new MetricsMapperResiduals());
  // Diagnostic logging only — none of these branches short-circuits the computation.
  if (result == null) mLog.error(">> javaRDD result of MetricMapper residuals is null");
  if (predictionDDF.getManager() == null) mLog.error(">> predictionDDF.getManager() is null");
  if (result.rdd() == null) mLog.error(">> result.rdd() is null");
  if (predictionDDF.getSchema() == null) mLog.error(">> predictionDDF.getSchema() is null");
  if (predictionDDF.getName() == null) mLog.error(">> predictionDDF.getName() is null");
  // Single-column schema for the output DDF.
  Schema schema = new Schema("residuals double");
  DDFManager manager = this.getDDF().getManager();
  DDF residualDDF = manager
      .newDDF(manager, result.rdd(), new Class<?>[] { RDD.class, double[].class }, null, schema);
  if (residualDDF == null) mLog.error(">>>>>>>>>>>.residualDDF is null");
  return residualDDF;
}
@Override public DDF copyFrom(DDF ddf, String tgtname) throws DDFException { mLog.info(String.format(">>> Copy new ddf %s from ddf %s", tgtname, ddf.getName())); DDFManager fromManager = ddf.getManager(); DataSourceDescriptor dataSourceDescriptor = fromManager.getDataSourceDescriptor(); if (dataSourceDescriptor instanceof JDBCDataSourceDescriptor) { // It's a jdbc ddf. JDBCDataSourceDescriptor jdbcDS = (JDBCDataSourceDescriptor) dataSourceDescriptor; JDBCDataSourceDescriptor loadDS = new JDBCDataSourceDescriptor(jdbcDS.getDataSourceUri(), jdbcDS.getCredentials(), ddf.getTableName()); DDF tgtddf = this.load(loadDS); this.setDDFName(tgtddf, tgtname); return tgtddf; } else { throw new DDFException("Unsupported operation in copyFrom"); } }
// Fragment of an enclosing summary routine (method boundaries are outside this view).
// Builds and runs a Postgres query for this column's five-number summary — presumably
// min/Q1/median/Q3/max given the helper name and the 5 values copied below; TODO confirm.
String sql = buildPostgresFiveNumSql(column, this.getDDF().getTableName());
// The query returns a single row with tab-separated values.
String[] ret = this.getDDF().getManager().sql(sql, false).getRows().get(0).split("\t");
// Copy the 5 values into the result buffer at offset k, then advance the cursor.
System.arraycopy(ret, 0, rs, k, 5);
k += 5;
// Fragment of an enclosing routine; the for-loop body continues past this view.
// Spark-specific: obtain the HiveContext from the manager to issue SQL directly.
// NOTE(review): the cast assumes the manager is always a SparkDDFManager here — confirm.
HiveContext sqlContext = ((SparkDDFManager) this.getDDF().getManager()).getHiveContext();
// For each categorical column, query its distinct non-null values.
for (Column column : categoricalColumns) {
  String sqlCmd = String.format("select distinct(%s) from %s where %s is not null",
      column.getName(), this.getDDF().getTableName(), column.getName());