/**
 * Allocates memory for the supplied vectors, then lets the wrapped {@code inner} reader
 * allocate against the mutator's field-vector map.
 *
 * @param vectorMap map of field names to value vectors to allocate
 * @throws OutOfMemoryException if allocation fails
 */
@Override
public void allocate(Map<String, ValueVector> vectorMap) throws OutOfMemoryException {
  super.allocate(vectorMap);
  inner.allocate(mutator.getFieldVectorMap());
}
/**
 * Builds a {@link SchemaConfig} from the values accumulated on this builder.
 *
 * @return a new {@code SchemaConfig} carrying an authorization context (username and
 *     ignore-auth-errors flag), the default schema, option manager, view expansion context,
 *     internal-source exposure flag, and validity checker
 */
public SchemaConfig build() {
  return new SchemaConfig(
      new AuthorizationContext(username, ignoreAuthErrors),
      defaultSchema,
      optionManager,
      viewExpansionContext,
      exposeInternalSources,
      validityChecker);
}
} // closes the enclosing class; its opening brace is outside this chunk
/**
 * Creates a catalog scoped to the given user.
 *
 * @param userName user for whom the catalog is created
 * @return catalog obtained from the catalog service via a schema config built for that user
 */
private Catalog createCatalog(String userName) {
  return catalogService.getCatalog(SchemaConfig.newBuilder(userName).build());
}
/**
 * Resolves the storage plugin registered under the given plugin id.
 *
 * @param <T> expected plugin type; the caller is responsible for requesting a compatible type
 * @param pluginId id of the plugin to look up
 * @return the registered plugin cast to {@code T}, or {@code null} when none is registered
 * @throws ExecutionSetupException if the plugin cannot be set up
 */
@SuppressWarnings("unchecked")
public <T extends StoragePlugin> T getStoragePlugin(StoragePluginId pluginId) throws ExecutionSetupException {
  final StoragePlugin resolved = sources.getSource(pluginId);
  return resolved == null ? null : (T) resolved;
}
} // closes the enclosing class; its opening brace is outside this chunk
// NOTE(review): truncated chunk — the enclosing method begins before this line and the
// ElasticsearchSubScan constructor call continues past it. The anonymous function maps each
// SplitWork back to its underlying DatasetSplit; the results are collected into a list that
// presumably feeds the sub-scan being constructed — confirm against the full method.
@Override
public DatasetSplit apply(SplitWork input) {
  return input.getSplit();
}}).toList();
return new ElasticsearchSubScan(
/**
 * Writes a window of records from the incoming batch.
 *
 * <p>The underlying {@link EventBasedRecordWriter} is created lazily on first use and
 * reused for all subsequent calls.
 *
 * @param offset index of the first record to write
 * @param length number of records to write
 * @return the count reported by the underlying writer
 * @throws IOException if the write fails
 */
@Override
public final int writeBatch(int offset, int length) throws IOException {
  EventBasedRecordWriter writer = this.eventBasedRecordWriter;
  if (writer == null) {
    writer = new EventBasedRecordWriter(incoming, this);
    this.eventBasedRecordWriter = writer;
  }
  return writer.write(offset, length);
}
// NOTE(review): truncated chunk — the transformation pipeline this anonymous function belongs
// to starts before this line; the trailing braces close that enclosing method. Wraps each
// DatasetSplit in a SplitWork carrying the node map and affinity type.
@Override
public SplitWork apply(DatasetSplit split) {
  return new SplitWork(split, nodeMap, affinityType);
}}); }
/**
 * Gets dataset retrieval options as defined on the source.
 *
 * <p>Options derived from the source's metadata policy take precedence; anything not set
 * there falls back to {@link DatasetRetrievalOptions#DEFAULT}.
 *
 * @return dataset retrieval options defined on the source
 */
DatasetRetrievalOptions getDefaultRetrievalOptions() {
  return DatasetRetrievalOptions.fromMetadataPolicy(metadataPolicy)
      .withFallback(DatasetRetrievalOptions.DEFAULT);
}
/**
 * Creates a {@link Builder}.
 *
 * @return new builder
 */
public static Builder newBuilder() {
  return new Builder();
}
/**
 * Returns a human-readable description of this instance, extending the superclass
 * representation with the Hadoop path and record counters. Intended for logging/debugging.
 */
@Override
public String toString() {
  StringBuilder description = new StringBuilder(super.toString());
  description.append("[hadoopPath = ").append(hadoopPath);
  description.append(", recordCount = ").append(recordCount);
  description.append(", runningRecordCount = ").append(runningRecordCount);
  description.append(", ...]");
  return description.toString();
}
/**
 * Creates a group scan over the given table metadata.
 *
 * @param dataset table metadata backing this scan; its user is passed to the superclass
 * @param columns columns to project
 */
public AbstractGroupScan( TableMetadata dataset, List<SchemaPath> columns) {
  super(dataset.getUser());
  this.dataset = dataset;
  this.columns = columns;
}
/**
 * Reads up to {@code b.length} bytes at the current position into {@code b} and advances
 * the position by the number of bytes actually read.
 *
 * @param b destination buffer
 * @return number of bytes read, or -1 at end of stream
 * @throws IOException if the underlying positioned read fails
 */
@Override
public int read(byte[] b) throws IOException {
  int bytesRead = read(pos, b, 0, b.length);
  // Bug fix: the positioned read returns -1 at end of stream; unconditionally adding it
  // would silently move the position backwards. Only advance on a successful read.
  if (bytesRead > 0) {
    pos += bytesRead;
  }
  return bytesRead;
}
/**
 * Creates a writer that iterates the given batch and forwards field values to the supplied
 * row-based record writer.
 *
 * @param batch batch of vectors to read records from
 * @param recordWriter sink that receives the per-field write events
 * @throws IOException if the field writers cannot be initialized
 */
public EventBasedRecordWriter(VectorAccessible batch, RowBasedRecordWriter recordWriter) throws IOException {
  this.batch = batch;
  this.recordWriter = recordWriter;
  initFieldWriters();
}
/**
 * @return whether authorization errors reported while {@link SchemaPlus} instances interact
 *     with the underlying storage should be ignored
 */
public boolean getIgnoreAuthErrors() {
  return authContext.getIgnoreAuthErrors();
}
/**
 * Seeks to the given position and reports that a "new source" was found.
 *
 * <p>NOTE(review): there is no alternate-replica logic here — this is a plain seek that
 * always returns {@code true}. Confirm callers only rely on the position change, per the
 * Hadoop {@code Seekable#seekToNewSource} contract.
 *
 * @param arg0 target position in bytes
 * @return always {@code true}
 * @throws IOException if the seek fails
 */
@Override
public boolean seekToNewSource(long arg0) throws IOException {
  seek(arg0);
  return true;
}
/**
 * Returns the computed value, or rethrows the failure captured while producing it.
 *
 * @return the stored value when no error was recorded
 * @throws IOException the recorded exception itself when it is already an
 *     {@link IOException}, otherwise the recorded exception converted to one
 */
public final V getValue() throws IOException {
  if (e == null) {
    return value;
  }
  if (e instanceof IOException) {
    throw (IOException) e;
  }
  throw convertToIOException(e);
}
/**
 * Create new builder.
 *
 * @param username Name of the user accessing the storage sources.
 * @return new builder initialized with the given username
 */
public static Builder newBuilder(final String username) {
  return new Builder(username);
}
/**
 * @return name of the user to impersonate while {@link SchemaPlus} instances interact with
 *     the underlying storage
 */
public String getUserName() {
  return authContext.getUsername();
}