/** * Create the supporting directory for this plugin if it doesn't yet exist. * @throws IOException */ private void createIfNecessary() throws IOException { if(!config.createIfMissing()) { return; } try { // no need to exists here as FileSystemWrapper does an exists check and this is a noop if already existing. fs.mkdirs(config.getPath()); } catch (IOException ex) { try { if(fs.exists(config.getPath())) { // race creation, ignore. return; } } catch (IOException existsFailure) { // we're doing the check above to detect a race condition. if we fail, ignore the failure and just fall through to throwing the originally caught exception. ex.addSuppressed(existsFailure); } throw new IOException(String.format("Failure to create directory %s.", config.getPath().toString()), ex); } }
/**
 * Creates a file system storage plugin.
 *
 * <p>Only wires up collaborators and derives a few values from them; no I/O is
 * performed here. Assignment order is preserved as-is because {@code getNewFsConf()}
 * is invoked mid-construction and may observe partially-initialized state.
 *
 * @param config plugin configuration; its path becomes this plugin's base path
 * @param context sabot context providing the logical-plan persistence used by this plugin
 * @param name the source name this plugin is registered under
 * @param fs file system wrapper used for all file operations
 * @param idProvider lazy provider of this plugin's storage plugin id
 */
public FileSystemPlugin(final C config, final SabotContext context, final String name, FileSystemWrapper fs, Provider<StoragePluginId> idProvider) {
  this.name = name;
  this.config = config;
  this.idProvider = idProvider;
  this.fs = fs;
  this.context = context;
  // Fresh Hadoop-style configuration for this plugin instance.
  this.fsConf = getNewFsConf();
  this.lpPersistance = context.getLpPersistence();
  // Cache the configured root path; also reachable via config.getPath().
  this.basePath = config.getPath();
}
ReflectionManager(SabotContext sabotContext, JobsService jobsService, NamespaceService namespaceService, OptionManager optionManager, ReflectionGoalsStore userStore, ReflectionEntriesStore reflectionStore, ExternalReflectionStore externalReflectionStore, MaterializationStore materializationStore, DependencyManager dependencyManager, DescriptorCache descriptorCache, Set<ReflectionId> reflectionsToUpdate, WakeUpCallback wakeUpCallback, Supplier<ExpansionHelper> expansionHelper) { this.sabotContext = Preconditions.checkNotNull(sabotContext, "sabotContext required"); this.jobsService = Preconditions.checkNotNull(jobsService, "jobsService required"); this.namespaceService = Preconditions.checkNotNull(namespaceService, "namespaceService required"); this.optionManager = Preconditions.checkNotNull(optionManager, "optionManager required"); this.userStore = Preconditions.checkNotNull(userStore, "reflection user store required"); this.reflectionStore = Preconditions.checkNotNull(reflectionStore, "reflection store required"); this.externalReflectionStore = Preconditions.checkNotNull(externalReflectionStore); this.materializationStore = Preconditions.checkNotNull(materializationStore, "materialization store required"); this.dependencyManager = Preconditions.checkNotNull(dependencyManager, "dependency manager required"); this.descriptorCache = Preconditions.checkNotNull(descriptorCache, "descriptor cache required"); this.reflectionsToUpdate = Preconditions.checkNotNull(reflectionsToUpdate, "reflections to update required"); this.wakeUpCallback = Preconditions.checkNotNull(wakeUpCallback, "wakeup callback required"); this.expansionHelper = Preconditions.checkNotNull(expansionHelper, "sqlConvertSupplier required"); final FileSystemPlugin accelerationPlugin = sabotContext.getCatalogService() .getSource(ReflectionServiceImpl.ACCELERATOR_STORAGEPLUGIN_NAME); accelerationBasePath = accelerationPlugin.getConfig().getPath(); }
public JobResultsStore(final FileSystemPlugin plugin, final IndexedStore<JobId, JobResult> store, final BufferAllocator allocator) throws IOException { this.storageName = plugin.getName(); this.dfs = plugin.getFS(ImpersonationUtil.getProcessUserName()); this.jobStoreLocation = plugin.getConfig().getPath(); this.dfs.mkdirs(jobStoreLocation); this.store = store; this.allocator = allocator; this.jobResults = CacheBuilder.newBuilder() .maximumSize(100) .expireAfterAccess(15, TimeUnit.MINUTES) .build( new CacheLoader<JobId, JobData>() { @Override public JobData load(JobId key) throws Exception { final JobDataImpl jobDataImpl = new JobDataImpl(new LateJobLoader(key), key); return newJobDataReference(jobDataImpl); } }); }
@Override public void start() throws Exception { store = new ConfigurationStore(kvStoreProvider.get()); ClusterIdentity identity; Optional<ClusterIdentity> clusterIdentity = getClusterIdentityFromStore(store, kvStoreProvider.get()); if (!clusterIdentity.isPresent()) { // this is a new cluster, generating a new cluster identifier. identity = new ClusterIdentity() .setIdentity(UUID.randomUUID().toString()) .setVersion(toClusterVersion(VERSION)) .setCreated(System.currentTimeMillis()); identity = storeIdentity(identity); } else { identity = clusterIdentity.get(); } this.identity = identity; FileSystemPlugin supportPlugin = catalogServiceProvider.get().getSource(LOCAL_STORAGE_PLUGIN); Preconditions.checkNotNull(supportPlugin); final String supportPathURI = supportPlugin.getConfig().getPath().toString(); supportPath = new File(supportPathURI).toPath(); }
/**
 * Builds a download manager bound to the dataset-download storage plugin.
 *
 * @return a new {@link DatasetDownloadManager} writing under the download plugin's path
 */
private DatasetDownloadManager downloadManager() {
  // Resolve the dedicated download plugin each time; its config supplies both the
  // target path and the file system used for writing download files.
  final FileSystemPlugin downloadPlugin = catalogService.getSource(DATASET_DOWNLOAD_STORAGE_PLUGIN);
  return new DatasetDownloadManager(jobsService, namespaceService, downloadPlugin.getConfig().getPath(), downloadPlugin.getFs());
}

// NOTE(review): the body of validate() continues beyond this chunk; left untouched.
private void validate(DatasetPath path, VirtualDatasetUI ds) {
// NOTE(review): fragment — the enclosing method begins before and ends after this chunk.
try {
  try {
    // Look up view definitions stored as dot-files under the plugin's configured path;
    // the last element of tableSchemaPath is presumably the table name being resolved
    // — TODO confirm against the enclosing method.
    files = DotFileUtil.getDotFiles(getFS(schemaConfig.getUserName()), config.getPath(), tableSchemaPath.get(tableSchemaPath.size() - 1), DotFileType.VIEW);
  } catch (AccessControlException e) {
    // Surface permission failures only when the schema config does not ask to ignore them.
    if (!schemaConfig.getIgnoreAuthErrors()) {
/**
 * Reports the health of this source.
 *
 * <p>For pseudo-distributed file systems no probe is attempted and the source is
 * reported good; otherwise a listing of the configured root decides the state.
 *
 * @return {@code GOOD} when the root is listable (or the fs is pdfs), otherwise a bad
 *         state wrapping the probe failure
 */
@Override
public SourceState getState() {
  final FileSystemWrapper fs = getFS(ImpersonationUtil.getProcessUserName());

  // Pseudo-distributed file systems are not probed; assume healthy.
  if (fs.isPdfs()) {
    return SourceState.GOOD;
  }

  try {
    fs.listStatus(config.getPath());
    return SourceState.GOOD;
  } catch (Exception e) {
    return SourceState.badState(e);
  }
}
@Test public void testCTASAndDropTable() throws Exception { // Create a table SqlQuery ctas = getQueryFromSQL("CREATE TABLE \"$scratch\".\"ctas\" AS select * from cp.\"json/users.json\" LIMIT 1"); Job ctasJob = jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(ctas) .setQueryType(QueryType.UI_RUN) .build(), NoOpJobStatusListener.INSTANCE); ctasJob.getData().loadIfNecessary(); FileSystemPlugin plugin = (FileSystemPlugin) getCurrentDremioDaemon().getBindingProvider().lookup(CatalogService.class).getSource("$scratch"); // Make sure the table data files exist File ctasTableDir = new File(plugin.getConfig().getPath().toString(), "ctas"); assertTrue(ctasTableDir.exists()); assertTrue(ctasTableDir.list().length >= 1); // Now drop the table SqlQuery dropTable = getQueryFromSQL("DROP TABLE \"$scratch\".\"ctas\""); Job dropTableJob = jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(dropTable) .setQueryType(QueryType.ACCELERATOR_DROP) .build(), NoOpJobStatusListener.INSTANCE); dropTableJob.getData().loadIfNecessary(); // Make sure the table data directory is deleted assertFalse(ctasTableDir.exists()); }
// NOTE(review): fragment — the enclosing test method lies outside this chunk.
// Verify the CTAS output directory exists under the plugin path and holds at
// least one data file.
final File ctasTableDir = new File(plugin.getConfig().getPath().toString(), "ctas");
assertTrue(ctasTableDir.exists());
assertTrue(ctasTableDir.list().length >= 1);