/**
 * Loads the AWS access id and key from the credentials file at {@code CRED_FILE}.
 * A missing or unreadable file is fatal: the process cannot start without credentials.
 *
 * @throws RuntimeException wrapping any failure while reading the credential file
 */
public ClearCredential()
{
    try (FileInputStream fis = new FileInputStream(CRED_FILE))
    {
        final Properties props = new Properties();
        props.load(fis);
        // Trim to tolerate stray whitespace in the properties file; absent keys become "".
        AWS_ACCESS_ID = trimmedOrEmpty(props.getProperty("AWSACCESSID"));
        AWS_KEY = trimmedOrEmpty(props.getProperty("AWSKEY"));
    }
    catch (Exception e)
    {
        logger.error("Exception with credential file ", e);
        throw new RuntimeException("Problem reading credential file. Cannot start.", e);
    }
}

/** Returns {@code value.trim()}, or the empty string when {@code value} is null. */
private static String trimmedOrEmpty(String value)
{
    return value != null ? value.trim() : "";
}
/**
 * Creates a directory on disk by delegating to {@code FileUtils.createDirectory}.
 *
 * @param dir path of the directory to create
 * @throws IOException propagated from {@code FileUtils.createDirectory}
 */
private static void mkdir(String dir) throws IOException
{
    FileUtils.createDirectory(dir);
}
/**
 * Recursively deletes the given directory if it exists; a missing directory
 * is treated as already removed and is not an error.
 *
 * @param dir path of the directory to delete
 * @throws IOException propagated from {@code FileUtils.deleteRecursive}
 */
private static void rmdir(String dir) throws IOException
{
    File dirFile = new File(dir);
    if (dirFile.exists())
    {
        // Reuse the File we already built instead of constructing a second one.
        FileUtils.deleteRecursive(dirFile);
    }
}
/**
 * Appends the entire contents of {@code memory} at the current end of the buffer.
 *
 * @param memory region to copy; all {@code memory.size()} bytes are written
 */
public void write(Memory memory)
{
    // Presumably ensureCapacity grows the backing buffer and returns the
    // length the buffer will have after this append — TODO confirm.
    long newLength = ensureCapacity(memory.size());
    buffer.put(length, memory, 0, memory.size());
    // Commit the new length only after the copy, so a failing put leaves
    // the previously recorded length untouched.
    length = newLength;
}
/**
 * Writes {@code length} bytes of {@code memory} starting at {@code offset},
 * delegating each chunk to the {@code ByteBuffer} overload of {@code write}.
 *
 * @throws IOException if the underlying write fails
 */
@Override
public void write(Memory memory, long offset, long length) throws IOException
{
    for (ByteBuffer chunk : memory.asByteBuffers(offset, length))
    {
        write(chunk);
    }
}
/**
 * Repositions this holder at the chunk containing {@code position}: snaps the
 * position to its aligned chunk start, reads that chunk from the source, and
 * returns this holder.
 */
@Override
public BufferHolder rebuffer(long position)
{
    final long alignedOffset = alignedPosition(position);
    offset = alignedOffset;
    source.readChunk(alignedOffset, buffer);
    return this;
}
@Override public long getFilePointer() { if (buffer == null) // closed already return rebufferer.fileLength(); return current(); }
/**
 * Releases the owned regions (quietly, suppressing errors per
 * {@code closeQuietly}) and then lets the superclass release its resources.
 */
public void close()
{
    regions.closeQuietly();
    super.close();
}
}
public static void cleanup() throws IOException { // clean up commitlog String[] directoryNames = { DatabaseDescriptor.getCommitLogLocation(), }; for (String dirName : directoryNames) { File dir = new File(dirName); if (!dir.exists()) throw new RuntimeException("No such directory: " + dir.getAbsolutePath()); FileUtils.deleteRecursive(dir); } // clean up data directory which are stored as data directory/table/data files for (String dirName : DatabaseDescriptor.getAllDataFileLocations()) { File dir = new File(dirName); if (!dir.exists()) throw new RuntimeException("No such directory: " + dir.getAbsolutePath()); FileUtils.deleteRecursive(dir); } }
/**
 * Loads static membership configuration from {@code DEFAULT_PROP_PATH}:
 * counts the racs (properties prefixed with {@code INSTANCES_PRE}) and
 * captures the comma-separated member list for this node's own rac.
 *
 * @throws IOException declared for callers; read failures are rethrown as RuntimeException
 */
public StaticMembership() throws IOException
{
    Properties config = new Properties();
    // try-with-resources replaces the manual closeQuietly pattern.
    try (FileInputStream fis = new FileInputStream(DEFAULT_PROP_PATH))
    {
        config.load(fis);
    }
    catch (Exception e)
    {
        logger.error("Exception with static membership file ", e);
        throw new RuntimeException("Problem reading static membership file. Cannot start.", e);
    }

    String racName = config.getProperty(RAC_NAME);
    racCount = 0;
    for (String name : config.stringPropertyNames())
    {
        if (name.startsWith(INSTANCES_PRE))
        {
            racCount += 1;
            // Only the entry for this node's rac contributes the membership list.
            if (name.equals(INSTANCES_PRE + racName))
                racMembership = Arrays.asList(config.getProperty(name).split(","));
        }
    }
}
/**
 * Copies a resource from within the jar to a directory, creating the
 * directory first. The target file name is the last path segment of the
 * resource.
 *
 * @param resource classpath location of the resource to copy
 * @param directory destination directory
 * @throws IOException if the resource is missing or the copy fails
 */
private static void copy(String resource, String directory) throws IOException
{
    FileUtils.createDirectory(directory);
    String fileName = resource.substring(resource.lastIndexOf("/") + 1);
    File file = new File(directory + System.getProperty("file.separator") + fileName);
    // try-with-resources guarantees both streams close even when the copy
    // fails; the original leaked them on any exception.
    try (InputStream is = CassandraAuthTest.class.getResourceAsStream(resource);
         OutputStream out = new FileOutputStream(file))
    {
        // Fail with a clear message instead of the NPE the original produced
        // for a missing resource.
        if (is == null)
            throw new IOException("Resource not found: " + resource);
        byte[] buf = new byte[1024];
        int len;
        while ((len = is.read(buf)) > 0)
        {
            out.write(buf, 0, len);
        }
    }
}
}
/**
 * Writes {@code length} bytes of {@code memory} starting at {@code offset} by
 * handing each backing chunk to the {@code ByteBuffer} overload of {@code write}.
 *
 * @throws IOException if the underlying write fails
 */
@Override
public void write(Memory memory, long offset, long length) throws IOException
{
    for (ByteBuffer chunk : memory.asByteBuffers(offset, length))
    {
        write(chunk);
    }
}
/**
 * Starts an embedded Cassandra instance at most once per JVM; subsequent
 * calls return immediately. The data directory is deleted (best effort)
 * before startup so each run begins from a clean state.
 *
 * @param pathToDataDir data directory wiped before the server starts
 */
public static void startCassandraInstance(String pathToDataDir)
        throws TTransportException, IOException, InterruptedException, SecurityException,
        IllegalArgumentException, NoSuchMethodException, IllegalAccessException,
        InvocationTargetException
{
    if (cassandraStarted)
    {
        return;
    }
    try
    {
        FileUtils.deleteRecursive(new File(pathToDataDir));
    }
    catch (AssertionError e)
    {
        // eat — the wipe is best effort; an AssertionError from the delete is
        // deliberately ignored so startup can proceed.
    }
    embedded = new EmbeddedServerHelper();
    try
    {
        embedded.setup();
    }
    catch (ConfigurationException ce)
    {
        throw new RuntimeException(ce);
    }
    cassandraStarted = true;
}
/**
 * Rewrites the rackdc properties file with a {@code dc_suffix} matching this
 * node's workload type. Real-time query nodes leave the file untouched.
 *
 * @throws RuntimeException wrapping any failure while reading or writing the file
 */
private void writeCassandraSnitchProperties()
{
    final NodeType nodeType = dseConfig.getNodeType();
    if (nodeType == NodeType.REAL_TIME_QUERY)
        return;

    Reader reader = null;
    try
    {
        String filePath = config.getCassHome() + "/conf/" + RACKDC_PROPERTY_FILENAME;
        reader = new FileReader(filePath);
        Properties properties = new Properties();
        properties.load(reader);

        String suffix = "";
        if (nodeType == NodeType.SEARCH)
            suffix = "_solr";
        if (nodeType == NodeType.ANALYTIC_HADOOP)
            suffix = "_hadoop";
        if (nodeType == NodeType.ANALYTIC_HADOOP_SPARK)
            suffix = "_hadoop_spark";
        if (nodeType == NodeType.ANALYTIC_SPARK)
            suffix = "_spark";
        properties.put("dc_suffix", suffix);

        // try-with-resources closes the writer; the original passed a bare
        // `new FileWriter(filePath)` to store() and leaked it.
        try (FileWriter writer = new FileWriter(filePath))
        {
            properties.store(writer, "");
        }
    }
    catch (Exception e)
    {
        throw new RuntimeException("Unable to read " + RACKDC_PROPERTY_FILENAME, e);
    }
    finally
    {
        FileUtils.closeQuietly(reader);
    }
}
FileUtils.closeQuietly(writer);
accessFilename, e.getMessage())); } finally { FileUtils.closeQuietly(in);
FileUtils.closeQuietly(in);