/**
 * Seeds the store with a small directory tree under {@code base}:
 * one file at the root plus two files inside a "dir" subdirectory.
 * Used by tests that assert directory listing/marker behavior.
 */
private void createTestFiles(String base) throws IOException {
  for (String suffix : new String[] {"/file1", "/dir/file2", "/dir/file3"}) {
    store.storeEmptyFile(base + suffix);
  }
}
/**
 * Initializes this FileSystem instance against the given S3 URI.
 *
 * <p>Binds (or lazily creates) the backing {@code NativeFileSystemStore},
 * records the canonical {@code scheme://authority} URI, and sets the
 * working directory to {@code /user/<current user>}.
 *
 * @param uri  filesystem URI; only scheme and authority are retained
 * @param conf configuration used to build the default store
 * @throws IOException if the store fails to initialize
 */
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  // FIX: call super.initialize first, as the FileSystem contract requires
  // and as the other initialize() implementation in this file already does;
  // skipping it leaves base-class state (e.g. statistics) unset.
  super.initialize(uri, conf);
  if (store == null) {
    store = createDefaultStore(conf);
  }
  store.initialize(uri, conf);
  setConf(conf);
  this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
  this.workingDir =
      new Path("/user", System.getProperty("user.name")).makeQualified(this);
}
// NOTE(review): fragment — seek() is complete, but getPos() is cut off at the end of this
// line (its body lies outside this view; presumably it returns this.pos — confirm).
// seek() repositions by closing the current store stream and reopening it at the requested
// byte offset (S3 streams are not natively seekable), then records the new position.
public synchronized void seek(long pos) throws IOException { in.close(); in = store.retrieve(key, pos); this.pos = pos; } public synchronized long getPos() throws IOException {
// NOTE(review): fragment with unbalanced braces — the closing logic lies outside this view.
// Existence-probe sequence, cheapest check first:
//   1. plain object key            -> the path is a file (meta != null);
//   2. key + FOLDER_SUFFIX marker  -> the path is an explicitly marked directory;
//   3. a 1-entry listing of the key prefix -> any child file or common prefix means the
//      path is an "implicit" directory (exists only as a key prefix).
FileMetadata meta = store.retrieveMetadata(key); if (meta != null) { if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) { PartialListing listing = store.list(key, 1, null); if (listing.getFiles().length > 0 || listing.getCommonPrefixes().length > 0) {
// NOTE(review): fragment with unbalanced braces — the loop/paging close lies outside this
// view. Rename logic: a plain file is renamed with a single store.rename(); a directory is
// renamed by moving its folder marker (delete old FOLDER_SUFFIX, create new one) and then
// renaming every child key page-by-page (priorLastKey is the S3 listing continuation token),
// rewriting each key's prefix from srcKey to dstKey.
boolean srcIsFile = existsAndIsFile(src); if (srcIsFile) { store.rename(srcKey, dstKey); } else { store.delete(srcKey + FOLDER_SUFFIX); store.storeEmptyFile(dstKey + FOLDER_SUFFIX); PartialListing listing = store.listAll(srcKey, S3_MAX_LISTING_LENGTH, priorLastKey); for (FileMetadata file : listing.getFiles()) { store.rename(file.getKey(), dstKey + file.getKey().substring(srcKey.length()));
// NOTE(review): garbled extraction fragment — it opens mid string-literal, references a
// variable `key` that is not defined in this span, and ends with a `catch` whose `try` is
// not visible. It appears to splice together the copy-then-delete rename path: a file is
// copied to dstKey then its source deleted; a directory gets a dstKey folder marker, then
// each listed child is copied (child keys collected in keysToDelete for later removal) and
// finally the source folder marker is deleted. Do not treat this span as compilable code.
"src is file, so doing copy then delete in S3"); store.copy(srcKey, dstKey); store.delete(srcKey); } else { if(LOG.isDebugEnabled()) { LOG.debug(debugPreamble + "src is directory, so copying contents"); store.storeEmptyFile(dstKey + FOLDER_SUFFIX); PartialListing listing = store.list(srcKey, S3_MAX_LISTING_LENGTH, priorLastKey, true); for (FileMetadata file : listing.getFiles()) { keysToDelete.add(file.getKey()); store.copy(file.getKey(), dstKey + file.getKey().substring(srcKey.length())); store.delete(key); store.delete(srcKey + FOLDER_SUFFIX); } catch (FileNotFoundException e) {
/**
 * Flushes the locally buffered backup file to the object store, then
 * releases all resources. Idempotent: calls after the first are no-ops.
 * The temporary backup file is removed (or a warning logged) whether or
 * not the upload succeeds.
 */
@Override
public synchronized void close() throws IOException {
  if (closed) {
    return;
  }
  backupStream.close();
  try {
    byte[] md5Hash = (digest != null) ? digest.digest() : null;
    store.storeFile(key, backupFile, md5Hash);
  } finally {
    boolean removed = backupFile.delete();
    if (!removed) {
      LOG.warn("Could not delete temporary s3n file: " + backupFile);
    }
    super.close();
    closed = true;
  }
}
/**
 * Deletes a file or directory.
 *
 * @param f         path to delete
 * @param recursive if false, a non-empty directory is an error
 * @return true on success; false if the path did not exist or a child
 *         deletion failed
 * @throws IOException if the directory is non-empty and recursive is false
 */
@Override
public boolean delete(Path f, boolean recursive) throws IOException {
  FileStatus status;
  try {
    status = getFileStatus(f);
  } catch (FileNotFoundException e) {
    // Nothing at this path: report failure rather than raising.
    return false;
  }
  String key = pathToKey(makeAbsolute(f));
  if (!status.isDir()) {
    store.delete(key);
    return true;
  }
  FileStatus[] children = listStatus(f);
  if (!recursive && children.length > 0) {
    throw new IOException("Directory " + f.toString() + " is not empty.");
  }
  for (FileStatus child : children) {
    if (!delete(child.getPath(), recursive)) {
      return false;
    }
  }
  // Remove the explicit directory marker, if any.
  store.delete(key + FOLDER_SUFFIX);
  return true;
}
// NOTE(review): fragment with unbalanced braces — the closing logic lies outside this view.
// Three-step existence probe for a key: plain object (file), then the FOLDER_SUFFIX marker
// object (explicit directory), then a single-entry listing whose files or common prefixes
// indicate an implicit directory formed purely by child key prefixes.
FileMetadata meta = store.retrieveMetadata(key); if (meta != null) { if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) { PartialListing listing = store.list(key, 1, null); if (listing.getFiles().length > 0 || listing.getCommonPrefixes().length > 0) {
// NOTE(review): fragment with unbalanced braces — paging/termination code lies outside this
// view. File rename is a single store.rename(); directory rename swaps the folder marker
// (delete src marker, write dst marker) and renames each listed child key, substituting the
// dstKey prefix for the srcKey prefix. priorLastKey is the listing continuation token.
boolean srcIsFile = existsAndIsFile(src); if (srcIsFile) { store.rename(srcKey, dstKey); } else { store.delete(srcKey + FOLDER_SUFFIX); store.storeEmptyFile(dstKey + FOLDER_SUFFIX); PartialListing listing = store.listAll(srcKey, S3_MAX_LISTING_LENGTH, priorLastKey); for (FileMetadata file : listing.getFiles()) { store.rename(file.getKey(), dstKey + file.getKey().substring(srcKey.length()));
/**
 * Uploads the buffered backup file to the store and closes this stream.
 * Re-entrant calls after a successful close return immediately. Cleanup
 * (backup-file removal, superclass close, closed flag) always runs, even
 * when the upload throws.
 */
@Override
public synchronized void close() throws IOException {
  if (closed) {
    return; // already closed — nothing to do
  }
  backupStream.close();
  try {
    byte[] checksum = null;
    if (digest != null) {
      checksum = digest.digest();
    }
    store.storeFile(key, backupFile, checksum);
  } finally {
    if (!backupFile.delete()) {
      LOG.warn("Could not delete temporary s3n file: " + backupFile);
    }
    super.close();
    closed = true;
  }
}
/**
 * Removes the file or directory at {@code f}.
 *
 * @param f         target path
 * @param recursive whether to descend into directories
 * @return true if the path was deleted; false if it was absent or a
 *         nested deletion failed
 * @throws IOException when asked to non-recursively delete a non-empty
 *         directory
 */
@Override
public boolean delete(Path f, boolean recursive) throws IOException {
  FileStatus status;
  try {
    status = getFileStatus(f);
  } catch (FileNotFoundException e) {
    return false; // absent path: signal failure, do not throw
  }
  Path absolutePath = makeAbsolute(f);
  String key = pathToKey(absolutePath);
  if (status.isDir()) {
    FileStatus[] entries = listStatus(f);
    if (!recursive && entries.length > 0) {
      throw new IOException("Directory " + f.toString() + " is not empty.");
    }
    for (FileStatus entry : entries) {
      boolean ok = delete(entry.getPath(), recursive);
      if (!ok) {
        return false;
      }
    }
    // Drop the explicit folder-marker object last.
    store.delete(key + FOLDER_SUFFIX);
  } else {
    store.delete(key);
  }
  return true;
}
// NOTE(review): garbled extraction fragment — not compilable as written. A string literal
// is fused directly onto a LOG.debug(...) call ("...for key '" + key + LOG.debug(...)),
// and several if-blocks open without closing. It appears to interleave getFileStatus's
// debug logging with its probe sequence: retrieve metadata for the plain key, then the
// FOLDER_SUFFIX marker, then a 1-entry listing to detect an implicit directory. Treat this
// span as documentation of intent only; the working logic lives in the full getFileStatus.
LOG.debug("getFileStatus retrieving metadata for key '" + key + "'"); FileMetadata meta = store.retrieveMetadata(key); if (meta != null) { if(LOG.isDebugEnabled()) { if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) { if(LOG.isDebugEnabled()) { LOG.debug("getFileStatus returning 'directory' for key '" + key + LOG.debug("getFileStatus listing key '" + key + "'"); PartialListing listing = store.list(key, 1); if (listing.getFiles().length > 0 || listing.getCommonPrefixes().length > 0) {
/**
 * A zero-byte object stored directly in the backing store must still be
 * openable (and closeable) through the FileSystem API as a regular file.
 */
public void testEmptyFile() throws Exception {
  store.storeEmptyFile("test/hadoop/file1");
  fs.open(path("/test/hadoop/file1")).close();
}
// NOTE(review): fragment — getPos() is truncated at the end of this line; its body is not
// visible here (likely returns this.pos — confirm against the full source). seek()
// implements random access over a non-seekable S3 stream by closing the current stream and
// re-retrieving the object starting at the requested offset, then updating the cursor.
public synchronized void seek(long pos) throws IOException { in.close(); in = store.retrieve(key, pos); this.pos = pos; } public synchronized long getPos() throws IOException {
/**
 * Wires this FileSystem to the given S3 URI: runs superclass setup,
 * lazily creates and initializes the backing store, and establishes the
 * canonical URI plus a per-user default working directory.
 *
 * @param uri  filesystem URI (scheme and authority are kept)
 * @param conf active configuration
 * @throws IOException if store initialization fails
 */
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  super.initialize(uri, conf);
  if (store == null) {
    store = createDefaultStore(conf);
  }
  store.initialize(uri, conf);
  setConf(conf);
  this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
  Path homeDir = new Path("/user", System.getProperty("user.name"));
  this.workingDir = homeDir.makeQualified(this);
}
/**
 * Completes the write: closes the local buffer, uploads it to the object
 * store, and tears down resources. Safe to invoke repeatedly — only the
 * first call does work. Upload start and completion are logged at INFO.
 */
@Override
public synchronized void close() throws IOException {
  if (closed) {
    return;
  }
  backupStream.close();
  LOG.info("OutputStream for key '{}' closed. Now beginning upload", key);
  try {
    byte[] md5Hash = (digest != null) ? digest.digest() : null;
    store.storeFile(key, backupFile, md5Hash);
  } finally {
    boolean deleted = backupFile.delete();
    if (!deleted) {
      LOG.warn("Could not delete temporary s3n file: " + backupFile);
    }
    super.close();
    closed = true;
  }
  LOG.info("OutputStream for key '{}' upload complete", key);
}
@Override public FileStatus getFileStatus(Path f) throws IOException { Path absolutePath = makeAbsolute(f); String key = pathToKey(absolutePath); if (key.length() == 0) { // root always exists return newDirectory(absolutePath); } FileMetadata meta = store.retrieveMetadata(key); if (meta != null) { return newFile(meta, absolutePath); } if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) { return newDirectory(absolutePath); } PartialListing listing = store.list(key, 1); if (listing.getFiles().length > 0 || listing.getCommonPrefixes().length > 0) { return newDirectory(absolutePath); } throw new FileNotFoundException(absolutePath + ": No such file or directory."); }
public void testDirWithDifferentMarkersWorks() throws Exception { for (int i = 0; i < 3; i++) { String base = "test/hadoop" + i; Path path = path("/" + base); createTestFiles(base); if (i == 0 ) { //do nothing, we are testing correctness with no markers } else if (i == 1) { // test for _$folder$ marker store.storeEmptyFile(base + "_$folder$"); store.storeEmptyFile(base + "/dir_$folder$"); } else if (i == 2) { // test the end slash file marker store.storeEmptyFile(base + "/"); store.storeEmptyFile(base + "/dir/"); } else if (i == 3) { // test both markers store.storeEmptyFile(base + "_$folder$"); store.storeEmptyFile(base + "/dir_$folder$"); store.storeEmptyFile(base + "/"); store.storeEmptyFile(base + "/dir/"); } assertTrue(fs.getFileStatus(path).isDirectory()); assertEquals(2, fs.listStatus(path).length); } }