/**
 * Opens a {@link FileSystem} for {@code path}, authenticated as the user carried
 * by the supplied {@code context}, using the configuration resolved for that path.
 *
 * @throws IOException if the underlying file system cannot be created
 */
public FileSystem getFileSystem(HdfsContext context, Path path)
        throws IOException
{
    String user = context.getIdentity().getUser();
    return getFileSystem(user, path, getConfiguration(context, path));
}
private List<String> computeFileNamesForMissingBuckets( ConnectorSession session, Table table, HiveStorageFormat storageFormat, Path targetPath, String filePrefix, int bucketCount, PartitionUpdate partitionUpdate) { if (partitionUpdate.getFileNames().size() == bucketCount) { // fast path for common case return ImmutableList.of(); } HdfsContext hdfsContext = new HdfsContext(session, table.getDatabaseName(), table.getTableName()); JobConf conf = toJobConf(hdfsEnvironment.getConfiguration(hdfsContext, targetPath)); String fileExtension = HiveWriterFactory.getFileExtension(conf, fromHiveStorageFormat(storageFormat)); Set<String> fileNames = ImmutableSet.copyOf(partitionUpdate.getFileNames()); ImmutableList.Builder<String> missingFileNamesBuilder = ImmutableList.builder(); for (int i = 0; i < bucketCount; i++) { String fileName = HiveWriterFactory.computeBucketedFileName(filePrefix, i) + fileExtension; if (!fileNames.contains(fileName)) { missingFileNamesBuilder.add(fileName); } } List<String> missingFileNames = missingFileNamesBuilder.build(); verify(fileNames.size() + missingFileNames.size() == bucketCount); return missingFileNames; }
@Test public void testIsViewFileSystem() { HdfsEnvironment hdfsEnvironment = createTestHdfsEnvironment(new HiveClientConfig()); Path viewfsPath = new Path("viewfs://ns-default/test-folder"); Path nonViewfsPath = new Path("hdfs://localhost/test-dir/test-folder"); // ViewFS check requires the mount point config hdfsEnvironment.getConfiguration(CONTEXT, viewfsPath).set("fs.viewfs.mounttable.ns-default.link./test-folder", "hdfs://localhost/app"); assertTrue(isViewFileSystem(CONTEXT, hdfsEnvironment, viewfsPath)); assertFalse(isViewFileSystem(CONTEXT, hdfsEnvironment, nonViewfsPath)); } }
// NOTE(review): fragment of a larger expression — the leading lambda is the tail of a
// collector that stringifies session property values (enclosing stream not visible here).
// The remainder resolves a Hadoop Configuration scoped to (schema, table) for the write
// path and stores it as a JobConf. Assumes `session`, `schemaName`, `tableName`, and
// `writePath` are supplied by the enclosing constructor/method — confirm in full source.
entry -> session.getProperty(entry.getName(), entry.getJavaType()).toString())); Configuration conf = hdfsEnvironment.getConfiguration(new HdfsContext(session, schemaName, tableName), writePath); this.conf = toJobConf(conf);
// Fragment: resolve the split's file path, then fetch the Hadoop configuration scoped to
// the split's database/table via an HdfsContext (presumably so per-table config overrides
// apply — TODO confirm against HdfsEnvironment.getConfiguration's contract).
Path path = new Path(hiveSplit.getPath()); Configuration configuration = hdfsEnvironment.getConfiguration(new HdfsContext(session, hiveSplit.getDatabase(), hiveSplit.getTable()), path);
/**
 * Writes zero-row files with the given names under {@code path}, using the
 * serde/output format of the partition when present, otherwise of the table.
 * Used to materialize placeholder files (e.g. for buckets that received no rows).
 */
private void createEmptyFile(ConnectorSession session, Path path, Table table, Optional<Partition> partition, List<String> fileNames)
{
    HdfsContext context = new HdfsContext(session, table.getDatabaseName(), table.getTableName());
    JobConf conf = toJobConf(hdfsEnvironment.getConfiguration(context, path));

    // Partition-level storage settings win over table-level ones when a partition is given.
    Properties schema = partition
            .map(value -> getHiveSchema(value, table))
            .orElseGet(() -> getHiveSchema(table));
    StorageFormat format = partition
            .map(value -> value.getStorage().getStorageFormat())
            .orElseGet(() -> table.getStorage().getStorageFormat());

    for (String fileName : fileNames) {
        writeEmptyFile(session, new Path(path, fileName), conf, schema, format.getSerDe(), format.getOutputFormat());
    }
}
// Fragment: look up the context-scoped configuration, derive the Hive InputFormat from the
// table schema (the `false` flag's meaning is not visible here — presumably "do not resolve
// symlink input formats"; TODO confirm), then open the file system as the context's user.
Configuration configuration = hdfsEnvironment.getConfiguration(hdfsContext, path); InputFormat<?, ?> inputFormat = getInputFormat(configuration, schema, false); FileSystem fs = hdfsEnvironment.getFileSystem(hdfsContext, path);
/**
 * Opens a {@link FileSystem} for {@code path} using this instance's configuration,
 * applying the configured checksum-verification setting before returning it.
 *
 * @throws IOException if the file system cannot be created
 */
public FileSystem getFileSystem(Path path)
        throws IOException
{
    FileSystem fs = path.getFileSystem(getConfiguration(path));
    fs.setVerifyChecksum(verifyChecksum);
    return fs;
}
}
// Fragment: CREATE TABLE requires a write path from the location service.
// NOTE(review): `conf` is assigned twice in immediate succession — the JobConf built from
// writePath.get() is dead, instantly overwritten by the one built from hdfsEnvironmentPath.
// This looks like a merge/rebase artifact; confirm which path is intended and delete the
// other assignment in the full source.
Optional<Path> writePath = locationService.writePathRoot(locationHandle); checkArgument(writePath.isPresent(), "CREATE TABLE must have a write path"); conf = new JobConf(hdfsEnvironment.getConfiguration(writePath.get())); conf = new JobConf(hdfsEnvironment.getConfiguration(hdfsEnvironmentPath));
// NOTE(review): this test method (and the trailing class brace) is byte-identical to an
// earlier copy in this chunk — likely two versions of the same file were concatenated.
// Confirm only one copy exists in the real source; two in one class would not compile.
@Test public void testIsViewFileSystem() { HdfsEnvironment hdfsEnvironment = createTestHdfsEnvironment(new HiveClientConfig()); Path viewfsPath = new Path("viewfs://ns-default/test-folder"); Path nonViewfsPath = new Path("hdfs://localhost/test-dir/test-folder"); // ViewFS check requires the mount point config hdfsEnvironment.getConfiguration(CONTEXT, viewfsPath).set("fs.viewfs.mounttable.ns-default.link./test-folder", "hdfs://localhost/app"); assertTrue(isViewFileSystem(CONTEXT, hdfsEnvironment, viewfsPath)); assertFalse(isViewFileSystem(CONTEXT, hdfsEnvironment, nonViewfsPath)); } }
// Fragment: capture the split length and fetch a configuration for its path.
// NOTE(review): this uses a single-argument getConfiguration(path) overload, unlike the
// context-aware getConfiguration(context, path) used at other call sites in this chunk —
// confirm whether user/table-scoped configuration should apply here too.
long length = hiveSplit.getLength(); Configuration configuration = hdfsEnvironment.getConfiguration(path);
// Fragment: fetch configuration for the path (single-arg overload — see note above about
// context-aware call sites elsewhere; the context-aware overload appears preferred in newer
// code, TODO confirm) and derive the InputFormat from the table schema; the `false` flag's
// semantics are not visible here.
Configuration configuration = hdfsEnvironment.getConfiguration(path); InputFormat<?, ?> inputFormat = getInputFormat(configuration, schema, false);