/**
 * Hash code consistent with {@code equals}: folds platform, name and metadata
 * together with the conventional 31 multiplier. The arithmetic is kept exactly
 * as before so serialized/cached hash values remain stable.
 */
@Override
public int hashCode() {
  int h = platform.hashCode();
  h = 31 * h + getName().hashCode();
  h = 31 * h + metadata.hashCode();
  return h;
}
/**
 * Serialize to a string map
 *
 * <p>Insertion order matters: {@code putAll(metadata)} runs last, so metadata
 * entries win if they collide with {@code PLATFORM_KEY} or {@code NAME_KEY}.
 *
 * @deprecated use {@link Descriptor#serialize(Descriptor)}
 */
@Deprecated
public Map<String, String> toDataMap() {
  Map<String, String> dataMap = Maps.newHashMap();
  dataMap.put(PLATFORM_KEY, platform);
  dataMap.put(NAME_KEY, getName());
  dataMap.putAll(metadata);
  return dataMap;
}
/**
 * Copy constructor: duplicates the name (via the superclass), platform and metadata
 * of the given descriptor.
 *
 * @deprecated use {@link #copy()}
 */
@Deprecated
public DatasetDescriptor(DatasetDescriptor copy) {
  super(copy.getName());
  this.platform = copy.getPlatform();
  this.metadata.putAll(copy.getMetadata());
}
/**
 * Resolves a raw Hive dataset descriptor into an HDFS dataset descriptor built from
 * the raw descriptor's filesystem scheme and location metadata. The Hive table name
 * is preserved on the result under the {@code HIVE_TABLE} metadata key.
 *
 * @param raw the Hive-side descriptor; must carry {@code FS_SCHEME} and {@code FS_LOCATION} metadata
 * @param state job state (unused here, required by the interface)
 * @return a new descriptor whose platform is the fs scheme and whose name is the fs location
 * @throws IllegalArgumentException if either required metadata key is missing
 */
@Override
public DatasetDescriptor resolve(DatasetDescriptor raw, State state) {
  ImmutableMap<String, String> metadata = raw.getMetadata();
  Preconditions.checkArgument(metadata.containsKey(DatasetConstants.FS_SCHEME),
      String.format("Hive Dataset Descriptor must contain metadata %s to create Hdfs Dataset Descriptor",
          DatasetConstants.FS_SCHEME));
  // Bug fix: the original re-checked FS_SCHEME here while the message named FS_LOCATION;
  // FS_LOCATION is read unconditionally below, so it must be validated itself.
  Preconditions.checkArgument(metadata.containsKey(DatasetConstants.FS_LOCATION),
      String.format("Hive Dataset Descriptor must contain metadata %s to create Hdfs Dataset Descriptor",
          DatasetConstants.FS_LOCATION));
  DatasetDescriptor datasetDescriptor = new DatasetDescriptor(
      metadata.get(DatasetConstants.FS_SCHEME), metadata.get(DatasetConstants.FS_LOCATION));
  datasetDescriptor.addMetadata(HIVE_TABLE, raw.getName());
  return datasetDescriptor;
}
}
/** Verifies that {@code copy()} produces a descriptor equal to the original in every observable way. */
@Test
public void testDatasetDescriptor() {
  DatasetDescriptor original = new DatasetDescriptor("hdfs", "/data/tracking/PageViewEvent");
  original.addMetadata("fsUri", "hdfs://test.com:2018");

  DatasetDescriptor duplicate = original.copy();

  // Field-by-field equality of the copy.
  Assert.assertEquals(duplicate.getName(), original.getName());
  Assert.assertEquals(duplicate.getPlatform(), original.getPlatform());
  Assert.assertEquals(duplicate.getMetadata(), original.getMetadata());
  // equals/hashCode contract between original and copy.
  Assert.assertEquals(original, duplicate);
  Assert.assertEquals(original.hashCode(), duplicate.hashCode());
}
// Populate source/destination dataset descriptors from the two filesystems, then
// verify the lineage metadata on each side.
copyableFile.setFsDatasets(originFs, targetFs);
DatasetDescriptor source = (DatasetDescriptor) copyableFile.getSourceData();
Assert.assertEquals(source.getName(), "/data/databases/source");
Assert.assertEquals(source.getPlatform(), "hdfs");
Assert.assertEquals(source.getMetadata().get("fsUri"), originFsUri);
DatasetDescriptor destination = (DatasetDescriptor) copyableFile.getDestinationData();
Assert.assertEquals(destination.getName(), "/data/databases/destination");
Assert.assertEquals(destination.getPlatform(), "file");
Assert.assertEquals(destination.getMetadata().get("fsUri"), targetFsUri);
// NOTE(review): the same call now expects "/profile"-suffixed dataset names —
// presumably copyableFile was reassigned between these two stanzas on lines
// outside this view; confirm against the full test method.
copyableFile.setFsDatasets(originFs, targetFs);
source = (DatasetDescriptor) copyableFile.getSourceData();
Assert.assertEquals(source.getName(), "/data/databases/source/profile");
Assert.assertEquals(source.getPlatform(), "hdfs");
Assert.assertEquals(source.getMetadata().get("fsUri"), originFsUri);
destination = (DatasetDescriptor) copyableFile.getDestinationData();
Assert.assertEquals(destination.getName(), "/data/databases/destination/profile");
Assert.assertEquals(destination.getPlatform(), "file");
Assert.assertEquals(destination.getMetadata().get("fsUri"), targetFsUri);
// NOTE(review): this fragment looks truncated — the assignments binding the
// deserialized/extracted descriptors to sourceDD, destDD1 and destDD2 appear to
// sit on lines outside this view (each expression below produces a value that is
// then asserted against via those names). Verify against the full file.
GSON.fromJson(props.getProperty("gobblin.event.lineage.source"), DatasetDescriptor.class);
// Source lineage: platform/name plus the originating Hive table recorded by the resolver.
Assert.assertEquals(sourceDD.getPlatform(), "file");
Assert.assertEquals(sourceDD.getName(), "/tmp/test");
Assert.assertEquals(sourceDD.getMetadata().get(HiveToHdfsDatasetResolver.HIVE_TABLE), "db1.tb1");
(DatasetDescriptor) firstDescriptor(props, "gobblin.event.lineage.branch.1.destination");
// Branch 1 destination: nested-ORC output location and its Hive table.
Assert.assertEquals(destDD1.getPlatform(), "file");
Assert.assertEquals(destDD1.getName(), "/tmp/data_nestedOrc/db1/tb1/final");
Assert.assertEquals(destDD1.getMetadata().get(HiveToHdfsDatasetResolver.HIVE_TABLE),
    "db1_nestedOrcDb.tb1_nestedOrc");
(DatasetDescriptor) firstDescriptor(props, "gobblin.event.lineage.branch.2.destination");
// Branch 2 destination: flattened-ORC output location and its Hive table.
Assert.assertEquals(destDD2.getPlatform(), "file");
Assert.assertEquals(destDD2.getName(), "/tmp/data_flattenedOrc/db1/tb1/final");
Assert.assertEquals(destDD2.getMetadata().get(HiveToHdfsDatasetResolver.HIVE_TABLE),
    "db1_flattenedOrcDb.tb1_flattenedOrc");
// Folds platform, name and metadata with the conventional 31 multiplier so that
// descriptors equal under equals() hash equally.
@Override public int hashCode() {
  int result = platform.hashCode();
  result = 31 * result + getName().hashCode();
  result = 31 * result + metadata.hashCode();
  return result;
}
/**
 * Serialize to a string map
 *
 * <p>Note: {@code putAll(metadata)} runs last, so metadata entries override
 * {@code PLATFORM_KEY}/{@code NAME_KEY} if the keys collide.
 *
 * @deprecated use {@link Descriptor#serialize(Descriptor)}
 */
@Deprecated public Map<String, String> toDataMap() {
  Map<String, String> map = Maps.newHashMap();
  map.put(PLATFORM_KEY, platform);
  map.put(NAME_KEY, getName());
  map.putAll(metadata);
  return map;
}
/**
 * Copy constructor: clones name (via the superclass), platform and metadata from
 * the given descriptor.
 *
 * @deprecated use {@link #copy()}
 */
@Deprecated public DatasetDescriptor(DatasetDescriptor copy) {
  super(copy.getName());
  platform = copy.getPlatform();
  metadata.putAll(copy.getMetadata());
}
/**
 * Resolves a raw Hive dataset descriptor into an HDFS dataset descriptor using the
 * raw descriptor's {@code FS_SCHEME}/{@code FS_LOCATION} metadata; the Hive table
 * name is carried over under the {@code HIVE_TABLE} metadata key.
 *
 * @param raw the Hive-side descriptor; must carry {@code FS_SCHEME} and {@code FS_LOCATION} metadata
 * @param state job state (unused here, required by the interface)
 * @return a new descriptor whose platform is the fs scheme and whose name is the fs location
 * @throws IllegalArgumentException if either required metadata key is missing
 */
@Override
public DatasetDescriptor resolve(DatasetDescriptor raw, State state) {
  ImmutableMap<String, String> metadata = raw.getMetadata();
  Preconditions.checkArgument(metadata.containsKey(DatasetConstants.FS_SCHEME),
      String.format("Hive Dataset Descriptor must contain metadata %s to create Hdfs Dataset Descriptor",
          DatasetConstants.FS_SCHEME));
  // Bug fix: the second precondition previously re-tested FS_SCHEME while its message
  // named FS_LOCATION; FS_LOCATION is dereferenced below, so validate it here.
  Preconditions.checkArgument(metadata.containsKey(DatasetConstants.FS_LOCATION),
      String.format("Hive Dataset Descriptor must contain metadata %s to create Hdfs Dataset Descriptor",
          DatasetConstants.FS_LOCATION));
  DatasetDescriptor datasetDescriptor = new DatasetDescriptor(
      metadata.get(DatasetConstants.FS_SCHEME), metadata.get(DatasetConstants.FS_LOCATION));
  datasetDescriptor.addMetadata(HIVE_TABLE, raw.getName());
  return datasetDescriptor;
}
}