/**
 * Registers the given list of semantic types under the hNodeId of its first
 * element. An empty list is silently ignored.
 *
 * @param type semantic types for a single column; all entries are assumed to
 *             share the first entry's hNodeId
 */
public void setType(ArrayList<SemanticType> type) {
    if (!type.isEmpty()) {
        String hNodeId = type.get(0).getHNodeId();
        types.put(hNodeId, type);
    }
}
}
/**
 * Removes the given semantic type from the list registered for its column,
 * then re-registers the (possibly shrunken) list under the same hNodeId.
 *
 * @param type the semantic type to remove; located by its hNodeId
 */
public void removeType(SemanticType type) {
    String hNodeId = type.getHNodeId();
    ArrayList<SemanticType> existing = getSemanticTypeForHNodeId(hNodeId);
    existing.remove(type);
    types.put(hNodeId, existing);
}
/**
 * Appends the given semantic type to the list registered for its column,
 * then re-registers the list under the same hNodeId.
 *
 * @param type the semantic type to add; its hNodeId selects the target list
 */
public void addType(SemanticType type) {
    String hNodeId = type.getHNodeId();
    ArrayList<SemanticType> existing = getSemanticTypeForHNodeId(hNodeId);
    existing.add(type);
    types.put(hNodeId, existing);
}
/**
 * Serializes one SemanticType as a JSON object on the given writer.
 * Writes nothing when {@code semanticType} is null. A missing domain or
 * type label is emitted as a JSON null.
 *
 * @param writer       destination JSON stream; the object is opened and
 *                     closed inside this method
 * @param semanticType the semantic type to serialize, or null to skip
 * @throws IOException if the underlying writer fails
 */
private static void writeSemanticType(JsonWriter writer, SemanticType semanticType) throws IOException {
    if (semanticType == null)
        return;
    writer.beginObject();
    writer.name("hNodeId").value(semanticType.getHNodeId());
    writer.name("domain");
    if (semanticType.getDomain() == null)
        writer.nullValue(); // idiomatic replacement for value((String) null)
    else
        writeLabel(writer, semanticType.getDomain());
    writer.name("type");
    if (semanticType.getType() == null)
        writer.nullValue();
    else
        writeLabel(writer, semanticType.getType());
    writer.name("origin").value(semanticType.getOrigin().toString());
    writer.name("confidenceScore").value(semanticType.getConfidenceScore());
    writer.endObject();
}
String hNodeId = newTypes.get(0).getHNodeId(); for (HNodePath path : paths) { if (path.getLeaf().getId().equals(hNodeId)) {
private void saveSemanticTypesInformation(Worksheet worksheet, Workspace workspace , Collection<SemanticType> semanticTypes) throws JSONException { JSONArray typesArray = new JSONArray(); // Add the vworksheet information JSONObject vwIDJObj = new JSONObject(); vwIDJObj.put(ClientJsonKeys.name.name(), ParameterType.worksheetId.name()); vwIDJObj.put(ClientJsonKeys.type.name(), ParameterType.worksheetId.name()); vwIDJObj.put(ClientJsonKeys.value.name(), worksheetId); typesArray.put(vwIDJObj); for (SemanticType type: semanticTypes) { // Add the hNode information JSONObject hNodeJObj = new JSONObject(); hNodeJObj.put(ClientJsonKeys.name.name(), ParameterType.hNodeId.name()); hNodeJObj.put(ClientJsonKeys.type.name(), ParameterType.hNodeId.name()); hNodeJObj.put(ClientJsonKeys.value.name(), type.getHNodeId()); typesArray.put(hNodeJObj); // Add the semantic type information JSONObject typeJObj = new JSONObject(); typeJObj.put(ClientJsonKeys.name.name(), ClientJsonKeys.SemanticType.name()); typeJObj.put(ClientJsonKeys.type.name(), ParameterType.other.name()); typeJObj.put(ClientJsonKeys.value.name(), type.getJSONArrayRepresentation()); typesArray.put(typeJObj); } setInputParameterJson(typesArray.toString()); }
private void saveSemanticTypesInformation(Worksheet worksheet, Workspace workspace , Collection<SemanticType> semanticTypes) throws JSONException { JSONArray typesArray = new JSONArray(); // Add the vworksheet information JSONObject vwIDJObj = new JSONObject(); vwIDJObj.put(ClientJsonKeys.name.name(), ParameterType.worksheetId.name()); vwIDJObj.put(ClientJsonKeys.type.name(), ParameterType.worksheetId.name()); vwIDJObj.put(ClientJsonKeys.value.name(), worksheetId); typesArray.put(vwIDJObj); for (SemanticType type: semanticTypes) { // Add the hNode information JSONObject hNodeJObj = new JSONObject(); hNodeJObj.put(ClientJsonKeys.name.name(), ParameterType.hNodeId.name()); hNodeJObj.put(ClientJsonKeys.type.name(), ParameterType.hNodeId.name()); hNodeJObj.put(ClientJsonKeys.value.name(), type.getHNodeId()); typesArray.put(hNodeJObj); // Add the semantic type information JSONObject typeJObj = new JSONObject(); typeJObj.put(ClientJsonKeys.name.name(), ClientJsonKeys.SemanticType.name()); typeJObj.put(ClientJsonKeys.type.name(), ParameterType.other.name()); typeJObj.put(ClientJsonKeys.value.name(), type.getJSONArrayRepresentation()); typesArray.put(typeJObj); } setInputParameterJson(typesArray.toString(4)); }
SRIDHNodeId = type.getHNodeId(); spatialHNodeIds.add(0, SRIDHNodeId); } else if (hasType(type, POINT_POS_PROPERTY, POINT_CLASS) && pointFeatureHNodeId == "") { spatialHNodeIds.add(0, type.getHNodeId()); pointFeatureHNodeId = type.getHNodeId(); } else if (hasType(type, WGS84_LAT_PROPERTY, POINT_CLASS) && pointFeatureLatHNodeId == "") { spatialHNodeIds.add(0, type.getHNodeId()); pointFeatureLatHNodeId = type.getHNodeId(); } else if (hasType(type, WGS84_LNG_PROPERTY, POINT_CLASS) && pointFeatureLonHNodeId == "") { spatialHNodeIds.add(0, type.getHNodeId()); pointFeatureLonHNodeId = type.getHNodeId(); } else if (hasType(type, POS_LIST_PROPERTY, LINE_CLASS) && lineFeatureHNodeId == "") { spatialHNodeIds.add(0, type.getHNodeId()); lineFeatureHNodeId = type.getHNodeId(); } else if (hasType(type, POS_LIST_PROPERTY, POLYGON_CLASS) && polygonFeatureHNodeId == "") { spatialHNodeIds.add(0, type.getHNodeId()); polygonFeatureHNodeId = type.getHNodeId(); } else if (hasType(type, KML_CATEGORY_PROPERTY, KML_CUSTOMIZATION_CLASS)) { kmlCategoryHNodeId = type.getHNodeId(); if (kmlLabelHNodeId == "") { for (SemanticType synonymType : worksheet kmlLabelHNodeId = type.getHNodeId();
modeledColumnTable.put(type.getHNodeId(),"");
public void setLearnedSemanticTypes(List<SemanticType> learnedSemanticTypes) { double sum = 0.0; // normalizing the confidence scores if (learnedSemanticTypes != null) { for (SemanticType st : learnedSemanticTypes) { sum += st.getConfidenceScore() != null ? st.getConfidenceScore().doubleValue() : 0.0; } double confidence; this.learnedSemanticTypes = new ArrayList<>(); for (SemanticType st : learnedSemanticTypes) { confidence = st.getConfidenceScore() != null ? st.getConfidenceScore() : 0.0; SemanticType semType = new SemanticType(st.getHNodeId(), st.getType(), st.getDomain(), st.getDomainId(), st.isProvenance(), st.getOrigin(), confidence / sum); this.learnedSemanticTypes.add(semType); } } if (this.learnedSemanticTypes != null) Collections.sort(this.learnedSemanticTypes, Collections.reverseOrder()); }
coordinateHNodeIds.add(type.getHNodeId()); populatePoints(coordinateHNodeIds, currentCase, getRows(), getColumnMap()); coordinateHNodeIds.add(type.getHNodeId()); latFound = true; coordinateHNodeIds.set(0, type.getHNodeId()); currentCase = CoordinateCase.POINT_LAT_LNG; populatePoints(coordinateHNodeIds, currentCase, getRows(), coordinateHNodeIds.clear(); } else { coordinateHNodeIds.add(type.getHNodeId()); lngFound = true; coordinateHNodeIds.add(0, type.getHNodeId()); currentCase = CoordinateCase.POINT_POS; populatePoints(coordinateHNodeIds, currentCase, getRows(), coordinateHNodeIds.add(0, type.getHNodeId()); currentCase = CoordinateCase.LINE_POS_LIST; populateLines(coordinateHNodeIds, getRows(), getColumnMap()); coordinateHNodeIds.add(0, type.getHNodeId()); currentCase = CoordinateCase.POLYGON_POS_LIST; populatePolygons(coordinateHNodeIds, getRows(), getColumnMap());
modeledColumnTable.put(type.getHNodeId(),"");
HashMap<String, SemanticType> semanticIdMap = new HashMap<>(); for(SemanticType type : oldSemanticTypes) { String semId = LinkIdFactory.getLinkId(type.getType().getUri(), type.getDomainId(), type.getHNodeId()); semanticIdMap.put(semId, type);
.getSynonymTypesForHNodeId(newType.getHNodeId());
private void writeType(SemanticType type, JSONWriter writer, Map<String, ColumnNode> hNodeIdTocolumnNodeMap) { if (type != null && type.getConfidenceLevel() != SemanticType.ConfidenceLevel.Low) { ColumnNode alignmentColumnNode = hNodeIdTocolumnNodeMap.get(type.getHNodeId());