/**
 * Caches the given datatype if no datatype with the same dataverse/name pair is
 * already cached. Note that this returns {@code null} in both outcomes: when the
 * type is newly inserted ({@code Map.put} on an absent key yields {@code null})
 * and when an entry already exists (the cache is left untouched).
 *
 * @param datatype the datatype to cache
 * @return always {@code null} (see above)
 */
public Datatype addDatatypeIfNotExists(Datatype datatype) {
    synchronized (datatypes) {
        // Lazily create the per-dataverse map on first use.
        Map<String, Datatype> dataverseTypes =
                datatypes.computeIfAbsent(datatype.getDataverseName(), k -> new HashMap<>());
        if (dataverseTypes.containsKey(datatype.getDatatypeName())) {
            // Already cached: leave the existing entry alone.
            return null;
        }
        return dataverseTypes.put(datatype.getDatatypeName(), datatype);
    }
}
/**
 * Removes the given datatype from the cache, if present.
 *
 * @param datatype identifies the dataverse/name pair to evict
 * @return the previously cached datatype, or {@code null} if it was not cached
 */
public Datatype dropDatatype(Datatype datatype) {
    synchronized (datatypes) {
        Map<String, Datatype> dataverseTypes = datatypes.get(datatype.getDataverseName());
        // No map for this dataverse means nothing to drop.
        return dataverseTypes == null ? null : dataverseTypes.remove(datatype.getDatatypeName());
    }
}
private void confirmDatatypeIsUnusedByDatatypes(TxnId txnId, String dataverseName, String datatypeName) throws AlgebricksException, RemoteException { // If any datatype uses this type, throw an error // TODO: Currently this loads all types into memory. This will need to be fixed // for large numbers of types Datatype dataTypeToBeDropped = getDatatype(txnId, dataverseName, datatypeName); assert dataTypeToBeDropped != null; IAType typeToBeDropped = dataTypeToBeDropped.getDatatype(); List<Datatype> datatypes = getAllDatatypes(txnId); for (Datatype dataType : datatypes) { // skip types in different dataverses as well as the type to be dropped itself if (!dataType.getDataverseName().equals(dataverseName) || dataType.getDatatype().getTypeName().equals(datatypeName)) { continue; } AbstractComplexType recType = (AbstractComplexType) dataType.getDatatype(); if (recType.containsType(typeToBeDropped)) { throw new AlgebricksException("Cannot drop type " + dataverseName + "." + datatypeName + " being used by type " + dataverseName + "." + recType.getTypeName()); } } }
/**
 * Adds the datatype through the metadata node, then records the node's view of
 * the freshly added type in the transaction context. Any {@link RemoteException}
 * from either call is wrapped in a {@link MetadataException}.
 *
 * @param ctx      the active metadata transaction context
 * @param datatype the datatype to add
 * @throws AlgebricksException wrapping any remote failure
 */
@Override
public void addDatatype(MetadataTransactionContext ctx, Datatype datatype) throws AlgebricksException {
    try {
        metadataNode.addDatatype(ctx.getTxnId(), datatype);
        // Re-read the type from the node so the context caches the canonical copy.
        ctx.addDatatype(
                metadataNode.getDatatype(ctx.getTxnId(), datatype.getDataverseName(), datatype.getDatatypeName()));
    } catch (RemoteException e) {
        throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
    }
}
/**
 * Serializes a collection (list) type's item-type name to {@code out}. If the
 * item type is itself a derived type, it is first handed to
 * {@code handleNestedDerivedType} for nested processing.
 *
 * @param instance the datatype record being written
 * @param type     the collection type (must be an {@code AbstractCollectionType})
 * @param out      destination for the serialized item-type name
 * @throws HyracksDataException on serialization failure
 */
private void writeCollectionType(Datatype instance, AbstractComplexType type, DataOutput out)
        throws HyracksDataException {
    AbstractCollectionType listType = (AbstractCollectionType) type;
    IAType itemType = listType.getItemType();
    if (itemType.getTypeTag().isDerivedType()) {
        handleNestedDerivedType(itemType.getTypeName(), (AbstractComplexType) itemType, instance,
                instance.getDataverseName(), instance.getDatatypeName());
    }
    // Reuse the local rather than re-fetching the item type from the list type.
    aString.setValue(itemType.getTypeName());
    stringSerde.serialize(aString, out);
}
aString.setValue(dataType.getDataverseName()); stringSerde.serialize(aString, tupleBuilder.getDataOutput()); tupleBuilder.addFieldEndOffset(); aString.setValue(dataType.getDataverseName()); stringSerde.serialize(aString, fieldValue.getDataOutput()); recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
datatype.getDataverseName(), datatype.getDatatypeName(), new ARecordType(aRecType.getTypeName(), aRecType.getFieldNames(), aRecType.getFieldTypes(), aRecType.isOpen()), datatype.getIsAnonymous());
instance.getDataverseName(), instance.getDatatypeName());