/**
 * Clear mapping information, including strategy.
 * Resets all mapping-level state (strategy, columns, foreign key, table,
 * raw mapping info) and marks both mapping resolve modes as unresolved so
 * the mapping can be re-resolved from scratch.
 */
public void clearMapping() {
    _strategy = null;
    _cols = Schemas.EMPTY_COLUMNS;
    _fk = null;
    _table = null;
    _info.clear();
    // un-set both mapping resolve flags in a single call
    setResolve(MODE_MAPPING | MODE_MAPPING_INIT, false);
}
public void buildAnnotations() { Map output = new HashMap(); // pretend mappings are all resolved ClassMapping[] mappings = getMappings(); for (int i = 0; i < mappings.length; i++) mappings[i].setResolve(MODE_META | MODE_MAPPING, true); // store in user's configured IO MetaDataFactory mdf = _conf.newMetaDataFactoryInstance(); mdf.setRepository(getRepository()); mdf.setStoreDirectory(_dir); mdf.store(mappings, new QueryMetaData[0], new SequenceMetaData[0], MODE_META | MODE_MAPPING | MODE_ANN_MAPPING, output); _annos = output; }
public void buildAnnotations() { Map output = new HashMap(); // pretend mappings are all resolved ClassMapping[] mappings = getMappings(); for (int i = 0; i < mappings.length; i++) { mappings[i].setResolve(MODE_META | MODE_MAPPING, true); mappings[i].setUseSchemaElement(getUseSchemaElement()); } // store in user's configured IO MetaDataFactory mdf = _conf.newMetaDataFactoryInstance(); mdf.setRepository(getRepository()); mdf.setStoreDirectory(_dir); mdf.store(mappings, new QueryMetaData[0], new SequenceMetaData[0], MODE_META | MODE_MAPPING | MODE_ANN_MAPPING, output); _annos = output; }
public void buildAnnotations() { Map output = new HashMap(); // pretend mappings are all resolved ClassMapping[] mappings = getMappings(); for (int i = 0; i < mappings.length; i++) { mappings[i].setResolve(MODE_META | MODE_MAPPING, true); mappings[i].setUseSchemaElement(getUseSchemaElement()); } // store in user's configured IO MetaDataFactory mdf = _conf.newMetaDataFactoryInstance(); mdf.setRepository(getRepository()); mdf.setStoreDirectory(_dir); mdf.store(mappings, new QueryMetaData[0], new SequenceMetaData[0], MODE_META | MODE_MAPPING | MODE_ANN_MAPPING, output); _annos = output; }
public void buildAnnotations() { Map output = new HashMap(); // pretend mappings are all resolved ClassMapping[] mappings = getMappings(); for (int i = 0; i < mappings.length; i++) { mappings[i].setResolve(MODE_META | MODE_MAPPING, true); mappings[i].setUseSchemaElement(getUseSchemaElement()); } // store in user's configured IO MetaDataFactory mdf = _conf.newMetaDataFactoryInstance(); mdf.setRepository(getRepository()); mdf.setStoreDirectory(_dir); mdf.store(mappings, new QueryMetaData[0], new SequenceMetaData[0], MODE_META | MODE_MAPPING | MODE_ANN_MAPPING, output); _annos = output; }
public void buildAnnotations() { Map output = new HashMap(); // pretend mappings are all resolved ClassMapping[] mappings = getMappings(); for (int i = 0; i < mappings.length; i++) { mappings[i].setResolve(MODE_META | MODE_MAPPING, true); mappings[i].setUseSchemaElement(getUseSchemaElement()); } // store in user's configured IO MetaDataFactory mdf = _conf.newMetaDataFactoryInstance(); mdf.setRepository(getRepository()); mdf.setStoreDirectory(_dir); mdf.store(mappings, new QueryMetaData[0], new SequenceMetaData[0], MODE_META | MODE_MAPPING | MODE_ANN_MAPPING, output); _annos = output; }
/** * Write the code for the tool. * * @param output if null, then perform the write directly * to the filesystem; otherwise, populate the * specified map with keys as the generated * {@link ClassMapping} and values as a * {@link String} that contains the generated code * @return the set of metadata {@link File}s that were written */ public Collection recordMetaData(boolean perClass, Map output) throws IOException { // pretend mappings are all resolved ClassMapping[] mappings = getMappings(); for (int i = 0; i < mappings.length; i++) mappings[i].setResolve(MODE_META | MODE_MAPPING, true); // store in user's configured IO MetaDataFactory mdf = _conf.newMetaDataFactoryInstance(); mdf.setRepository(getRepository()); mdf.setStoreDirectory(_dir); if (perClass) mdf.setStoreMode(MetaDataFactory.STORE_PER_CLASS); mdf.store(mappings, new QueryMetaData[0], new SequenceMetaData[0], MODE_META | MODE_MAPPING, output); Set files = new TreeSet(); for (int i = 0; i < mappings.length; i++) if (mappings[i].getSourceFile() != null) files.add(mappings[i].getSourceFile()); return files; }
// Bug fix: the original un-braced for-loop covered only the setResolve
// call, leaving setUseSchemaElement outside the loop where `i` is out of
// scope (compile error) — and it would have run only once even if it
// compiled. Braces ensure both calls execute for every mapping.
for (int i = 0; i < mappings.length; i++) {
    mappings[i].setResolve(MODE_META | MODE_MAPPING, true);
    mappings[i].setUseSchemaElement(getUseSchemaElement());
}