/**
 * Constructor. Delegates to the file-based constructor, resolving the
 * annotation-rules file from the given configuration section.
 *
 * @param annotationParams configuration section containing the
 *        {@code ANNOTATION_RULES_FILE} parameter
 * @throws ConfigurationException if the parameter is missing or invalid
 * @throws AnnotatorException if the delegated constructor fails to initialize
 */
public DefaultSentenceAnnotator(ConfigurationParams annotationParams) throws AnnotatorException, ConfigurationException {
    this(annotationParams.getFile(TransformationsConfigurationParametersNames.ANNOTATION_RULES_FILE));
}
/**
 * Constructor. Delegates to the file-based constructor, resolving the
 * CatVar database file from the given configuration section.
 *
 * @param params configuration section containing {@code PARAM_CATVAR_FILE_NAME}
 * @throws ConfigurationException if the parameter is missing or invalid
 * @throws LexicalResourceException if the delegated constructor fails
 */
public CatvarLexicalResource(ConfigurationParams params) throws LexicalResourceException, ConfigurationException {
    this(params.getFile(PARAM_CATVAR_FILE_NAME));
}
/**
 * Builds the Nomlex nominalization map from the files named in the given
 * configuration section.
 *
 * @param params configuration section with the Nomlex file and the
 *        class-role table file parameters
 * @return an immutable map from nominalization name to {@link Nominalization}
 * @throws ConfigurationException if either file parameter cannot be read
 * @throws NomlexException if building the map fails
 */
private ImmutableMap<String, Nominalization> createNomlexMapFromConfigurationParams(ConfigurationParams params) throws ConfigurationException, NomlexException {
    // Resolve the two input files required to construct the map.
    final File nomlexDatabaseFile = params.getFile(NOMLEX_FILE_PARAMETER_NAME);
    final File roleTableFile = params.getFile(CLASS_ROLE_TABLE_PARAMETER_NAME);
    // Build once and hand back the builder's immutable result.
    final NomlexMapBuilder builder = new NomlexMapBuilder(nomlexDatabaseFile.getPath(), roleTableFile.getPath());
    builder.build();
    return builder.getNomlexMap();
}
/**
 * Loads a serialized Lidstone-smoothed unigram probability model from the
 * file named in the engine configuration.
 *
 * @param enginemlParams configuration section containing the serialized
 *        unigram-model file parameter
 * @return the deserialized {@link UnigramProbabilityEstimation}
 * @throws TeEngineMlException wrapping any I/O, deserialization or
 *         configuration failure
 */
public static UnigramProbabilityEstimation getUnigramProbabilityEstimation(ConfigurationParams enginemlParams) throws TeEngineMlException {
    try {
        final File serializedModel = enginemlParams.getFile(RTE_ENGINE_UNIGRAM_LIDSTON_SER_FILE);
        logger.info("Loading unigram model from file: "+serializedModel.getPath());
        return MLELidstonSmoothedUnigramProbabilityEstimation.fromSerializedFile(serializedModel);
    } catch (IOException | ClassNotFoundException | ConfigurationException e) {
        // Wrap every failure mode in the engine's checked exception type, preserving the cause.
        throw new TeEngineMlException("Could not load UnigramProbabilityEstimation",e);
    }
}
public NewNormalizerBasedTextPreProcessor(ConfigurationParams params) throws ConfigurationException, TextPreprocessorException { File normalizerFile = params.getFile(PREPROCESS_NEW_NORMALIZER_FILE); try { normalizer = new BiuNormalizer(normalizerFile); } catch(Exception e) { throw new TextPreprocessorException("Failed to initialize new BiuNormalizer.",e); } }
/**
 * Constructor. Delegates to the main constructor, reading the score
 * threshold, the VerbOcean resource file and the set of allowed relation
 * types from the given configuration section.
 *
 * @param params configuration section with {@code PARAM_SCORE_THRESHOLD},
 *        {@code PARAM_FILE} and {@code PARAM_ALLOWED_RELATIONS}
 * @throws ConfigurationException if a parameter is missing or malformed
 * @throws LexicalResourceException if the delegated constructor fails
 */
public VerbOceanLexicalResource(ConfigurationParams params) throws LexicalResourceException, ConfigurationException {
    this(
        params.getDouble(PARAM_SCORE_THRESHOLD),
        params.getFile(PARAM_FILE),
        params.getEnumSet(RelationType.class, PARAM_ALLOWED_RELATIONS));
}

/////////////////////////////////////////////////////////////// PUBLIC ////////////////////////////////////////////////////////////
/**
 * Creates the F1-oriented classifier generator used at test time, loading
 * the search-phase and predictions-phase model files from the configuration.
 *
 * @return a new {@code F1ClassifierGenerator} wired to this environment
 * @throws BiuteeException if the model-file parameters cannot be read
 */
@Override protected ClassifierGenerator createClassifierGenerator() throws BiuteeException {
    try {
        final File modelForSearch = configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_SEARCH_MODEL);
        final File modelForPredictions = configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_PREDICTIONS_MODEL);
        return new F1ClassifierGenerator(
                teSystemEnvironment.getClassifierFactory(),
                teSystemEnvironment.getFeatureVectorStructureOrganizer(),
                modelForSearch,
                modelForPredictions);
    } catch (ConfigurationException e) {
        throw new BiuteeException("Could not create ClassifierGenerator. Please see nested exception.",e);
    }
}
/**
 * Creates the accuracy-oriented classifier generator used at test time,
 * loading the search-phase and predictions-phase model files from the
 * configuration.
 *
 * @return a new {@code AccuracyClassifierGenerator} wired to this environment
 * @throws BiuteeException if the model-file parameters cannot be read
 */
@Override protected ClassifierGenerator createClassifierGenerator() throws BiuteeException {
    try {
        final File modelForSearch = configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_SEARCH_MODEL);
        final File modelForPredictions = configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_PREDICTIONS_MODEL);
        return new AccuracyClassifierGenerator(
                teSystemEnvironment.getClassifierFactory(),
                teSystemEnvironment.getFeatureVectorStructureOrganizer(),
                modelForSearch,
                modelForPredictions);
    } catch (ConfigurationException e) {
        throw new BiuteeException("Could not create ClassifierGenerator. Please see nested exception.",e);
    }
}
// NOTE(review): fragment — the method body below appears truncated/garbled in
// this view (no opening '{' after the throws clause, and the stream is never
// closed here); preserved byte-for-byte.
@SuppressWarnings("unchecked") private RteSumDatasetContents createDatasetContents() throws BiuteeException, ConfigurationException, FileNotFoundException, IOException, Rte6mainIOException, ClassNotFoundException, TeEngineMlException, AnnotatorException, TreeStringGeneratorException, TreeCoreferenceInformationException
// Reads the previously-serialized dataset topics from the configured file.
File serFile = configurationParams.getFile(serialized_parameterName);
logger.info("Reading all topic from serialization file: "+serFile.getPath());
ObjectInputStream serStream = new ObjectInputStream(new FileInputStream(serFile));
// Fragment: loads the RTE pair dataset from its serialization file.
File serializedDataFile = configurationParams.getFile(parameterName);
logger.info("Loading dataset from serialization file :"+serializedDataFile.getPath());
// NOTE(review): the constructor call below is cut off mid-argument-list in this view.
RTESerializedPairsReader pairsReader = new RTESerializedPairsReader(
/**
 * Constructor. Delegates to the main constructor with the stop-words file,
 * the parsed extraction types, the DB connection string and the
 * co-occurrence threshold, all read from the given configuration section.
 */
public WikiLexicalResource(ConfigurationParams params) throws LexicalResourceException, ConfigurationException {
    this(
        params.getFile(PARAM_STOP_WORDS),
        WikiExtractionType.parseExtractionTypeListOfStrings(params.getString(PARAM_EXTRACTION_TYPES)),
        params.getString(PARAM_DB_CONN_STRING),
        null, // NOTE(review): presumably DB user — confirm against the delegated constructor
        null, // NOTE(review): presumably DB password — confirm against the delegated constructor
        params.getDouble(PARAM_COOCURRENCE_THRESHOLD));
}
/**
 * Constructor (integration-test variant). Delegates to another constructor
 * of this class with the same parameter set as {@code WikiLexicalResource}.
 */
public WikiLexicalResourceIT(ConfigurationParams params) throws LexicalResourceException, ConfigurationException {
    // super(
    this(
        params.getFile(PARAM_STOP_WORDS),
        WikiExtractionType.parseExtractionTypeListOfStrings(params.getString(PARAM_EXTRACTION_TYPES)),
        params.getString(PARAM_DB_CONN_STRING),
        null, null,
        params.getDouble(PARAM_COOCURRENCE_THRESHOLD));
    // NOTE(review): commented-out alternative left by the original author — consider deleting.
    // wikiDbServices = new WikiLexicalResourceDBServicesThreadSafeIT(params.getString(PARAM_DB_CONN_STRING), null, null, COOCURENCE_THRESHOLD, WikiExtractionType.parseExtractionTypeListOfStrings(params.getString(PARAM_EXTRACTION_TYPES)));
}
/**
 * Creates an initialized {@link NamedEntityRecognizer} using the given
 * configuration params, or {@code null} when NER is disabled.
 *
 * @param params configuration section; NER runs only if
 *        {@code do_named_entity_recognition} is present and true, using the
 *        classifier at {@code ner-classifier-path}
 * @return an initialized recognizer, or {@code null} if NER is disabled
 * @throws InstrumentCombinationException wrapping configuration or NER
 *         initialization failures
 */
public static NamedEntityRecognizer createNamedEntityRecognizer(ConfigurationParams params) throws InstrumentCombinationException {
    try {
        // NER is off unless explicitly enabled in the configuration.
        boolean doNer = params.containsKey("do_named_entity_recognition")
                && params.getBoolean("do_named_entity_recognition");
        if (!doNer) {
            return null;
        }
        // getFile() already returns a File; getAbsoluteFile() replaces the
        // redundant new File(file.getAbsolutePath()) round-trip with the
        // exact same absolute File.
        NamedEntityRecognizer ner = new StanfordNamedEntityRecognizer(
                params.getFile("ner-classifier-path").getAbsoluteFile());
        ner.init();
        return ner;
    } catch (ConfigurationException e) {
        throw new InstrumentCombinationException("Nested exception with configuration file while initializing the NER", e);
    } catch (NamedEntityRecognizerException e) {
        throw new InstrumentCombinationException("Nested exception while initializing the NER", e);
    }
}
}
// Fragment: resolves the module configurations for rule compilation and
// TruthTeller annotation, then opens the serialized-rules output stream.
ConfigurationParams compilationParams = confFile.getModuleConfiguration(RuleCompilerParameterNames.RULE_COMPILER_PARAMS_MODULE);
ConfigurationParams annotationParams = confFile.getModuleConfiguration(TransformationsConfigurationParametersNames.TRUTH_TELLER_MODULE_NAME);
// Directory holding annotation rule files, and the suffix that selects rule files within it.
File dir = compilationParams.getFile(RuleCompilerParameterNames.ANNOTATION_RUELS_DIRECTORY); //new File(props.getProperty("annotationDirectoryName").trim());
final String ruleFileSuffix = compilationParams.get(RuleCompilerParameterNames.RULE_FILE_SUFFIX); //props.getProperty("graphFileSuffix").trim();
// NOTE(review): only getName() is used, so the output file is created in the
// current working directory, not in the configured file's directory — confirm intended.
String outFileName = annotationParams.getFile(TransformationsConfigurationParametersNames.ANNOTATION_RULES_FILE).getName();
FileOutputStream fos = new FileOutputStream(outFileName);
ObjectOutputStream out = new ObjectOutputStream(fos);
// Fragment: loads the two learned models and constructs the search-phase and
// predictions-phase classifiers from them.
File modelForSearch = this.configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_SEARCH_MODEL);
classifierForSearch = SafeClassifiersIO.loadLinearClassifier(this.teSystemEnvironment.getFeatureVectorStructureOrganizer(), modelForSearch);
File modelForPredictions = this.configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_PREDICTIONS_MODEL);
// NOTE(review): search model is loaded as a linear classifier but the predictions
// model via the generic load() — presumably intentional; confirm.
classifierForPredictions = SafeClassifiersIO.load(this.teSystemEnvironment.getFeatureVectorStructureOrganizer(), modelForPredictions);
logger.info("Loading learning models and constructing classifiers - done.");
// NOTE(review): fragment — the 'if' below has no visible body and the stop-words
// statements appear to come from a different context; preserved byte-for-byte.
lemmatizerRulesFileName = configurationParams.getFile(RTE_ENGINE_GATE_LEMMATIZER_RULES_FILE).getAbsolutePath();
if (LEMMATIZER_SINGLE_INSTANCE)
// Loads the stop-words list used by train/test from the configured file.
File stopWordsFile = transformationsParams.getFile(RTE_TRAIN_AND_TEST_STOP_WORDS);
StopWordsFileLoader stopWordsLoader = new StopWordsFileLoader(stopWordsFile.getPath());
stopWordsLoader.load();
// Fragment: resolves the Stanford NER classifier path; NER defaults to enabled
// unless overridden by the PREPROCESS_DO_NER parameter (body cut off in this view).
String stanfordNERClassifierPath = params.getFile(PREPROCESS_STANFORD_NE_CLASSIFIER_PATH).getAbsolutePath();
boolean doNer = true;
if (params.containsKey(PREPROCESS_DO_NER))
// Fragment: method tail — builds a DIRT-like rule base from its serialization file.
File serFile = params.getFile(TransformationsConfigurationParametersNames.DIRT_LIKE_SER_FILE_PARAMETER_NAME);
return new DirtDBRuleBase(serFile,ruleBaseName,parser);
// Fragment: opens the serialized syntactic-rules file for deserialization
// (the nested try that presumably closes the stream is cut off in this view).
// NOTE(review): Java-native deserialization — make sure the rules file is trusted.
Set<RuleWithConfidenceAndDescription<Info, BasicNode>> rulesWithConfidenceAndDescription;
try {
    ObjectInputStream ois = new ObjectInputStream(new FileInputStream(resourceParams.getFile(TransformationsConfigurationParametersNames.SYNTACTIC_RULES_FILE)));
    try
// NOTE(review): fragment — these look like cases from two different switch
// statements garbled together in this view; preserved byte-for-byte.
break;
case XML_MODEL:
    // Loads the search-phase linear classifier from the configured XML model file.
    this.classifier = SafeClassifiersIO.loadLinearClassifier(teSystemEnvironment.getFeatureVectorStructureOrganizer(), configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_SEARCH_MODEL));
    break;
default:
    throw new TeEngineMlException("Loading from labeled-samples is no longer supported.");
    // NOTE(review): the 'break' below is unreachable after 'throw' and would not
    // compile inside a real switch — likely an artifact of this garbled view.
    break;
case XML_MODEL:
    // Loads the predictions-phase classifier from its configured model file.
    File modelFile = configurationParams.getFile(ConfigurationParametersNames.RTE_TEST_PREDICTIONS_MODEL);
    this.classifierForPredictions = SafeClassifiersIO.load(teSystemEnvironment.getFeatureVectorStructureOrganizer(), modelFile);
    break;