@Test
public void lessItemsThanBatchSize() throws Exception {
  // Two users with overlapping ratings over 4 distinct items — far fewer items
  // than any batch size, so everything fits into a single batch.
  FastByIDMap<PreferenceArray> preferencesPerUser = new FastByIDMap<PreferenceArray>();
  preferencesPerUser.put(1, new GenericUserPreferenceArray(Arrays.asList(
      new GenericPreference(1, 1, 1),
      new GenericPreference(1, 2, 1),
      new GenericPreference(1, 3, 1))));
  preferencesPerUser.put(2, new GenericUserPreferenceArray(Arrays.asList(
      new GenericPreference(2, 1, 1),
      new GenericPreference(2, 2, 1),
      new GenericPreference(2, 4, 1))));

  DataModel model = new GenericDataModel(preferencesPerUser);
  ItemBasedRecommender itemRecommender =
      new GenericItemBasedRecommender(model, new TanimotoCoefficientSimilarity(model));

  // Must complete without error even though the item count is below the batch size.
  BatchItemSimilarities similarities = new MultithreadedBatchItemSimilarities(itemRecommender, 10);
  similarities.computeItemSimilarities(1, 1, mock(SimilarItemsWriter.class));
}
@Test public void higherDegreeOfParallelismThanBatches() throws Exception { FastByIDMap<PreferenceArray> userData = new FastByIDMap<PreferenceArray>(); userData.put(1, new GenericUserPreferenceArray(Arrays.asList(new GenericPreference(1, 1, 1), new GenericPreference(1, 2, 1), new GenericPreference(1, 3, 1)))); userData.put(2, new GenericUserPreferenceArray(Arrays.asList(new GenericPreference(2, 1, 1), new GenericPreference(2, 2, 1), new GenericPreference(2, 4, 1)))); DataModel dataModel = new GenericDataModel(userData); ItemBasedRecommender recommender = new GenericItemBasedRecommender(dataModel, new TanimotoCoefficientSimilarity(dataModel)); BatchItemSimilarities batchSimilarities = new MultithreadedBatchItemSimilarities(recommender, 10); try { // Batch size is 100, so we only get 1 batch from 3 items, but we use a degreeOfParallelism of 2 batchSimilarities.computeItemSimilarities(2, 1, mock(SimilarItemsWriter.class)); fail(); } catch (IOException e) {} } }