/**
 * Streams every record of the "subscribe" cache table, keeps only those whose
 * trace id is in {@code traceSet} and whose tag id is numeric, and hands each
 * valid (tagId, traceId) pair to {@code perform}. Records with a non-numeric
 * tag id are counted under the "Error"/"tag_id not a number" counter.
 *
 * <p>Fix: the original called {@code getTraceIdAndTagIdFromRecord(record, traceId, tagId)}
 * expecting it to fill the two locals, but Java passes references by value, so
 * both stayed {@code null}. The ids are now read from the record directly
 * (same columns the {@code traceId(Record)}/{@code tagId(Record)} helpers use).
 *
 * @param context job context providing the cache table and error counters
 * @param perform callback invoked once per valid record
 * @throws IOException if reading the cache table fails
 */
private static void doSomething(Context context, PerformingInterface perform) throws IOException {
    for (Record record : context.getContext().readCacheTable("subscribe")) {
        // Out-parameters do not exist in Java: read the columns here.
        String traceId = record.get("trace_id").toString();
        String tagId = record.get("tag_id").toString();
        if (!traceSet.contains(traceId)) {
            continue;
        }
        if (!checkTagIdIsNumber(tagId)) {
            context.getCounter("Error", "tag_id not a number").increment(1);
            continue;
        }
        perform.accept(tagId, traceId);
    }
}
private static void imYourNewMethod(Context context,Boolean isAddTag){ String traceId = null, tagId = null; for (Record record : context.getContext().readCacheTable("subscribe")) { getTraceIdAndTagIdFromRecord(record, traceId, tagId); if (traceSet.contains(traceId) == false) continue; if (!checkTagIdIsNumber(tagId)) { context.getCounter("Error", "tag_id not a number").increment(1); continue; } if(isAddTag){ Vector<String> ret = traceListMap.get(tagId); if (ret == null) { ret = new Vector<String>(); } ret.add(traceId); traceListMap.put(tagId, ret); }else{ tagSet.add(tagId); } }
@Override public List<Map> getCountsFromRollups(Experiment.ID experimentID, Parameters parameters) throws RepositoryException { try { //build and execute SQL queries for counts from rollups String sqlQuery = "select day, bucket_label as bid, cumulative as c, action, impression_count as ic, " + "impression_user_count as iuc, action_count as ac, action_user_count as auc " + "from experiment_rollup where experiment_id = ? and context = ? order by day asc"; return transaction.select(sqlQuery, experimentID, parameters.getContext().getContext()); } catch (Exception e) { throw new RepositoryException("error reading counts from MySQL rollups", e); } }
// NOTE(review): fragment of a larger query-builder method — collects the
// positional SQL arguments (experiment id first, then context) in placeholder
// order. Raw-typed List; prefer List<Object> — TODO confirm against full method.
List params = new ArrayList(); params.add(experimentID); params.add(parameters.getContext().getContext());
/**
 * Returns a lazy stream over the "subscribe" cache records, keeping only
 * records whose trace id is present in {@code traceSet} and whose tag id
 * parses as an integer. Records with a non-numeric tag id are counted under
 * the "Error"/"tag_id not a number" counter and dropped.
 *
 * @param context job context providing the cache table and error counters
 * @return stream of records that pass both checks
 * @throws IOException if reading the cache table fails
 */
private static Stream<Record> validRecords(Context context) throws IOException {
    return context.getContext().readCacheTable("subscribe").stream()
            .filter(record -> traceSet.contains(traceId(record)))
            .filter(record -> {
                try {
                    Integer.parseInt(tagId(record));
                    return true;
                } catch (NumberFormatException e) {
                    // Count, but do not fail on, malformed tag ids.
                    context.getCounter("Error", "tag_id not a number").increment(1);
                    return false;
                }
            });
}

/** Reads the trace_id column of a record as a string. */
private static String traceId(Record record) {
    return record.get("trace_id").toString();
}

/** Reads the tag_id column of a record as a string. */
private static String tagId(Record record) {
    return record.get("tag_id").toString();
}
@Override public List<Map> getImpressionRows(Experiment.ID experimentID, Parameters parameters) throws RepositoryException { try { //build and execute SQL queries for counts Date from_ts = parameters.getFromTime(); Date to_ts = parameters.getToTime(); String sqlBase = "bucket_label as bid, count(user_id) as c, count(distinct user_id) as cu"; String sqlParams = " where experiment_id = ? and context = ?"; List params = new ArrayList(); params.add(experimentID); params.add(parameters.getContext().getContext()); if (from_ts != null) { params.add(from_ts); sqlParams += " and timestamp >= ?"; } if (to_ts != null) { params.add(to_ts); sqlParams += " and timestamp <= ?"; } Object[] bucketSqlData = new Object[params.size()]; params.toArray(bucketSqlData); String sqlImpressions = "select " + sqlBase + " from event_impression" + sqlParams + " group by bucket_label"; List<Map> impressionRows = transaction.select(sqlImpressions, bucketSqlData); return impressionRows; } catch (Exception e) { throw new RepositoryException("error reading actions rows from MySQL", e); } }
LOGGER.debug("Query user assignment export for experimentID={}, at dateHour={}", experimentID.getRawID(), dateHour); if (ignoreNullBucket) { result = userAssignmentExportAccessor.selectBy(experimentID.getRawID(), dateHour, context.getContext(), false); } else { result = userAssignmentExportAccessor.selectBy(experimentID.getRawID(), dateHour, context.getContext());
parameters.getContext().getContext());
// NOTE(review): fragment of a larger query-builder method — seeds the
// positional SQL arguments (experiment id, then context) in placeholder
// order. Raw-typed List; prefer List<Object> — TODO confirm against full method.
List params = new ArrayList(); params.add(experimentID); params.add(parameters.getContext().getContext());
@Override public List<Map> getRollupRows(Experiment.ID experimentId, String rollupDate, Parameters parameters) throws RepositoryException { // TODO enable direct mapping of DateMidnight List rollupRows; try { //build and execute SQL queries for counts from rollups String sqlQuery = "select bucket_label as bid, action, impression_count as ic, impression_user_count as iuc, " + "action_count as ac, action_user_count as auc from experiment_rollup " + "where experiment_id = ? and cumulative = ? and day = ? and context = ?"; rollupRows = transaction.select(sqlQuery, experimentId, true, rollupDate, parameters.getContext().getContext()); return rollupRows; } catch (Exception e) { throw new RepositoryException("error reading rollup rows from MySQL", e); } }
/**
 * Populates existing user assignments for the given user, application and
 * context. The supplied experiment map resolves experiment objects without an
 * extra database call; index rows whose experiment id is absent from the map
 * are logged at debug level and skipped.
 *
 * @param userID        user id
 * @param appLabel      application label
 * @param context       environment context
 * @param experimentMap experiment map used to look up experiment objects
 * @return list of (experiment, bucket label) pairs; the literal string "null"
 *         stands in for a missing bucket
 */
@Override
@Timed
public List<Pair<Experiment, String>> getAssignments(User.ID userID,
                                                     Application.Name appLabel,
                                                     Context context,
                                                     Map<Experiment.ID, Experiment> experimentMap) {
    final List<Pair<Experiment, String>> assignments = new ArrayList<>();
    getUserIndexStream(userID.toString(), appLabel.toString(), context.getContext())
            .forEach(row -> {
                final Experiment experiment =
                        experimentMap.get(Experiment.ID.valueOf(row.getExperimentId()));
                if (nonNull(experiment)) {
                    final String bucket = Optional.ofNullable(row.getBucket()).orElse("null");
                    assignments.add(new ImmutablePair<>(experiment, bucket));
                } else {
                    LOGGER.debug("{} experiment id is not present in the experimentMap...",
                            row.getExperimentId());
                }
            });
    return assignments;
}
/**
 * Adds the environment context (for example PROD or QA) to the segmentation
 * profile under the "context" attribute, after first merging the HTTP headers
 * into the profile as new segments.
 *
 * @param segmentationProfile the segmentation profile to enrich
 * @param headers             HTTP headers merged into the profile as segments
 * @param context             environment context stored under "context"
 * @return the merged profile containing the header and context attributes
 */
protected SegmentationProfile mergeHeaderAndContextWithProfile(SegmentationProfile segmentationProfile,
                                                               HttpHeaders headers, Context context) {
    SegmentationProfile merged = mergeHeaderWithProfile(segmentationProfile, headers);
    if (merged == null) {
        // Header merge produced nothing: start from an empty profile.
        Map emptyProfile = new HashMap();
        merged = new SegmentationProfile.Builder(emptyProfile).build();
    }
    merged.addAttribute("context", context.getContext());
    return merged;
}
/**
 * Deletes the experiment_user_index row linking the given user to the given
 * experiment for this context and application.
 *
 * <p>Fix: the failure message previously rendered as
 * "user: Xto experiment: Y" (missing separator); a space has been added.
 *
 * @param userID       user whose index row is removed
 * @param experimentID experiment whose index row is removed
 * @param context      environment context of the row
 * @param appName      application the row belongs to
 * @throws RepositoryException wrapping Cassandra timeout/availability failures
 */
void removeIndexExperimentsToUser(User.ID userID, Experiment.ID experimentID, Context context,
                                  Application.Name appName) {
    try {
        experimentUserIndexAccessor.deleteBy(userID.toString(),
                experimentID.getRawID(),
                context.getContext(),
                appName.toString());
    } catch (WriteTimeoutException | UnavailableException | NoHostAvailableException e) {
        throw new RepositoryException(
                "Could not delete index from experiment_user_index for user: " + userID
                        + " to experiment: " + experimentID, e);
    }
}
+ experimentID + "\" userID = \"" + userID + " context=\"" + context.getContext() + "\""); });
/**
 * Writes an experiment_user_index row for the given assignment. The accessor
 * overload that takes a bucket label is used only when the assignment actually
 * carries one; otherwise the bucket-less overload is called.
 *
 * @param assignment the assignment to index
 * @throws RepositoryException wrapping Cassandra timeout/availability failures
 */
void indexExperimentsToUser(Assignment assignment) {
    try {
        final String user = assignment.getUserID().toString();
        final String ctx = assignment.getContext().getContext();
        final String app = assignment.getApplicationName().toString();
        if (assignment.getBucketLabel() != null) {
            experimentUserIndexAccessor.insertBy(user, ctx, app,
                    assignment.getExperimentID().getRawID(),
                    assignment.getBucketLabel().toString());
        } else {
            experimentUserIndexAccessor.insertBy(user, ctx, app,
                    assignment.getExperimentID().getRawID());
        }
    } catch (WriteTimeoutException | UnavailableException | NoHostAvailableException e) {
        throw new RepositoryException("Could not index experiment to user \"" + assignment + "\"", e);
    }
}
@Override public void assignUserToExports(Assignment assignment, Date date) { final DateHour dateHour = new DateHour(); dateHour.setDateHour(date); //TODO: why is this not derived from assignment.getCreated() instead? final Date day_hour = dateHour.getDayHour(); try { if (isNull(assignment.getBucketLabel())) { userAssignmentExportAccessor.insertBy(assignment.getExperimentID().getRawID(), assignment.getUserID().toString(), assignment.getContext().getContext(), date, day_hour, "NO_ASSIGNMENT", true); } else { userAssignmentExportAccessor.insertBy(assignment.getExperimentID().getRawID(), assignment.getUserID().toString(), assignment.getContext().getContext(), date, day_hour, assignment.getBucketLabel().toString(), false); } } catch (WriteTimeoutException | UnavailableException | NoHostAvailableException e) { throw new RepositoryException("Could not save user assignment in user_assignment_export \"" + assignment + "\"", e); } }
? event.getContext().getContext() : "PROD";