@Override
public void putNext(Tuple t) throws ExecException, IOException {
    // Translate the first two tuple fields into the Writable key/value pair
    // and hand them to the underlying record writer.
    try {
        translatePigDataTypeToWritable(t, 0, key);
        translatePigDataTypeToWritable(t, 1, value);
        writer.write(key, value);
    } catch (Exception ex) {
        // Build one diagnostic message, log the flattened trace separately,
        // then surface everything through a BackendException.
        String trace = StackTraceExtractor.getStackTrace(ex);
        String message = "Unable to write key/value pair to output, key: " + key.getClass()
                + ", value: " + value.getClass() + ", writer " + writer + " ex " + ex;
        LOG.error(trace.replaceAll("\n", " -- "));
        LOG.error(message);
        throw new BackendException(message + " -- " + trace);
    }
}
@Override
public Map exec(Tuple input) throws IOException {
    // Attach the bag from field 1 to the map from field 0 under the
    // field name supplied in field 2, and return the mutated map.
    try {
        @SuppressWarnings("unchecked")
        Map<String, Object> result = (Map<String, Object>) input.get(0);
        DataBag bag = (DataBag) input.get(1);
        String name = (String) input.get(2);
        result.put(name, bag);
        return result;
    } catch (Exception e) {
        logger.error("Error in processing input row:", e);
        throw new IOException("Caught exception processing input row:\n"
                + StackTraceExtractor.getStackTrace(e));
    }
}
}
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
// Records an extraction failure: logs it twice (structured message plus raw
// stack trace) and, when a reporter is available, bumps the failure counters.
private void logException(String exceptionClass, String path, Exception e) {
    LOGGER.error(exceptionClass + " for the xml {}", path, e);
    LOGGER.error(StackTraceExtractor.getStackTrace(e), path);
    if (myReporter == null) {
        return; // no counters outside a MapReduce context
    }
    // NOTE(review): "Exctaction" typo is preserved — counter group names are runtime behavior.
    myReporter.getCounter("Orcid Extraction MAJOR Problem", "Exception").increment(1);
    myReporter.getCounter("Orcid Exctaction Summary", "TOTAL FAILURE").increment(1);
    myReporter.getCounter("Orcid Exctaction Summary", "TOTAL").increment(1);
}
}
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
@Override
public Map exec(Tuple input) throws IOException {
    // Deserializes a DocumentMetadata protobuf from field 0 and projects its
    // text fields into a map: key, title, keywords, abstract, categories.
    try {
        DataByteArray protoMetadata = (DataByteArray) input.get(0);
        DocumentMetadata metadata = DocumentMetadata.parseFrom(protoMetadata.get());

        List<String> titleList = new ArrayList<String>();
        for (TextWithLanguage title : metadata.getBasicMetadata().getTitleList()) {
            titleList.add(title.getText());
        }
        String titles = Joiner.on(" ").join(titleList);

        // BUG FIX: this loop previously iterated getTitleList() again, so the
        // "abstract" entry duplicated the titles. Iterate the abstract list instead.
        // TODO(review): confirm the generated getter name matches the proto field.
        List<String> abstractsList = new ArrayList<String>();
        for (TextWithLanguage documentAbstract : metadata.getBasicMetadata().getDocumentAbstractList()) {
            abstractsList.add(documentAbstract.getText());
        }
        String abstracts = Joiner.on(" ").join(abstractsList);

        Map<String, Object> map = new HashMap<String, Object>();
        map.put("key", metadata.getKey());
        map.put("title", titles);
        map.put("keywords", getConcatenated(metadata.getKeywordsList()));
        map.put("abstract", abstracts);
        map.put("categories", getCategories(metadata.getBasicMetadata().getClassifCodeList()));
        return map;
    } catch (Exception e) {
        logger.error("Error in processing input row:", e);
        throw new IOException("Caught exception processing input row:\n"
                + StackTraceExtractor.getStackTrace(e));
    }
}
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
@Override
public Map exec(Tuple input) throws IOException {
    // Parses the serialized DocumentMetadata from field 0 and builds the
    // language map; field 1 is an int limit passed through to the generators.
    // Dispatch depends on whether a concrete language was configured.
    try {
        DataByteArray serialized = (DataByteArray) input.get(0);
        int limit = (Integer) input.get(1);
        DocumentMetadata metadata = DocumentMetadata.parseFrom(serialized.get());
        return (language == null)
                ? generateAllLanguageMap(metadata, limit)
                : generateConcreteLanguageMap(metadata, limit);
    } catch (Exception e) {
        logger.error("Error in processing input row:", e);
        throw new IOException("Caught exception processing input row:\n"
                + StackTraceExtractor.getStackTrace(e));
    }
}
@Override
public Tuple exec(Tuple input) throws IOException {
    // Extracts the document key from a serialized DocumentMetadata (field 0)
    // and returns it as a single-field tuple. Null/empty input yields null.
    if (input == null || input.size() == 0) {
        return null;
    }
    try {
        // FIX: the original cast input.get(0) to DataByteArray twice; once is enough.
        DataByteArray dba = (DataByteArray) input.get(0);
        DocumentMetadata dm = DocumentMetadata.parseFrom(dba.get());
        String key = dm.getKey();
        return TupleFactory.getInstance().newTuple(Arrays.asList(new Object[]{key}));
    } catch (Exception e) {
        logger.error("Error in processing input row:", e);
        throw new IOException("Caught exception processing input row:\n"
                + StackTraceExtractor.getStackTrace(e));
    }
}
}
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
// Merges a multi-group input tuple into a single DocumentWrapper.
// The "main" group (at mainGroupIndex) is parsed first as the base document;
// each remaining action's merge strategy is applied on top of it via
// reflection. A failing strategy is logged and skipped, so one bad strategy
// does not abort the whole merge.
@Override
public Tuple exec(Tuple tuple) throws IOException {
    checkCorrectness(tuple);
    try {
        // Base document built from the main group's block of the tuple.
        DocumentWrapper.Builder dwb = mainBlockParsing(tuple);
        int i = -1;
        for (String s : actions) {
            i++;
            if (i == mainGroupIndex) continue; // main group already parsed above
            try {
                // Strategy class name is resolved from the action key and
                // instantiated reflectively per action.
                IMerge merger = (IMerge) Class.forName(
                        "pl.edu.icm.coansys.output.merge.all.strategies." + MergeMapping.hm.get(s))
                        .newInstance();
                // NOTE(review): 2*i+1 presumably addresses group i's payload field
                // in the flattened tuple layout — confirm against the caller.
                dwb = merger.execute(tuple, 2 * i + 1, dwb);
            } catch (Exception e) {
                // Best-effort: log and let the remaining strategies run.
                LOGGER.error(ERROR_STRING, e);
            }
        }
        Tuple result = tupleFactory.newTuple();
        result.append(docId);
        result.append(new DataByteArray(dwb.build().toByteArray()));
        return result;
    } catch (IOException e) {
        LOGGER.error(StackTraceExtractor.getStackTrace(e), e);
        throw e;
    }
}
@Override
public Tuple exec(Tuple input) throws IOException {
    // Unwraps the DocumentMetadata part of a serialized DocumentWrapper (field 0)
    // and returns it re-serialized as a single-field tuple. Null/empty input yields null.
    if (input == null || input.size() == 0) {
        return null;
    }
    try {
        // FIX: the original cast input.get(0) to DataByteArray twice; once is enough.
        DataByteArray dba = (DataByteArray) input.get(0);
        DocumentWrapper dm = DocumentWrapper.parseFrom(dba.get());
        Object[] fields = new Object[]{new DataByteArray(dm.getDocumentMetadata().toByteArray())};
        return TupleFactory.getInstance().newTuple(Arrays.asList(fields));
    } catch (Exception e) {
        logger.error("Error in processing input row:", e);
        throw new IOException("Caught exception processing input row:\n"
                + StackTraceExtractor.getStackTrace(e));
    }
}
}
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));
@Override
public Schema outputSchema(Schema input) {
    // Declares the output as a bag of single-field tuples: term:(value:chararray).
    try {
        Schema.FieldSchema valueField = new Schema.FieldSchema("value", DataType.CHARARRAY);
        Schema termSchema = new Schema(
                new Schema.FieldSchema("term", new Schema(valueField), DataType.TUPLE));
        String schemaName = getSchemaName(this.getClass().getName().toLowerCase(), input);
        return new Schema(new Schema.FieldSchema(schemaName, termSchema, DataType.BAG));
    } catch (Exception e) {
        // Pig treats a null schema as "unknown" — log and fall back rather than fail.
        log.error("Error in the output Schema creation", e);
        log.error(StackTraceExtractor.getStackTrace(e));
        return null;
    }
}
@Override
public Schema outputSchema(Schema input) {
    // Declares the output as a bag of single-field tuples: term:(value:chararray).
    try {
        Schema.FieldSchema valueField = new Schema.FieldSchema("value", DataType.CHARARRAY);
        Schema termSchema = new Schema(
                new Schema.FieldSchema("term", new Schema(valueField), DataType.TUPLE));
        String schemaName = getSchemaName(this.getClass().getName().toLowerCase(), input);
        return new Schema(new Schema.FieldSchema(schemaName, termSchema, DataType.BAG));
    } catch (Exception e) {
        // Pig treats a null schema as "unknown" — log and fall back rather than fail.
        log.error("Error in the output Schema creation", e);
        log.error(StackTraceExtractor.getStackTrace(e));
        return null;
    }
}

private static final String SPACE = " ";
public Tuple exec(Tuple input) throws IOException {
    // Emits (min(docA, docB), max(docA, docB), field2) so that document pairs
    // are canonically ordered. Equal ids are invalid: the pair is dropped (null).
    try {
        String docA = (String) input.get(0);
        String docB = (String) input.get(1);
        // FIX: compare once instead of twice, and drop the throw-then-catch
        // control flow the original used for the equal-ids case.
        int cmp = docA.compareTo(docB);
        if (cmp == 0) {
            System.out.println("DocIdA == DocIdB");
            return null;
        }
        Tuple out = TupleFactory.getInstance().newTuple();
        if (cmp < 0) {
            out.append(docA);
            out.append(docB);
        } else {
            out.append(docB);
            out.append(docA);
        }
        out.append(input.get(2));
        return out;
    } catch (Exception e) {
        // NOTE(review): swallowing and returning null mirrors the original
        // behavior; consider rethrowing as IOException and using a logger
        // instead of stdout.
        System.out.println(StackTraceExtractor.getStackTrace(e));
        return null;
    }
}
logger.error("Error in processing input row:", e); throw new IOException("Caught exception processing input row:\n" + StackTraceExtractor.getStackTrace(e));