/**
 * Builds a MongoDB-backed streams data source.
 *
 * <p>The full URI string is handed to the data source untouched; only the
 * output serializer is derived from the declared fields.
 *
 * @param uri               connection URI for the Mongo collection
 * @param inputFormatClass  unused for this sink-style source
 * @param outputFormatClass serializer class name for outgoing tuples
 * @param properties        data-source configuration
 * @param fields            declared table columns
 * @return a configured {@code MongoStreamsDataSource}
 */
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass,
        Properties properties, List<FieldInfo> fields) {
    final List<String> columnNames = FieldInfoUtils.getFieldNames(fields);
    final IOutputSerializer outputSerializer =
            SerdeUtils.getSerializer(outputFormatClass, properties, columnNames);
    return new MongoStreamsDataSource(uri.toString(), properties, outputSerializer);
}
/**
 * Returns the name of every field, preserving input order.
 *
 * <p>Materializes the result eagerly instead of using Guava's
 * {@code Lists.transform}, whose lazy view re-invokes the extractor on
 * every element access and keeps the source list alive.
 *
 * @param fields field descriptors (non-null)
 * @return field names in the same order as {@code fields}
 */
public static List<String> getFieldNames(List<FieldInfo> fields) {
    // Fully qualified to avoid relying on an ArrayList import in this file.
    List<String> names = new java.util.ArrayList<>(fields.size());
    for (FieldInfo field : fields) {
        names.add(field.name());
    }
    return names;
}
/**
 * Recursively converts an Avro-decoded value into plain Java types,
 * replacing every Avro {@code Utf8} with a Java {@code String}.
 *
 * @param value Avro object (Utf8, Map, GenericData.Array, or any other value)
 * @return equivalent Java object with all Utf8 instances converted to String
 */
@SuppressWarnings("unchecked")
public static Object convertAvroUtf8(Object value) {
    if (value instanceof Utf8) {
        return value.toString();
    }
    if (value instanceof Map<?, ?>) {
        return convertAvroUtf8Map((Map<Object, Object>) value);
    }
    if (value instanceof GenericData.Array) {
        return convertAvroUtf8Array((GenericData.Array) value);
    }
    // Any other type (numbers, booleans, null, ...) passes through unchanged.
    return value;
}
/**
 * Builds a socket-backed streams data source from {@code host:port} in the URI.
 *
 * @throws RuntimeException if the URI carries no explicit port
 */
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass,
        Properties properties, List<FieldInfo> fields) {
    // A socket endpoint needs an explicit port; URI.getPort() yields -1 when absent.
    final int port = uri.getPort();
    if (port == -1) {
        throw new RuntimeException("Port information is not available. URI: " + uri);
    }
    final String host = uri.getHost();
    final List<String> columnNames = FieldInfoUtils.getFieldNames(fields);
    final Scheme inputScheme = SerdeUtils.getScheme(inputFormatClass, properties, columnNames);
    final IOutputSerializer outputSerializer =
            SerdeUtils.getSerializer(outputFormatClass, properties, columnNames);
    return new SocketDataSourcesProvider.SocketStreamsDataSource(host, port, inputScheme, outputSerializer);
}
}
/**
 * Builds a Kafka-backed streams data source.
 *
 * <p>The table must declare exactly the columns carried by each record and at
 * least one primary-key column; the topic name is taken from the URI host and
 * the brokers from the {@code bootstrap-servers} query parameter.
 */
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass,
        Properties properties, List<FieldInfo> fields) {
    // Collect the column names and locate the (required) primary-key column.
    List<String> columnNames = new ArrayList<>();
    int primaryKeyIndex = -1;
    int position = 0;
    for (FieldInfo field : fields) {
        columnNames.add(field.name());
        if (field.isPrimary()) {
            primaryKeyIndex = position;
        }
        position++;
    }
    Preconditions.checkState(primaryKeyIndex != -1, "Kafka stream table must have a primary key");

    Scheme inputScheme = SerdeUtils.getScheme(inputFormatClass, properties, columnNames);

    // The broker list is mandatory and comes from the URI query string.
    Map<String, String> uriParams = parseUriParams(uri.getQuery());
    String bootstrapServers = uriParams.get(URI_PARAMS_BOOTSTRAP_SERVERS);
    Preconditions.checkNotNull(bootstrapServers, "bootstrap-servers must be specified");
    String topic = uri.getHost();

    // Raw ByteBuffer deserializers; the scheme adapter does the actual decoding.
    // A random group id keeps independent topology runs from sharing offsets.
    KafkaSpoutConfig<ByteBuffer, ByteBuffer> spoutConfig =
            new KafkaSpoutConfig.Builder<ByteBuffer, ByteBuffer>(bootstrapServers, topic)
                    .setProp(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class)
                    .setProp(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class)
                    .setProp(ConsumerConfig.GROUP_ID_CONFIG, "storm-sql-kafka-" + UUID.randomUUID().toString())
                    .setRecordTranslator(new RecordTranslatorSchemeAdapter(inputScheme))
                    .build();

    IOutputSerializer outputSerializer =
            SerdeUtils.getSerializer(outputFormatClass, properties, columnNames);
    return new KafkaStreamsDataSource(spoutConfig, bootstrapServers, topic, properties, outputSerializer);
}
/**
 * Converts every key and value of an Avro map via {@code convertAvroUtf8},
 * producing a plain {@code HashMap}.
 */
private static Object convertAvroUtf8Map(Map<Object, Object> value) {
    Map<Object, Object> converted = new HashMap<>(value.size());
    for (Map.Entry<Object, Object> entry : value.entrySet()) {
        converted.put(convertAvroUtf8(entry.getKey()), convertAvroUtf8(entry.getValue()));
    }
    return converted;
}
/**
 * Decodes a delimited UTF-8 record into one value per declared field.
 *
 * @param ser raw record bytes
 * @return one element per field, in declaration order
 * @throws IllegalArgumentException if the token count does not match the schema
 */
@Override
public List<Object> deserialize(ByteBuffer ser) {
    final String line = new String(Utils.toByteArray(ser), StandardCharsets.UTF_8);
    final List<String> tokens = org.apache.storm.sql.runtime.utils.Utils.split(line, delimiter);
    // Every declared field must be present — anything else is a malformed record.
    Preconditions.checkArgument(tokens.size() == fieldNames.size(), "Invalid schema");
    return new ArrayList<Object>(tokens);
}
/**
 * Builds an HDFS-backed streams data source.
 *
 * <p>The URI is passed through verbatim as the write location; only the
 * output serializer is derived from the declared fields.
 */
@Override
public ISqlStreamsDataSource constructStreams(URI uri, String inputFormatClass, String outputFormatClass,
        Properties properties, List<FieldInfo> fields) {
    final IOutputSerializer outputSerializer = SerdeUtils.getSerializer(
            outputFormatClass, properties, FieldInfoUtils.getFieldNames(fields));
    return new HdfsStreamsDataSource(uri.toString(), properties, outputSerializer);
}
/**
 * Builds a socket-backed Trident data source from {@code host:port} in the URI.
 *
 * @throws RuntimeException if the URI carries no explicit port
 */
@Override
public ISqlTridentDataSource constructTrident(URI uri, String inputFormatClass, String outputFormatClass,
        Properties properties, List<FieldInfo> fields) {
    // A socket endpoint needs an explicit port; URI.getPort() yields -1 when absent.
    final int port = uri.getPort();
    if (port == -1) {
        throw new RuntimeException("Port information is not available. URI: " + uri);
    }
    final String host = uri.getHost();
    final List<String> columnNames = FieldInfoUtils.getFieldNames(fields);
    final Scheme inputScheme = SerdeUtils.getScheme(inputFormatClass, properties, columnNames);
    final IOutputSerializer outputSerializer =
            SerdeUtils.getSerializer(outputFormatClass, properties, columnNames);
    return new SocketTridentDataSource(inputScheme, outputSerializer, host, port);
}
}
/**
 * Recursively converts an Avro-decoded value into plain Java types,
 * replacing every Avro {@code Utf8} with a Java {@code String}.
 *
 * @param value Avro object (Utf8, Map, GenericData.Array, or any other value)
 * @return equivalent Java object with all Utf8 instances converted to String
 */
@SuppressWarnings("unchecked")
public static Object convertAvroUtf8(Object value) {
    final Object result;
    if (value instanceof Utf8) {
        result = value.toString();
    } else if (value instanceof Map<?, ?>) {
        result = convertAvroUtf8Map((Map<Object, Object>) value);
    } else if (value instanceof GenericData.Array) {
        result = convertAvroUtf8Array((GenericData.Array) value);
    } else {
        // Any other type (numbers, booleans, null, ...) passes through unchanged.
        result = value;
    }
    return result;
}
/**
 * Converts every element of an Avro array via {@code convertAvroUtf8},
 * producing a plain {@code ArrayList}.
 */
private static Object convertAvroUtf8Array(GenericData.Array value) {
    final int size = value.size();
    List<Object> converted = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
        converted.add(convertAvroUtf8(value.get(i)));
    }
    return converted;
}
}
/**
 * Returns the name of every field, preserving input order.
 *
 * <p>Materializes the result eagerly instead of using Guava's
 * {@code Lists.transform}, whose lazy view re-invokes the extractor on
 * every element access and keeps the source list alive.
 *
 * @param fields field descriptors (non-null)
 * @return field names in the same order as {@code fields}
 */
public static List<String> getFieldNames(List<FieldInfo> fields) {
    // Fully qualified to avoid relying on an ArrayList import in this file.
    List<String> names = new java.util.ArrayList<>(fields.size());
    for (FieldInfo field : fields) {
        names.add(field.name());
    }
    return names;
}
/**
 * Decodes a delimited UTF-8 record into one value per declared field.
 *
 * @param ser raw record bytes
 * @return one element per field, in declaration order
 * @throws IllegalArgumentException if the token count does not match the schema
 */
@Override
public List<Object> deserialize(ByteBuffer ser) {
    final String text = new String(Utils.toByteArray(ser), StandardCharsets.UTF_8);
    final List<String> columns = org.apache.storm.sql.runtime.utils.Utils.split(text, delimiter);
    // Reject records whose column count differs from the declared schema.
    Preconditions.checkArgument(columns.size() == fieldNames.size(), "Invalid schema");
    final ArrayList<Object> row = new ArrayList<>(fieldNames.size());
    row.addAll(columns);
    return row;
}
/**
 * Builds a Redis-backed streams data source (single-node or cluster).
 *
 * <p>Connection details come from the URI (host, port, db index, password);
 * timeout and cluster mode come from {@code props}.
 *
 * <p>NOTE(review): in cluster mode the password and db index parsed from the
 * URI are not passed to {@code JedisClusterConfig} — looks intentional given
 * Redis Cluster's single-db model, but confirm password handling upstream.
 *
 * @throws IllegalArgumentException if the URI is not a valid Redis URI
 * @throws NumberFormatException    if the timeout property is not an integer
 */
@Override
public ISqlStreamsDataSource constructStreams(
        URI uri, String inputFormatClass, String outputFormatClass,
        Properties props, List<FieldInfo> fields) {
    Preconditions.checkArgument(JedisURIHelper.isValid(uri), "URI is not valid for Redis: " + uri);
    String host = uri.getHost();
    // Fall back to the default Redis port when the URI omits one.
    int port = uri.getPort() != -1 ? uri.getPort() : DEFAULT_REDIS_PORT;
    int dbIdx = JedisURIHelper.getDBIndex(uri);
    String password = JedisURIHelper.getPassword(uri);

    int timeout = Integer.parseInt(props.getProperty(PROPERTY_REDIS_TIMEOUT, String.valueOf(DEFAULT_TIMEOUT)));
    // parseBoolean avoids the needless Boolean boxing of Boolean.valueOf.
    boolean clusterMode = Boolean.parseBoolean(props.getProperty(PROPERTY_USE_REDIS_CLUSTER, "false"));

    List<String> fieldNames = FieldInfoUtils.getFieldNames(fields);
    IOutputSerializer serializer = SerdeUtils.getSerializer(outputFormatClass, props, fieldNames);

    if (clusterMode) {
        JedisClusterConfig config = new JedisClusterConfig.Builder()
                .setNodes(Collections.singleton(new InetSocketAddress(host, port)))
                .setTimeout(timeout)
                .build();
        return new RedisClusterStreamsDataSource(config, props, fields, serializer);
    } else {
        JedisPoolConfig config = new JedisPoolConfig(host, port, timeout, password, dbIdx);
        return new RedisStreamsDataSource(config, props, fields, serializer);
    }
}
@Override public List<Object> deserialize(ByteBuffer ser) { try { Schema schema = schemas.getSchema(schemaString); DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema); BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(Utils.toByteArray(ser), null); GenericRecord record = reader.read(null, decoder); ArrayList<Object> list = new ArrayList<>(fieldNames.size()); for (String field : fieldNames) { Object value = record.get(field); // Avro strings are stored using a special Avro Utf8 type instead of using Java primitives list.add(SerdeUtils.convertAvroUtf8(value)); } return list; } catch (IOException e) { throw new RuntimeException(e); } }
/**
 * Converts every key and value of an Avro map via {@code convertAvroUtf8},
 * producing a plain {@code HashMap}.
 */
public static Object convertAvroUtf8Map(Map<Object, Object> value) {
    Map<Object, Object> result = new HashMap<>(value.size());
    for (Map.Entry<Object, Object> entry : value.entrySet()) {
        Object key = convertAvroUtf8(entry.getKey());
        Object val = convertAvroUtf8(entry.getValue());
        result.put(key, val);
    }
    return result;
}
/**
 * Converts every element of an Avro array via {@code convertAvroUtf8},
 * producing a plain {@code ArrayList}.
 */
public static Object convertAvroUtf8Array(GenericData.Array value) {
    List<Object> result = new ArrayList<>(value.size());
    for (Object element : value) {
        result.add(convertAvroUtf8(element));
    }
    return result;
}
}
@Override public List<Object> deserialize(ByteBuffer ser) { try { Schema schema = schemas.getSchema(schemaString); DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema); BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(Utils.toByteArray(ser), null); GenericRecord record = reader.read(null, decoder); ArrayList<Object> list = new ArrayList<>(fieldNames.size()); for (String field : fieldNames) { Object value = record.get(field); // Avro strings are stored using a special Avro Utf8 type instead of using Java primitives list.add(SerdeUtils.convertAvroUtf8(value)); } return list; } catch (IOException e) { throw new RuntimeException(e); } }