/**
 * Creates a deep copy of the given serializable object via Java serialization,
 * resolving classes with the object's own class loader.
 *
 * @param obj Object to clone
 * @param <T> Type of the object to clone
 * @return The cloned object, or {@code null} if {@code obj} is {@code null}
 *
 * @throws IOException Thrown if the serialization or deserialization process fails.
 * @throws ClassNotFoundException Thrown if any of the classes referenced by the object
 *             cannot be resolved during deserialization.
 */
public static <T extends Serializable> T clone(T obj) throws IOException, ClassNotFoundException {
	// Delegate to the class-loader-aware overload; null is passed through unchanged.
	return obj == null ? null : clone(obj, obj.getClass().getClassLoader());
}
/**
 * Deep-copies the given serializer wrapper by cloning it through Java
 * serialization, using the current thread's context class loader.
 *
 * @param original the serializer wrapper to copy
 * @return a deep copy of {@code original}
 * @throws CloneFailedException if serialization-based cloning fails
 */
private ExecutionConfig.SerializableSerializer<? extends Serializer<?>> deepCopySerializer(
		ExecutionConfig.SerializableSerializer<? extends Serializer<?>> original) {
	final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
	try {
		return InstantiationUtil.clone(original, contextClassLoader);
	} catch (IOException | ClassNotFoundException ex) {
		throw new CloneFailedException(
			"Could not clone serializer instance of class " + original.getClass(), ex);
	}
}
/**
 * Creates a copy of this sink configured with the given field names and types.
 *
 * <p>Validates that the given field types match the SQL types of the underlying
 * JDBC output format, both in count and in type mapping; an
 * {@code IllegalArgumentException} is thrown otherwise.
 *
 * @param fieldNames the names of the table fields
 * @param fieldTypes the types of the table fields
 * @return a configured deep copy of this sink
 */
@Override
public TableSink<Row> configure(String[] fieldNames, TypeInformation<?>[] fieldTypes) {
	int[] types = outputFormat.getTypesArray();

	// Join directly with Collectors.joining instead of collecting to an
	// intermediate List and re-joining with String.join.
	String sinkSchema = IntStream.of(types)
		.mapToObj(JDBCTypeUtil::getTypeName)
		.collect(Collectors.joining(", "));
	String tableSchema = Stream.of(fieldTypes)
		.map(JDBCTypeUtil::getTypeName)
		.collect(Collectors.joining(", "));
	String msg = String.format("Schema of output table is incompatible with JDBCAppendTableSink schema. " +
		"Table schema: [%s], sink schema: [%s]", tableSchema, sinkSchema);

	// Field count and the per-field SQL type mapping must both match.
	Preconditions.checkArgument(fieldTypes.length == types.length, msg);
	for (int i = 0; i < types.length; ++i) {
		Preconditions.checkArgument(
			JDBCTypeUtil.typeInformationToSqlType(fieldTypes[i]) == types[i], msg);
	}

	JDBCAppendTableSink copy;
	try {
		// Deep-copy the output format so the configured sink shares no mutable state.
		copy = new JDBCAppendTableSink(InstantiationUtil.clone(outputFormat));
	} catch (IOException | ClassNotFoundException e) {
		// configure() cannot declare checked exceptions; wrap and rethrow.
		throw new RuntimeException(e);
	}

	copy.fieldNames = fieldNames;
	copy.fieldTypes = fieldTypes;
	return copy;
}
private SimpleConsumerThread<T> createAndStartSimpleConsumerThread( List<KafkaTopicPartitionState<TopicAndPartition>> seedPartitions, Node leader, ExceptionProxy errorHandler) throws IOException, ClassNotFoundException { // each thread needs its own copy of the deserializer, because the deserializer is // not necessarily thread safe final KeyedDeserializationSchema<T> clonedDeserializer = InstantiationUtil.clone(deserializer, runtimeContext.getUserCodeClassLoader()); // seed thread with list of fetch partitions (otherwise it would shut down immediately again SimpleConsumerThread<T> brokerThread = new SimpleConsumerThread<>( this, errorHandler, kafkaConfig, leader, seedPartitions, unassignedPartitionsQueue, clonedDeserializer, invalidOffsetBehavior); brokerThread.setName(String.format("SimpleConsumer - %s - broker-%s (%s:%d)", runtimeContext.getTaskName(), leader.id(), leader.host(), leader.port())); brokerThread.setDaemon(true); brokerThread.start(); LOG.info("Starting thread {}", brokerThread.getName()); return brokerThread; }
/**
 * Creates a deep copy of the given serializable object via Java serialization,
 * resolving classes with the object's own class loader.
 *
 * @param obj Object to clone
 * @param <T> Type of the object to clone
 * @return The cloned object, or {@code null} if {@code obj} is {@code null}
 *
 * @throws IOException Thrown if the serialization or deserialization process fails.
 * @throws ClassNotFoundException Thrown if any of the classes referenced by the object
 *             cannot be resolved during deserialization.
 */
public static <T extends Serializable> T clone(T obj) throws IOException, ClassNotFoundException {
	// Delegate to the class-loader-aware overload; null is passed through unchanged.
	return obj == null ? null : clone(obj, obj.getClass().getClassLoader());
}
/**
 * Creates a deep copy of the given element via Java serialization, using the
 * current thread's context class loader for deserialization.
 *
 * @param from the element to copy
 * @return a deep copy of {@code from}
 * @throws FlinkRuntimeException if serialization-based copying fails
 */
@Override
public T copy(T from) {
	final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
	try {
		return InstantiationUtil.clone(from, classLoader);
	} catch (IOException | ClassNotFoundException e) {
		throw new FlinkRuntimeException("Could not copy element via serialization: " + from, e);
	}
}
/**
 * Creates a deep copy of the given serializable object via Java serialization,
 * resolving classes with the object's own class loader.
 *
 * @param obj Object to clone
 * @param <T> Type of the object to clone
 * @return The cloned object, or {@code null} if {@code obj} is {@code null}
 *
 * @throws IOException Thrown if the serialization or deserialization process fails.
 * @throws ClassNotFoundException Thrown if any of the classes referenced by the object
 *             cannot be resolved during deserialization.
 */
public static <T extends Serializable> T clone(T obj) throws IOException, ClassNotFoundException {
	// Delegate to the class-loader-aware overload; null is passed through unchanged.
	return obj == null ? null : clone(obj, obj.getClass().getClassLoader());
}
/**
 * Creates a deep copy of the given element via Java serialization, using the
 * current thread's context class loader for deserialization.
 *
 * @param from the element to copy
 * @return a deep copy of {@code from}
 * @throws FlinkRuntimeException if serialization-based copying fails
 */
@Override
public T copy(T from) {
	final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
	try {
		return InstantiationUtil.clone(from, classLoader);
	} catch (IOException | ClassNotFoundException e) {
		throw new FlinkRuntimeException("Could not copy element via serialization: " + from, e);
	}
}
/**
 * Creates a deep copy of the given element via Java serialization, using the
 * current thread's context class loader for deserialization.
 *
 * @param from the element to copy
 * @return a deep copy of {@code from}
 * @throws FlinkRuntimeException if serialization-based copying fails
 */
@Override
public T copy(T from) {
	final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
	try {
		return InstantiationUtil.clone(from, classLoader);
	} catch (IOException | ClassNotFoundException e) {
		throw new FlinkRuntimeException("Could not copy element via serialization: " + from, e);
	}
}
/**
 * Deep-copies the given serializer wrapper by cloning it through Java
 * serialization, using the current thread's context class loader.
 *
 * @param original the serializer wrapper to copy
 * @return a deep copy of {@code original}
 * @throws CloneFailedException if serialization-based cloning fails
 */
private ExecutionConfig.SerializableSerializer<? extends Serializer<?>> deepCopySerializer(
		ExecutionConfig.SerializableSerializer<? extends Serializer<?>> original) {
	final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
	try {
		return InstantiationUtil.clone(original, contextClassLoader);
	} catch (IOException | ClassNotFoundException ex) {
		throw new CloneFailedException(
			"Could not clone serializer instance of class " + original.getClass(), ex);
	}
}
/**
 * Creates a deep copy of the given element via Java serialization, using the
 * current thread's context class loader for deserialization.
 *
 * @param from the element to copy
 * @return a deep copy of {@code from}
 * @throws FlinkRuntimeException if serialization-based copying fails
 */
@Override
public T copy(T from) {
	final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
	try {
		return InstantiationUtil.clone(from, classLoader);
	} catch (IOException | ClassNotFoundException e) {
		throw new FlinkRuntimeException("Could not copy element via serialization: " + from, e);
	}
}
/**
 * Deep-copies the given serializer wrapper by cloning it through Java
 * serialization, using the current thread's context class loader.
 *
 * @param original the serializer wrapper to copy
 * @return a deep copy of {@code original}
 * @throws CloneFailedException if serialization-based cloning fails
 */
private ExecutionConfig.SerializableSerializer<? extends Serializer<?>> deepCopySerializer(
		ExecutionConfig.SerializableSerializer<? extends Serializer<?>> original) {
	final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
	try {
		return InstantiationUtil.clone(original, contextClassLoader);
	} catch (IOException | ClassNotFoundException ex) {
		throw new CloneFailedException(
			"Could not clone serializer instance of class " + original.getClass(), ex);
	}
}
private SimpleConsumerThread<T> createAndStartSimpleConsumerThread( List<KafkaTopicPartitionState<TopicAndPartition>> seedPartitions, Node leader, ExceptionProxy errorHandler) throws IOException, ClassNotFoundException { // each thread needs its own copy of the deserializer, because the deserializer is // not necessarily thread safe final KeyedDeserializationSchema<T> clonedDeserializer = InstantiationUtil.clone(deserializer, runtimeContext.getUserCodeClassLoader()); // seed thread with list of fetch partitions (otherwise it would shut down immediately again SimpleConsumerThread<T> brokerThread = new SimpleConsumerThread<>( this, errorHandler, kafkaConfig, leader, seedPartitions, unassignedPartitionsQueue, clonedDeserializer, invalidOffsetBehavior); brokerThread.setName(String.format("SimpleConsumer - %s - broker-%s (%s:%d)", runtimeContext.getTaskName(), leader.id(), leader.host(), leader.port())); brokerThread.setDaemon(true); brokerThread.start(); LOG.info("Starting thread {}", brokerThread.getName()); return brokerThread; }
private SimpleConsumerThread<T> createAndStartSimpleConsumerThread( List<KafkaTopicPartitionState<TopicAndPartition>> seedPartitions, Node leader, ExceptionProxy errorHandler) throws IOException, ClassNotFoundException { // each thread needs its own copy of the deserializer, because the deserializer is // not necessarily thread safe final KeyedDeserializationSchema<T> clonedDeserializer = InstantiationUtil.clone(deserializer, runtimeContext.getUserCodeClassLoader()); // seed thread with list of fetch partitions (otherwise it would shut down immediately again SimpleConsumerThread<T> brokerThread = new SimpleConsumerThread<>( this, errorHandler, kafkaConfig, leader, seedPartitions, unassignedPartitionsQueue, clonedDeserializer, invalidOffsetBehavior); brokerThread.setName(String.format("SimpleConsumer - %s - broker-%s (%s:%d)", runtimeContext.getTaskName(), leader.id(), leader.host(), leader.port())); brokerThread.setDaemon(true); brokerThread.start(); LOG.info("Starting thread {}", brokerThread.getName()); return brokerThread; }
private SimpleConsumerThread<T> createAndStartSimpleConsumerThread( List<KafkaTopicPartitionState<TopicAndPartition>> seedPartitions, Node leader, ExceptionProxy errorHandler) throws IOException, ClassNotFoundException { // each thread needs its own copy of the deserializer, because the deserializer is // not necessarily thread safe final KeyedDeserializationSchema<T> clonedDeserializer = InstantiationUtil.clone(deserializer, runtimeContext.getUserCodeClassLoader()); // seed thread with list of fetch partitions (otherwise it would shut down immediately again SimpleConsumerThread<T> brokerThread = new SimpleConsumerThread<>( this, errorHandler, kafkaConfig, leader, seedPartitions, unassignedPartitionsQueue, clonedDeserializer, invalidOffsetBehavior); brokerThread.setName(String.format("SimpleConsumer - %s - broker-%s (%s:%d)", runtimeContext.getTaskName(), leader.id(), leader.host(), leader.port())); brokerThread.setDaemon(true); brokerThread.start(); LOG.info("Starting thread {}", brokerThread.getName()); return brokerThread; }
@Override public CompletableFuture<Acknowledge> updateJob(JobUpdateRequest request, Time timeout) { // generate a new JobGraph with the job update request JobGraph newJobGraph; try { // Make a deep copy of current JobGraph to avoid racing newJobGraph = InstantiationUtil.clone(jobGraph); } catch (Exception e) { return FutureUtils.completedExceptionally( new JobModificationException("Failed to make a copy of current job graph " + jobGraph.getJobID(), e)); } for (JobUpdateAction action : request.getJobUpdateActions()) { if (action instanceof JobGraphUpdateAction) { ((JobGraphUpdateAction) action).updateJobGraph(newJobGraph); } else if (action instanceof JobGraphReplaceAction) { newJobGraph = ((JobGraphReplaceAction) action).getNewJobGraph(); } else { return FutureUtils.completedExceptionally( new IllegalArgumentException("Unknown job update action: " + action)); } } // update the job with the new JobGraph return updateJob(newJobGraph); }