public Builder mergeFrom(io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse other) { if (other == io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse.getDefaultInstance()) return this; if (other.status_ != 0) { setStatusValue(other.getStatusValue()); if (other.getEpoch() != 0) { setEpoch(other.getEpoch());
scaleTimestamp, null, context, executor) .handle((startScaleResponse, e) -> { ScaleResponse.Builder response = ScaleResponse.newBuilder();
@java.lang.Override
public int hashCode() {
  // hashCode is memoized: protobuf messages are immutable, so computing it once is safe.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Mix the descriptor and each set field using the generated-code multipliers.
  // The sequence of operations must stay exactly as generated so the value is
  // stable across equal messages.
  int h = 41;
  h = (19 * h) + getDescriptor().hashCode();
  h = (37 * h) + STATUS_FIELD_NUMBER;
  h = (53 * h) + status_;
  if (getSegmentsCount() > 0) {
    h = (37 * h) + SEGMENTS_FIELD_NUMBER;
    h = (53 * h) + getSegmentsList().hashCode();
  }
  h = (37 * h) + EPOCH_FIELD_NUMBER;
  h = (53 * h) + getEpoch();
  // Unknown fields participate so messages differing only in unknown data hash differently.
  h = (29 * h) + unknownFields.hashCode();
  memoizedHashCode = h;
  return h;
}
// Kick off a manual scale that seals segment 1 into the supplied new ranges.
ScaleResponse scaleOpResult = streamMetadataTasks.manualScale(SCOPE, "test", Collections.singletonList(1L),
        newRanges, 30, null).get();
// Use assertEquals(expected, actual) instead of assertTrue(actual.equals(expected)):
// it reports both values on failure and matches the style of the surrounding tests.
assertEquals(ScaleStreamStatus.STARTED, scaleOpResult.getStatus());
newRanges, 30, null).get(); assertEquals(ScaleStreamStatus.STARTED, scaleOpResult.getStatus()); OperationContext context = streamStorePartialMock.createContext(SCOPE, "test"); assertEquals(streamStorePartialMock.getState(SCOPE, "test", false, context, executor).get(), State.ACTIVE);
// Start a scale that seals segment 1 and creates the segments described by keyRanges,
// stamped with a scale time 20 ticks past 'start'.
Controller.ScaleResponse scaleStatus = consumer.scale(SCOPE, STREAM, Arrays.asList(1L), keyRanges, start + 20) .get();
assertEquals(Controller.ScaleResponse.ScaleStreamStatus.STARTED, scaleStatus.getStatus());
// Poll checkScale for the epoch returned above until the operation reports SUCCESS;
// Futures.loop re-runs the body while the predicate (!done) holds.
AtomicBoolean done = new AtomicBoolean(false);
Futures.loop(() -> !done.get(), () -> consumer.checkScale(SCOPE, STREAM, scaleStatus.getEpoch()) .thenAccept(x -> done.set(x.getStatus().equals(Controller.ScaleStatusResponse.ScaleStatus.SUCCESS))), executor).get();
cancellableRequest.start(() -> { if (started) { return checkScaleStatus(stream, startScaleResponse.getEpoch()); } else { return CompletableFuture.completedFuture(false);
// Drive the scale through the gRPC service facade; result2 is the response observer/future.
this.controllerService.scale(scaleRequest, result2);
final ScaleResponse scaleResponse = result2.get();
Assert.assertEquals(ScaleResponse.ScaleStreamStatus.STARTED, scaleResponse.getStatus());
// Build a status request targeting the epoch the scale response reported.
final Controller.ScaleStatusRequest scalestatusRequest = Controller.ScaleStatusRequest.newBuilder() .setStreamInfo(ModelHelper.createStreamInfo(SCOPE1, STREAM1)) .setEpoch(scaleResponse.getEpoch()) .build();
// The response should carry exactly two new segment ranges.
Assert.assertEquals(2, scaleResponse.getSegmentsCount());
@Test(timeout = 30000)
public void sealStreamTest() throws Exception {
    // Precondition: the stream must have at least one active segment before sealing.
    assertNotEquals(0, consumer.getCurrentSegments(SCOPE, stream1).get().size());
    // Install a mock event writer so seal/scale request events can be processed inline.
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    //seal a stream: the seal completes only after its request event is processed.
    CompletableFuture<UpdateStreamStatus.Status> sealOperationResult = streamMetadataTasks.sealStream(SCOPE, stream1, null);
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    assertEquals(UpdateStreamStatus.Status.SUCCESS, sealOperationResult.get());
    //a sealed stream should have zero active/current segments
    assertEquals(0, consumer.getCurrentSegments(SCOPE, stream1).get().size());
    assertTrue(streamStorePartialMock.isSealed(SCOPE, stream1, null, executor).get());
    //reseal a sealed stream: a second seal also reports SUCCESS.
    assertEquals(UpdateStreamStatus.Status.SUCCESS, streamMetadataTasks.sealStream(SCOPE, stream1, null).get());
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    //scale operation on the sealed stream: split segment 0 into three ranges.
    AbstractMap.SimpleEntry<Double, Double> segment3 = new AbstractMap.SimpleEntry<>(0.0, 0.2);
    AbstractMap.SimpleEntry<Double, Double> segment4 = new AbstractMap.SimpleEntry<>(0.2, 0.4);
    AbstractMap.SimpleEntry<Double, Double> segment5 = new AbstractMap.SimpleEntry<>(0.4, 0.5);
    ScaleResponse scaleOpResult = streamMetadataTasks.manualScale(SCOPE, stream1, Collections.singletonList(0L),
            Arrays.asList(segment3, segment4, segment5), 30, null).get();
    // scaling operation fails once a stream is sealed.
    assertEquals(ScaleStreamStatus.FAILURE, scaleOpResult.getStatus());
    // Submitting the scale directly to the store must also be rejected, surfacing
    // a StoreException.IllegalStateException once unwrapped.
    AssertExtensions.assertFutureThrows("Scale should not be allowed as stream is already sealed",
            streamStorePartialMock.submitScale(SCOPE, stream1, Collections.singletonList(0L),
                    Arrays.asList(segment3, segment4, segment5), 30, null, null, executor),
            e -> Exceptions.unwrap(e) instanceof StoreException.IllegalStateException);
}
@Test
public void testScaleStream() throws ExecutionException, InterruptedException {
    // checkScale always reports SUCCESS so scaleStream's completion polling terminates.
    when(this.mockControllerService.checkScale(anyString(), anyString(), anyInt())).thenReturn(
            CompletableFuture.completedFuture(Controller.ScaleStatusResponse.newBuilder()
                    .setStatus(Controller.ScaleStatusResponse.ScaleStatus.SUCCESS).build()));
    // STARTED -> scaleStream resolves to true.
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong())).thenReturn(
            CompletableFuture.completedFuture(Controller.ScaleResponse.newBuilder()
                    .setStatus(Controller.ScaleResponse.ScaleStreamStatus.STARTED).build()));
    Assert.assertTrue(this.testController.scaleStream(new StreamImpl("scope", "stream"), new ArrayList<>(),
            new HashMap<>(), executorService()).getFuture().join());
    // PRECONDITION_FAILED -> scaleStream resolves to false.
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong())).thenReturn(
            CompletableFuture.completedFuture(Controller.ScaleResponse.newBuilder()
                    .setStatus(Controller.ScaleResponse.ScaleStreamStatus.PRECONDITION_FAILED).build()));
    Assert.assertFalse(this.testController.scaleStream(new StreamImpl("scope", "stream"), new ArrayList<>(),
            new HashMap<>(), executorService()).getFuture().join());
    // FAILURE -> startScale throws ControllerFailureException.
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong())).thenReturn(
            CompletableFuture.completedFuture(Controller.ScaleResponse.newBuilder()
                    .setStatus(Controller.ScaleResponse.ScaleStreamStatus.FAILURE).build()));
    assertThrows("Expected ControllerFailureException",
            () -> this.testController.startScale(new StreamImpl("scope", "stream"), new ArrayList<>(),
                    new HashMap<>()).join(),
            ex -> ex instanceof ControllerFailureException);
    // An out-of-range enum value (-1 -> UNRECOGNIZED) must also surface as a failure.
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong())).thenReturn(
            CompletableFuture.completedFuture(Controller.ScaleResponse.newBuilder()
                    .setStatusValue(-1).build()));
    assertThrows("Expected ControllerFailureException",
            () -> this.testController.startScale(new StreamImpl("scope", "stream"), new ArrayList<>(),
                    new HashMap<>()).join(),
            ex -> ex instanceof ControllerFailureException);
}
@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "scale",
    requestType = io.pravega.controller.stream.api.grpc.v1.Controller.ScaleRequest.class,
    responseType = io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
// Returns the (lazily created, cached) gRPC method descriptor for the unary "scale" RPC.
public static io.grpc.MethodDescriptor<io.pravega.controller.stream.api.grpc.v1.Controller.ScaleRequest,
    io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse> getScaleMethod() {
  io.grpc.MethodDescriptor<io.pravega.controller.stream.api.grpc.v1.Controller.ScaleRequest,
      io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse> getScaleMethod;
  // Double-checked locking on the stub class: build the descriptor once, then reuse it.
  if ((getScaleMethod = ControllerServiceGrpc.getScaleMethod) == null) {
    synchronized (ControllerServiceGrpc.class) {
      if ((getScaleMethod = ControllerServiceGrpc.getScaleMethod) == null) {
        ControllerServiceGrpc.getScaleMethod = getScaleMethod =
            io.grpc.MethodDescriptor.<io.pravega.controller.stream.api.grpc.v1.Controller.ScaleRequest,
                io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse>newBuilder()
            .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
            .setFullMethodName(generateFullMethodName(
                "io.pravega.controller.stream.api.grpc.v1.ControllerService", "scale"))
            .setSampledToLocalTracing(true)
            // Marshal request/response with the protobuf-generated default instances.
            .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                io.pravega.controller.stream.api.grpc.v1.Controller.ScaleRequest.getDefaultInstance()))
            .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse.getDefaultInstance()))
            .setSchemaDescriptor(new ControllerServiceMethodDescriptorSupplier("scale"))
            .build();
      }
    }
  }
  return getScaleMethod;
}
@Test
public void eventWriterInitializationTest() throws Exception {
    // Create a fixed single-segment stream and mark it ACTIVE.
    final ScalingPolicy policy = ScalingPolicy.fixed(1);
    final StreamConfiguration configuration = StreamConfiguration.builder().scalingPolicy(policy).build();
    streamStorePartialMock.createStream(SCOPE, "test", configuration, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, "test", State.ACTIVE, null, executor).get();
    // Before a request event writer is installed, manualScale must fail with
    // ProcessingDisabledException.
    AssertExtensions.assertThrows("", () -> streamMetadataTasks.manualScale(SCOPE, "test", Collections.singletonList(0L),
            Arrays.asList(), 30, null).get(),
            e -> e instanceof TaskExceptions.ProcessingDisabledException);
    // Once a writer is installed, the same scale request is accepted (STARTED).
    streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, executor));
    List<AbstractMap.SimpleEntry<Double, Double>> newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.5));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.5, 1.0));
    ScaleResponse scaleOpResult = streamMetadataTasks.manualScale(SCOPE, "test", Collections.singletonList(0L),
            newRanges, 30, null).get();
    assertEquals(ScaleStreamStatus.STARTED, scaleOpResult.getStatus());
    // checkScale with an unknown stream, unknown scope, or non-existent epoch must all
    // report INVALID_INPUT.
    Controller.ScaleStatusResponse scaleStatusResult = streamMetadataTasks.checkScale(SCOPE, "UNKNOWN", 0, null).get();
    assertEquals(Controller.ScaleStatusResponse.ScaleStatus.INVALID_INPUT, scaleStatusResult.getStatus());
    scaleStatusResult = streamMetadataTasks.checkScale("UNKNOWN", "test", 0, null).get();
    assertEquals(Controller.ScaleStatusResponse.ScaleStatus.INVALID_INPUT, scaleStatusResult.getStatus());
    scaleStatusResult = streamMetadataTasks.checkScale(SCOPE, "test", 5, null).get();
    assertEquals(Controller.ScaleStatusResponse.ScaleStatus.INVALID_INPUT, scaleStatusResult.getStatus());
}
@Override
public void scale(ScaleRequest request, StreamObserver<ScaleResponse> responseObserver) {
    // Mock behavior: any stream other than "stream1" simulates a server-side failure.
    if (!request.getStreamInfo().getStream().equals("stream1")) {
        responseObserver.onError(Status.INTERNAL.withDescription("Server error").asRuntimeException());
        return;
    }
    // Success path: report a STARTED scale with two resulting segment ranges in epoch 0.
    ScaleResponse reply = ScaleResponse.newBuilder()
            .setStatus(ScaleResponse.ScaleStreamStatus.STARTED)
            .addSegments(ModelHelper.createSegmentRange("scope1", "stream1", 0, 0.0, 0.5))
            .addSegments(ModelHelper.createSegmentRange("scope1", "stream1", 1, 0.5, 1.0))
            .setEpoch(0)
            .build();
    responseObserver.onNext(reply);
    responseObserver.onCompleted();
}
@Override
public CompletableFuture<Boolean> startScale(final Stream stream, final List<Long> sealedSegments,
                                             final Map<Double, Double> newKeyRanges) {
    // Map the controller's ScaleStreamStatus onto a boolean outcome:
    // STARTED -> true, PRECONDITION_FAILED -> false, anything else -> exception.
    return startScaleInternal(stream, sealedSegments, newKeyRanges)
            .thenApply(response -> {
                ScaleResponse.ScaleStreamStatus status = response.getStatus();
                if (status == ScaleResponse.ScaleStreamStatus.STARTED) {
                    return true;
                }
                if (status == ScaleResponse.ScaleStreamStatus.PRECONDITION_FAILED) {
                    return false;
                }
                if (status == ScaleResponse.ScaleStreamStatus.FAILURE) {
                    throw new ControllerFailureException("Failed to scale stream: " + stream);
                }
                // UNRECOGNIZED or any future enum value: fail loudly rather than guess.
                throw new ControllerFailureException("Unknown return status scaling stream "
                        + stream + " " + status);
            });
}
@Override
public CancellableRequest<Boolean> scaleStream(final Stream stream, final List<Long> sealedSegments,
                                               final Map<Double, Double> newKeyRanges,
                                               final ScheduledExecutorService executor) {
    // Returns immediately with a cancellable handle; the scale is started asynchronously
    // and the handle's future completes when the scale finishes (or the start fails).
    CancellableRequest<Boolean> cancellableRequest = new CancellableRequest<>();
    startScaleInternal(stream, sealedSegments, newKeyRanges)
            .whenComplete((startScaleResponse, e) -> {
                if (e != null) {
                    // Start failed: surface the error through the cancellable request.
                    cancellableRequest.start(() -> Futures.failedFuture(e), any -> true, executor);
                } else {
                    final boolean started = startScaleResponse.getStatus().equals(ScaleResponse.ScaleStreamStatus.STARTED);
                    cancellableRequest.start(() -> {
                        if (started) {
                            // Poll scale progress for the epoch the start call returned.
                            return checkScaleStatus(stream, startScaleResponse.getEpoch());
                        } else {
                            // Scale did not start (e.g. precondition failed): resolve false.
                            return CompletableFuture.completedFuture(false);
                        }
                    // If the scale never started there is nothing to wait for, so the
                    // request is done immediately; otherwise wait until polling reports done.
                    }, isDone -> !started || isDone, executor);
                }
            });
    return cancellableRequest;
}
// Builds a ScaleResponse from the builder's current state without validating
// required fields (protobuf-generated builder method).
public io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse buildPartial() {
  io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse result = new io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse(this);
  // NOTE(review): generated-code artifacts — from_bitField0_ is never read, and
  // to_bitField0_ stays 0, presumably because no field here needs a presence bit.
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  result.status_ = status_;
  if (segmentsBuilder_ == null) {
    // No nested builder in use: freeze the locally accumulated segments list so the
    // built message is immutable, and clear the "list is mutable" bit.
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      segments_ = java.util.Collections.unmodifiableList(segments_);
      bitField0_ = (bitField0_ & ~0x00000002);
    }
    result.segments_ = segments_;
  } else {
    // Delegate to the repeated-field builder when one was created.
    result.segments_ = segmentsBuilder_.build();
  }
  result.epoch_ = epoch_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Fast path: same reference.
  if (obj == this) {
    return true;
  }
  // Non-ScaleResponse objects are delegated to the superclass comparison.
  if (!(obj instanceof io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse)) {
    return super.equals(obj);
  }
  io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse that =
      (io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse) obj;
  // Two responses are equal when every field — including unknown fields — matches.
  return status_ == that.status_
      && getSegmentsList().equals(that.getSegmentsList())
      && getEpoch() == that.getEpoch()
      && unknownFields.equals(that.unknownFields);
}
// Translates a ScaleResponse status into the boolean result of a scale request,
// logging the outcome against the given requestId. Throws ControllerFailureException
// for FAILURE and for any unrecognized status.
private Boolean handleScaleResponse(Stream stream, ScaleResponse response, long requestId) {
    ScaleResponse.ScaleStreamStatus status = response.getStatus();
    if (status == ScaleResponse.ScaleStreamStatus.STARTED) {
        log.info(requestId, "Successfully started scale stream: {}", stream.getStreamName());
        return true;
    }
    if (status == ScaleResponse.ScaleStreamStatus.PRECONDITION_FAILED) {
        log.warn(requestId, "Precondition failed for scale stream: {}", stream.getStreamName());
        return false;
    }
    if (status == ScaleResponse.ScaleStreamStatus.FAILURE) {
        log.warn(requestId, "Failed to scale stream: {}", stream.getStreamName());
        throw new ControllerFailureException("Failed to scale stream: " + stream);
    }
    // UNRECOGNIZED or any future enum value.
    throw new ControllerFailureException("Unknown return status scaling stream "
            + stream + " " + response.getStatus());
}
// Returns the shared default (all-fields-unset) ScaleResponse instance
// (protobuf-generated accessor).
public io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse getDefaultInstanceForType() {
  return io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse.getDefaultInstance();
}
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() {