/**
 * Deserializes a license from the given stream. Every field is consumed in the
 * exact order it was written; do not reorder these reads.
 * NOTE(review): order is assumed to mirror the corresponding writeTo — confirm.
 */
public SearchGuardLicense(final StreamInput in) throws IOException {
    uid = in.readString();
    type = in.readEnum(Type.class);
    issueDate = in.readString();
    expiryDate = in.readString();
    issuedTo = in.readString();
    issuer = in.readString();
    startDate = in.readString();
    majorVersion = in.readOptionalVInt();
    clusterName = in.readString();
    allowedNodeCount = in.readInt();
    msgs.addAll(in.readList(StreamInput::readString));
    expiresInDays = in.readLong();
    isExpired = in.readBoolean();
    valid = in.readBoolean();
    action = in.readString();
    prodUsage = in.readString();
    // Lambda in place of the original anonymous Reader<Feature> class; the
    // parameter is named "input" because a lambda may not shadow "in".
    features = in.readArray(input -> input.readEnum(Feature.class), Feature[]::new);
    // Not transferable over the wire; a streamed-in license has no cluster service.
    clusterService = null;
}
/**
 * Reads this object's state from the stream: parent state first via
 * {@code super.readFrom(in)}, then {@code id} as a fixed-width long.
 * NOTE(review): read order must mirror the matching write side — confirm.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    id = in.readLong();
}
/**
 * Reads a length-prefixed array of fixed-width longs from this stream.
 *
 * @return the decoded array (empty if the encoded length was zero)
 * @throws IOException if the underlying stream read fails
 */
public long[] readLongArray() throws IOException {
    final int len = readArraySize(); // length prefix precedes the elements
    final long[] result = new long[len];
    for (int idx = 0; idx < len; idx++) {
        result[idx] = readLong();
    }
    return result;
}
/**
 * Read from a stream. The super constructor consumes the shared values-source
 * builder state first; a marker boolean then signals whether an explicit
 * {@code precisionThreshold} (fixed-width long) follows on the wire.
 */
public CardinalityAggregationBuilder(StreamInput in) throws IOException {
    super(in, ValuesSourceType.ANY);
    if (in.readBoolean()) { // true => a precision threshold was serialized
        precisionThreshold = in.readLong();
    }
}
/**
 * Reads an entry from the stream: the snapshot identity, then the start time
 * (variable-length long), then the repository state id (fixed-width long).
 * NOTE(review): read order must mirror the matching write side — confirm.
 */
public Entry(StreamInput in) throws IOException {
    this.snapshot = new Snapshot(in);
    this.startTime = in.readVLong();
    this.repositoryStateId = in.readLong();
}
/**
 * Reads this response from the stream: parent state, then the request id
 * (fixed-width long), then the search hits payload.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    requestId = in.readLong();
    hits = SearchHits.readSearchHits(in);
}
/**
 * Read from a stream. The {@code format} is supplied by the caller (it is not
 * part of this bucket's wire format); the key is a fixed-width long, the doc
 * count a variable-length long, followed by the sub-aggregations.
 */
public Bucket(StreamInput in, DocValueFormat format) throws IOException {
    this.format = format;
    key = in.readLong();
    docCount = in.readVLong();
    aggregations = InternalAggregations.readAggregations(in);
}
/**
 * Read from a stream: the geohash cell encoded as a fixed-width long, the doc
 * count as a variable-length long, then the sub-aggregations.
 */
private Bucket(StreamInput in) throws IOException {
    geohashAsLong = in.readLong();
    docCount = in.readVLong();
    aggregations = InternalAggregations.readAggregations(in);
}
/**
 * Reads this builder from the stream: shared super state, the numeric interval
 * (fixed-width long), an optional calendar interval, then — if the marker
 * boolean is true — a time zone serialized by its string ID.
 */
protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException {
    super(in);
    this.interval = in.readLong();
    this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
    if (in.readBoolean()) { // true => an explicit time zone was serialized
        timeZone = DateTimeZone.forID(in.readString());
    }
}
/**
 * Reads this object from the stream: parent state, the task id, the allocation
 * id (fixed-width long), then an optional named-writeable task state.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    taskId = in.readString();
    allocationId = in.readLong();
    state = in.readOptionalNamedWriteable(PersistentTaskState.class);
}
/**
 * Reads this request from the stream: parent state, the recovery id
 * (fixed-width long), the shard id, then the cluster state version
 * (variable-length long).
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    recoveryId = in.readLong();
    shardId = ShardId.readShardId(in);
    clusterStateVersion = in.readVLong();
}
/**
 * Reads this request from the stream: parent state, the recovery id
 * (fixed-width long), the shard id, the store metadata snapshot, then the
 * total translog operation count (variable-length int).
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    recoveryId = in.readLong();
    shardId = ShardId.readShardId(in);
    snapshotFiles = new Store.MetadataSnapshot(in);
    totalTranslogOps = in.readVInt();
}
/**
 * Reads this request from the stream: parent state, the cluster name, the
 * index UUID, the shard id, then the timeout — serialized as a fixed-width
 * long of milliseconds and rehydrated as a {@link TimeValue}.
 */
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    clusterName = new ClusterName(in);
    indexUUID = in.readString();
    shardId = ShardId.readShardId(in);
    timeout = new TimeValue(in.readLong(), TimeUnit.MILLISECONDS);
}
/**
 * Reads a cluster-state diff from the stream. Fields are consumed in strict
 * wire order: cluster name, source/target state UUIDs, target version
 * (fixed-width long), then the per-section diffs (routing table, nodes,
 * metadata, blocks, customs). {@code localNode} is passed through so the
 * nodes diff can substitute the receiver's own node instance.
 */
ClusterStateDiff(StreamInput in, DiscoveryNode localNode) throws IOException {
    clusterName = new ClusterName(in);
    fromUuid = in.readString();
    toUuid = in.readString();
    toVersion = in.readLong();
    routingTable = RoutingTable.readDiffFrom(in);
    nodes = DiscoveryNodes.readDiffFrom(in, localNode);
    metaData = MetaData.readDiffFrom(in);
    blocks = ClusterBlocks.readDiffFrom(in);
    // Customs are keyed by string and use the registered per-type value serializer.
    customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER);
}