/**
 * <p>Serializes an object into its JSON form, appending the text to {@code buf}.</p>
 *
 * <p>This method delegates serialization to the serializer returned by
 * {@code JSONSerializers.getLegacy}, so the output follows the legacy (pre-strict)
 * MongoDB JSON conventions rather than strict Extended JSON.</p>
 *
 * @param object object to serialize (may be any type the legacy serializer supports)
 * @param buf    StringBuilder containing the JSON representation under construction;
 *               the serialized form of {@code object} is appended to it
 * @see JSONSerializers#getLegacy()
 */
public static void serialize(final Object object, final StringBuilder buf) {
    JSONSerializers.getLegacy().serialize(object, buf);
}
/**
 * Renders this GTID position as {@code Timestamp.GTID(ts=..., gtid=...)},
 * with both fields serialized as strict JSON for unambiguous log output.
 */
@Override
public String toString() {
    StringBuilder text = new StringBuilder("Timestamp.GTID(ts=");
    text.append(JSON.serialize(ts));
    text.append(", gtid=");
    text.append(JSONSerializers.getStrict().serialize(gtid));
    text.append(")");
    return text.toString();
}
/**
 * Returns an {@code ObjectSerializer} that conforms to the strict JSON format defined in
 * <a href="http://docs.mongodb.org/manual/reference/mongodb-extended-json/">extended JSON</a>.
 *
 * @return object serializer
 * @mongodb.driver.manual reference/mongodb-extended-json/ MongoDB Extended JSON
 */
public static ObjectSerializer getStrict() {
    // Start from the serializers shared with the legacy format, then register the
    // strict-mode variants for the types whose rendering differs.
    ClassMapBasedObjectSerializer strict = addCommonSerializers();
    strict.addObjectSerializer(Date.class, new DateSerializer(strict));
    strict.addObjectSerializer(BSONTimestamp.class, new BSONTimestampSerializer(strict));
    strict.addObjectSerializer(Binary.class, new BinarySerializer(strict));
    strict.addObjectSerializer(byte[].class, new ByteArraySerializer(strict));
    return strict;
}
/**
 * Persists the last processed oplog position into the given content builder:
 * the timestamp under {@code LAST_TIMESTAMP_FIELD} and the GTID (serialized as
 * strict JSON) under {@code LAST_GTID_FIELD}. Field order is preserved as written.
 *
 * @param builder content builder receiving the two fields
 * @throws IOException if the builder fails to write a field
 */
@Override public void saveFields(XContentBuilder builder) throws IOException { builder.field(MongoDBRiver.LAST_TIMESTAMP_FIELD, JSON.serialize(ts)); builder.field(MongoDBRiver.LAST_GTID_FIELD, JSONSerializers.getStrict().serialize(gtid)); } }
/**
 * <p>Serializes an object into its JSON form, appending the text to {@code buf}.</p>
 *
 * <p>This method delegates serialization to the serializer returned by
 * {@code JSONSerializers.getLegacy}, so the output follows the legacy (pre-strict)
 * MongoDB JSON conventions rather than strict Extended JSON.</p>
 *
 * @param object object to serialize (may be any type the legacy serializer supports)
 * @param buf    StringBuilder containing the JSON representation under construction;
 *               the serialized form of {@code object} is appended to it
 * @see JSONSerializers#getLegacy()
 */
public static void serialize(final Object object, final StringBuilder buf) {
    JSONSerializers.getLegacy().serialize(object, buf);
}
/**
 * Returns an {@code ObjectSerializer} that mostly conforms to the strict JSON format defined in
 * <a href="http://docs.mongodb.org/manual/reference/mongodb-extended-json/">extended JSON</a>, but with a few differences to keep
 * compatibility with previous versions of the driver. Clients should generally prefer {@code getStrict} in preference to this method.
 *
 * @return object serializer
 * @mongodb.driver.manual reference/mongodb-extended-json/ MongoDB Extended JSON
 * @see #getStrict()
 */
public static ObjectSerializer getLegacy() {
    // Shared serializers first, then the legacy-format overrides. Note that the
    // legacy binary serializer carries no reference back to the parent serializer.
    ClassMapBasedObjectSerializer legacy = addCommonSerializers();
    legacy.addObjectSerializer(Date.class, new LegacyDateSerializer(legacy));
    legacy.addObjectSerializer(BSONTimestamp.class, new LegacyBSONTimestampSerializer(legacy));
    legacy.addObjectSerializer(Binary.class, new LegacyBinarySerializer());
    legacy.addObjectSerializer(byte[].class, new LegacyBinarySerializer());
    return legacy;
}
// NOTE(review): this excerpt appears to be incomplete — brace counts do not balance,
// three trace/return pairs follow each other with no visible guard conditions, and the
// method body is truncated at the end. The code below is kept byte-identical; comments
// describe only what is visible and hedge everything else.
/**
 * Decides whether an oplog entry should be processed; returns {@code false} for
 * entries that can be safely ignored (empty, no-op, migration/sharding artifacts,
 * or entries older than the resume position).
 */
private boolean isValidOplogEntry(final DBObject entry, final Timestamp<?> startTimestamp) {
    // Entries without an operation field carry no actionable change.
    if (!entry.containsField(MongoDBRiver.OPLOG_OPERATION)) {
        logger.trace("[Empty Oplog Entry] - can be ignored. {}", JSONSerializers.getStrict().serialize(entry));
        return false;
        // NOTE(review): the guards for the two branches below (no-op detection and
        // migration/sharding detection) are not visible in this excerpt — confirm
        // against the full source before relying on this structure.
        logger.trace("[No-op Oplog Entry] - can be ignored. {}", JSONSerializers.getStrict().serialize(entry));
        return false;
        logger.trace("[Invalid Oplog Entry] - from migration or sharding operation. Can be ignored. {}", JSONSerializers.getStrict().serialize(entry));
        return false;
        // Reject entries stamped before the position we are resuming from.
        if (Timestamp.compare(oplogTimestamp, startTimestamp) < 0) {
            logger.error("[Invalid Oplog Entry] - entry timestamp [{}] before startTimestamp [{}]", JSONSerializers.getStrict().serialize(entry), startTimestamp);
            return false;
/**
 * Returns an {@code ObjectSerializer} that conforms to the strict JSON format defined in
 * <a href="http://docs.mongodb.org/manual/reference/mongodb-extended-json/">extended JSON</a>.
 *
 * @return object serializer
 * @mongodb.driver.manual reference/mongodb-extended-json/ MongoDB Extended JSON
 */
public static ObjectSerializer getStrict() {
    // Shared serializers first, then the strict-format overrides for the types
    // whose rendering differs from the legacy output.
    ClassMapBasedObjectSerializer strict = addCommonSerializers();
    strict.addObjectSerializer(Date.class, new DateSerializer(strict));
    strict.addObjectSerializer(BSONTimestamp.class, new BSONTimestampSerializer(strict));
    strict.addObjectSerializer(Binary.class, new BinarySerializer(strict));
    strict.addObjectSerializer(byte[].class, new ByteArraySerializer(strict));
    return strict;
}
@Test public void shouldGenerateRecordForUpdateEvent() throws InterruptedException { BsonTimestamp ts = new BsonTimestamp(1000, 1); CollectionId collectionId = new CollectionId("rs0", "dbA", "c1"); ObjectId objId = new ObjectId(); Document obj = new Document().append("$set", new Document("name", "Sally")); Document event = new Document().append("o", obj) .append("o2", objId) .append("ns", "dbA.c1") .append("ts", ts) .append("h", Long.valueOf(12345678)) .append("op", "u"); RecordsForCollection records = recordMakers.forCollection(collectionId); records.recordEvent(event, 1002); assertThat(produced.size()).isEqualTo(1); SourceRecord record = produced.get(0); Struct key = (Struct) record.key(); Struct value = (Struct) record.value(); assertThat(key.schema()).isSameAs(record.keySchema()); assertThat(key.get("id")).isEqualTo(JSONSerializers.getStrict().serialize(objId)); assertThat(value.schema()).isSameAs(record.valueSchema()); // assertThat(value.getString(FieldName.BEFORE)).isNull(); assertThat(value.getString(FieldName.AFTER)).isNull(); assertThat(value.getString("patch")).isEqualTo(obj.toJson(WRITER_SETTINGS)); assertThat(value.getString(FieldName.OPERATION)).isEqualTo(Operation.UPDATE.code()); assertThat(value.getInt64(FieldName.TIMESTAMP)).isEqualTo(1002L); Struct actualSource = value.getStruct(FieldName.SOURCE); Struct expectedSource = source.lastOffsetStruct("rs0", collectionId); assertThat(actualSource).isEqualTo(expectedSource); }
/**
 * Returns an {@code ObjectSerializer} that mostly conforms to the strict JSON format defined in
 * <a href="http://docs.mongodb.org/manual/reference/mongodb-extended-json/">extended JSON</a>, but with a few differences to keep
 * compatibility with previous versions of the driver. Clients should generally prefer {@code getStrict} in preference to this method.
 *
 * @return object serializer
 * @mongodb.driver.manual reference/mongodb-extended-json/ MongoDB Extended JSON
 * @see #getStrict()
 */
public static ObjectSerializer getLegacy() {
    // Shared serializers first, then the legacy-format overrides.
    ClassMapBasedObjectSerializer legacy = addCommonSerializers();
    legacy.addObjectSerializer(Date.class, new LegacyDateSerializer(legacy));
    legacy.addObjectSerializer(BSONTimestamp.class, new LegacyBSONTimestampSerializer(legacy));
    legacy.addObjectSerializer(Binary.class, new LegacyBinarySerializer());
    legacy.addObjectSerializer(byte[].class, new LegacyBinarySerializer());
    return legacy;
}
Struct value = (Struct) record.value(); assertThat(key.schema()).isSameAs(record.keySchema()); assertThat(key.get("id")).isEqualTo(JSONSerializers.getStrict().serialize(objId)); assertThat(value.schema()).isSameAs(record.valueSchema()); assertThat(value.getString(FieldName.AFTER)).isNull(); Struct key2 = (Struct) tombstone.key(); assertThat(key2.schema()).isSameAs(tombstone.keySchema()); assertThat(key2.get("id")).isEqualTo(JSONSerializers.getStrict().serialize(objId)); assertThat(tombstone.value()).isNull(); assertThat(tombstone.valueSchema()).isNull();
/**
 * Regression test for DBZ-582: with tombstone emission disabled, a delete ("d")
 * event must produce exactly one record (the delete itself) and no trailing
 * tombstone record.
 *
 * @throws InterruptedException if record production is interrupted
 */
@Test
@FixFor("DBZ-582")
public void shouldGenerateRecordForDeleteEventWithoutTombstone() throws InterruptedException {
    // emitTombstonesOnDelete = false is the behavior under test.
    RecordMakers recordMakers = new RecordMakers(filters, source, topicSelector, produced::add, false);
    BsonTimestamp ts = new BsonTimestamp(1000, 1);
    CollectionId collectionId = new CollectionId("rs0", "dbA", "c1");
    ObjectId objId = new ObjectId();
    Document obj = new Document("_id", objId);
    Document event = new Document().append("o", obj)
            .append("ns", "dbA.c1")
            .append("ts", ts)
            // Long.valueOf replaces the deprecated Long(long) constructor and matches
            // the boxing style used by the other record-maker tests in this file.
            .append("h", Long.valueOf(12345678))
            .append("op", "d");
    RecordsForCollection records = recordMakers.forCollection(collectionId);
    records.recordEvent(event, 1002);
    assertThat(produced.size()).isEqualTo(1);

    SourceRecord record = produced.get(0);
    Struct key = (Struct) record.key();
    Struct value = (Struct) record.value();
    assertThat(key.schema()).isSameAs(record.keySchema());
    // Record keys are serialized with strict Extended JSON.
    assertThat(key.get("id")).isEqualTo(JSONSerializers.getStrict().serialize(objId));
    assertThat(value.schema()).isSameAs(record.valueSchema());
    // A delete has neither an AFTER image nor a patch.
    assertThat(value.getString(FieldName.AFTER)).isNull();
    assertThat(value.getString("patch")).isNull();
    assertThat(value.getString(FieldName.OPERATION)).isEqualTo(Operation.DELETE.code());
    assertThat(value.getInt64(FieldName.TIMESTAMP)).isEqualTo(1002L);
    Struct actualSource = value.getStruct(FieldName.SOURCE);
    Struct expectedSource = source.lastOffsetStruct("rs0", collectionId);
    assertThat(actualSource).isEqualTo(expectedSource);
}
/**
 * Lists the names of all known indexes as a JSON document of the form
 * {@code {"indexes": [...]}}, optionally pretty-printed.
 *
 * @param response injected JAX-RS response context (unused directly)
 * @param pretty   when true, the JSON output is re-formatted for readability
 * @return 200 with the JSON body on success, 500 with an error message otherwise
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" })
public Response get(@Context Response response, @QueryParam(LumongoConstants.PRETTY) boolean pretty) {
    try {
        Lumongo.GetIndexesResponse indexesResponse = indexManager.getIndexes(Lumongo.GetIndexesRequest.newBuilder().build());
        Document indexDoc = new org.bson.Document();
        indexDoc.put("indexes", indexesResponse.getIndexNameList());
        String body = JSONSerializers.getStrict().serialize(indexDoc);
        if (pretty) {
            body = JsonWriter.formatJson(body);
        }
        return Response.status(LumongoConstants.SUCCESS).entity(body).build();
    }
    catch (Exception e) {
        return Response.status(LumongoConstants.INTERNAL_ERROR).entity("Failed to get index names: " + e.getMessage()).build();
    }
}
/**
 * Returns the field names of the given index as a JSON document of the form
 * {@code {"index": ..., "fields": [...]}}, optionally pretty-printed.
 *
 * @param response  injected JAX-RS response context (unused directly)
 * @param indexName name of the index to inspect; required
 * @param pretty    when true, the JSON output is re-formatted for readability
 * @return 200 with the JSON body on success, 500 with an error message otherwise
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" })
public Response get(@Context Response response, @QueryParam(LumongoConstants.INDEX) final String indexName,
        @QueryParam(LumongoConstants.PRETTY) boolean pretty) {
    // Guard clause: a missing index name cannot be serviced.
    if (indexName == null) {
        return Response.status(LumongoConstants.INTERNAL_ERROR).entity("No index defined").build();
    }
    Lumongo.GetFieldNamesRequest fieldNamesRequest = Lumongo.GetFieldNamesRequest.newBuilder().setIndexName(indexName).build();
    try {
        Lumongo.GetFieldNamesResponse fieldNamesResponse = indexManager.getFieldNames(fieldNamesRequest);
        Document fieldDoc = new Document();
        fieldDoc.put("index", indexName);
        fieldDoc.put("fields", fieldNamesResponse.getFieldNameList());
        String body = JSONSerializers.getStrict().serialize(fieldDoc);
        if (pretty) {
            body = JsonWriter.formatJson(body);
        }
        return Response.status(LumongoConstants.SUCCESS).entity(body).build();
    }
    catch (Exception e) {
        return Response.status(LumongoConstants.INTERNAL_ERROR).entity("Failed to fetch fields for index <" + indexName + ">: " + e.getMessage())
                .build();
    }
}
/**
 * Reports index-block cache configuration and current JVM memory usage (in MB)
 * as a single JSON document, optionally pretty-printed.
 *
 * @param response injected JAX-RS response context (unused directly)
 * @param pretty   when true, the JSON output is re-formatted for readability
 * @return 200 with the JSON body on success, 500 with an error message otherwise
 */
@GET
@Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" })
public Response get(@Context Response response, @QueryParam(LumongoConstants.PRETTY) boolean pretty) {
    try {
        Document stats = new Document();
        stats.put("indexBlockSize", indexManager.getClusterConfig().getIndexBlockSize());
        stats.put("maxIndexBlockCount", indexManager.getClusterConfig().getMaxIndexBlocks());
        stats.put("currentIndexBlockCount", MongoFile.getCacheSize());
        Runtime jvm = Runtime.getRuntime();
        stats.put("jvmUsedMemoryMB", (jvm.totalMemory() - jvm.freeMemory()) / MB);
        stats.put("jvmFreeMemoryMB", jvm.freeMemory() / MB);
        stats.put("jvmTotalMemoryMB", jvm.totalMemory() / MB);
        stats.put("jvmMaxMemoryMB", jvm.maxMemory() / MB);
        String body = JSONSerializers.getStrict().serialize(stats);
        if (pretty) {
            body = JsonWriter.formatJson(body);
        }
        return Response.status(LumongoConstants.SUCCESS).entity(body).build();
    }
    catch (Exception e) {
        return Response.status(LumongoConstants.INTERNAL_ERROR).entity("Failed to get cluster membership: " + e.getMessage()).build();
    }
}
String docString = JSONSerializers.getStrict().serialize(mongoDocument);
String docString = JSONSerializers.getStrict().serialize(document);