Example usage for com.fasterxml.jackson.databind.node IntNode IntNode

List of usage examples for com.fasterxml.jackson.databind.node IntNode IntNode

Introduction

On this page you can find example usage for the com.fasterxml.jackson.databind.node IntNode(int) constructor.

Prototype

public IntNode(int v) 
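
The example below is a minimal, self-contained sketch (not taken from the source files listed under Usage) showing the constructor in isolation, alongside the IntNode.valueOf(int) factory, which is usually preferred because it can return shared instances for small values.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class IntNodeExample {
    public static void main(String[] args) {
        // Wrap a primitive int in a JSON tree node via the constructor shown above
        IntNode count = new IntNode(42);

        // Attach it to an ObjectNode, as most of the examples below do
        ObjectNode obj = JsonNodeFactory.instance.objectNode();
        obj.set("count", count);

        // Factory alternative: may reuse cached instances for small values
        JsonNode cached = IntNode.valueOf(42);

        System.out.println(obj);                  // {"count":42}
        System.out.println(cached.intValue());    // 42
    }
}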

Usage

From source file:com.mapr.synth.samplers.CommonPointOfCompromise.java

@Override
public JsonNode sample() {
    ArrayNode r = nodeFactory.arrayNode();

    double t = start;
    double averageInterval = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS) / transactionsPerDay.nextDouble();
    Exponential interval = new Exponential(1 / averageInterval, gen);

    Date date = new Date();
    boolean compromised = false;
    while (t < end) {
        ObjectNode transaction = new ObjectNode(nodeFactory);
        t += interval.nextDouble();
        date.setTime((long) t);
        transaction.set("timestamp", new LongNode((long) (t / 1000)));
        transaction.set("date", new TextNode(df.format(date)));
        Integer merchantId = merchant.sample();
        transaction.set("merchant", new IntNode(merchantId));

        if (merchantId == 0 && t >= compromiseStart && t < compromiseEnd) {
            compromised = true;
            transaction.set("compromise", new IntNode(1));
        } else {
            transaction.set("compromise", new IntNode(0));
        }

        if (t > exploitEnd) {
            compromised = false;
        }

        double pFraud;
        if (t >= exploitStart && compromised) {
            pFraud = compromisedFraudRate;
        } else {
            pFraud = uncompromisedFraudRate;
        }

        transaction.set("fraud", new IntNode((gen.nextDouble() < pFraud) ? 1 : 0));

        r.add(transaction);
    }
    return r;
}
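
As a side note, the integer fields above could equally be written with ObjectNode.put(String, int), which builds the numeric node internally. Below is a trimmed-down sketch (with a hypothetical merchant id, not the sampler's actual values):

import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class TransactionSketch {
    public static void main(String[] args) {
        ObjectNode transaction = JsonNodeFactory.instance.objectNode();
        transaction.put("merchant", 17);     // equivalent to set("merchant", new IntNode(17))
        transaction.put("compromise", 0);    // flag fields written as plain ints
        transaction.put("fraud", 1);
        System.out.println(transaction);     // {"merchant":17,"compromise":0,"fraud":1}
    }
}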

From source file:com.mapr.synth.samplers.VinSampler.java

@Override
public JsonNode sample() {
    ObjectNode r = new ObjectNode(nodeFactory);

    String manufacturer = randomCode(legalCodes);
    String restraint = randomCode(restraintCodes);

    int year = randomCode(legalYears);
    String yearCode = computeYearCode(year);
    int sequence = sequenceCounter.incrementAndGet();

    String front;
    String plant;

    String make = makes.get(manufacturer);

    switch (make) {
    case "Ford": {
        String model = randomCode(fordModelCodes);
        String engine = randomCode(fordEngineCodes);
        plant = randomCode(fordPlantCodes);
        front = pad(manufacturer, 3, "AAAAAAAAAAAAAAAAAA") + restraint + pad(model, 3, "0000000000000000")
                + engine;
        if (verbose) {
            r.set("model", new TextNode(fordModels.get(model)));
            r.set("engine", new TextNode(fordEngines.get(engine)));
        }
        break;
    }
    case "BMW":
    case "BMW M": {
        String model = randomCode(bmwModelCodes);
        plant = randomCode(bmwPlantCodes);
        front = pad(manufacturer, 3, "AAAAAAAAAAAAAAAAAA") + restraint + model;
        if (verbose) {
            r.set("model", new TextNode(bmwModels.get(model)));
            r.set("plant", new TextNode(bmwPlants.get(plant)));
        }
        break;
    }
    default: {
        String model = gibberish(4);
        plant = gibberish(1);
        front = pad(manufacturer, 3, "AAAAAAAAAAAAAAAAAA") + restraint + model;
        break;
    }
    }
    String check = "0";

    String rawVin = front + check + yearCode + plant + String.format("%06d", sequence);
    String vin = addCheckDigit(rawVin);

    if (verbose) {
        r.set("VIN", new TextNode(vin));
        r.set("manufacturer", new TextNode(makes.get(manufacturer)));
        r.set("year", new IntNode(year));
    } else {
        return new TextNode(vin);
    }
    return r;
}

From source file:com.turn.shapeshifter.SchemaSerializer.java

/**
 * Returns the JSON representation of the value of a message's field.
 *
 * @param value the value to represent in JSON
 * @param field the descriptor of the value's field.
 * @param schemas a container for object schemas to use for formatting fields that refer to
 *                  other messages
 * @throws SerializationException
 */
private JsonNode serializeValue(Object value, FieldDescriptor field, ReadableSchemaRegistry schemas)
        throws SerializationException {
    JsonNode valueNode = NullNode.instance;
    if (schema.getTransforms().containsKey(field.getName())) {
        return schema.getTransforms().get(field.getName()).serialize(value);
    }
    switch (field.getType()) {
    case BOOL:
        valueNode = BooleanNode.valueOf((Boolean) value);
        break;
    case BYTES:
        break;
    case DOUBLE:
        valueNode = new DoubleNode((Double) value);
        break;
    case ENUM:
        EnumValueDescriptor enumValueDescriptor = (EnumValueDescriptor) value;
        String enumValue = NamedSchema.PROTO_ENUM_CASE_FORMAT.to(schema.getEnumCaseFormat(),
                enumValueDescriptor.getName());
        valueNode = new TextNode(enumValue);
        break;
    case FLOAT:
        valueNode = new DoubleNode((Float) value);
        break;
    case GROUP:
        break;
    case FIXED32:
    case INT32:
    case SFIXED32:
    case SINT32:
    case UINT32:
        valueNode = new IntNode((Integer) value);
        break;
    case FIXED64:
    case INT64:
    case SFIXED64:
    case SINT64:
    case UINT64:
        valueNode = new LongNode((Long) value);
        break;
    case MESSAGE:
        Message messageValue = (Message) value;
        Schema subSchema = null;
        if (schema.getSubObjectsSchemas().containsKey(field.getName())) {
            String schemaName = schema.getSubObjectsSchemas().get(field.getName());
            if (schemas.contains(schemaName)) {
                subSchema = schemas.get(schemaName);
            } else {
                throw new IllegalStateException();
            }
        } else {
            try {
                subSchema = schemas.get(field.getMessageType());
            } catch (SchemaObtentionException soe) {
                throw new SerializationException(soe);
            }
        }
        valueNode = subSchema.getSerializer().serialize(messageValue, schemas);
        break;
    case STRING:
        valueNode = new TextNode((String) value);
        break;
    default:
        break;
    }
    return valueNode;
}
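
The numeric branches above follow a simple rule: 32-bit protobuf integer types map to IntNode, 64-bit types to LongNode. A hypothetical standalone helper (not part of Shapeshifter) capturing just that mapping might look like this:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.LongNode;
import com.fasterxml.jackson.databind.node.NullNode;
import com.google.protobuf.Descriptors.FieldDescriptor;

public class ProtoNumberNodes {
    // Maps a protobuf scalar integer value to the corresponding Jackson node type.
    static JsonNode numberNodeFor(FieldDescriptor.Type type, Object value) {
        switch (type) {
        case FIXED32: case INT32: case SFIXED32: case SINT32: case UINT32:
            return new IntNode((Integer) value);   // all 32-bit variants fit in an IntNode
        case FIXED64: case INT64: case SFIXED64: case SINT64: case UINT64:
            return new LongNode((Long) value);     // 64-bit variants need a LongNode
        default:
            return NullNode.instance;              // non-integer types are handled elsewhere
        }
    }

    public static void main(String[] args) {
        System.out.println(numberNodeFor(FieldDescriptor.Type.INT32, 7));    // 7
        System.out.println(numberNodeFor(FieldDescriptor.Type.INT64, 7L));   // 7
    }
}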

From source file:com.turn.shapeshifter.NamedSchemaSerializer.java

/**
 * Returns the JSON representation of the value of a message's field.
 *
 * @param value the value to represent in JSON
 * @param field the descriptor of the value's field.
 * @param schemas a container for object schemas to use for formatting
 * fields that refer to other messages
 * @throws SerializationException
 */
private JsonNode serializeValue(Object value, FieldDescriptor field, ReadableSchemaRegistry schemas)
        throws SerializationException {
    JsonNode valueNode = NullNode.instance;
    if (schema.getTransforms().containsKey(field.getName())) {
        return schema.getTransforms().get(field.getName()).serialize(value);
    }
    switch (field.getType()) {
    case BOOL:
        valueNode = BooleanNode.valueOf((Boolean) value);
        break;
    case BYTES:
        byte[] bytes = ((ByteString) value).toByteArray();
        String content = new String();
        for (int i = 0; i < bytes.length; i++) {
            content += (char) bytes[i];
        }
        valueNode = TextNode.valueOf(content);
        break;
    case DOUBLE:
        valueNode = new DoubleNode((Double) value);
        break;
    case ENUM:
        EnumValueDescriptor enumValueDescriptor = (EnumValueDescriptor) value;
        String enumValue = NamedSchema.PROTO_ENUM_CASE_FORMAT.to(schema.getEnumCaseFormat(),
                enumValueDescriptor.getName());
        valueNode = new TextNode(enumValue);
        break;
    case FLOAT:
        valueNode = new DoubleNode((Float) value);
        break;
    case GROUP:
        break;
    case FIXED32:
    case INT32:
    case SFIXED32:
    case SINT32:
    case UINT32:
        valueNode = new IntNode((Integer) value);
        break;
    case FIXED64:
    case INT64:
    case SFIXED64:
    case SINT64:
    case UINT64:
        valueNode = schema.getSurfaceLongsAsStrings() ? new TextNode(((Long) value).toString())
                : new LongNode((Long) value);
        break;
    case MESSAGE:
        Message messageValue = (Message) value;
        Schema subSchema = null;
        if (schema.getSubObjectsSchemas().containsKey(field.getName())) {
            String schemaName = schema.getSubObjectsSchemas().get(field.getName());
            if (schemas.contains(schemaName)) {
                subSchema = schemas.get(schemaName);
            } else {
                throw new IllegalStateException();
            }
        } else {
            try {
                subSchema = schemas.get(field.getMessageType());
            } catch (SchemaObtentionException soe) {
                throw new SerializationException(soe);
            }
        }
        valueNode = subSchema.getSerializer().serialize(messageValue, schemas);
        valueNode = valueNode.size() == 0 ? NullNode.instance : valueNode;
        break;
    case STRING:
        valueNode = new TextNode((String) value);
        break;
    default:
        break;
    }
    return valueNode;
}

From source file:com.squarespace.template.Context.java

public JsonNode buildNode(int value) {
    return new IntNode(value);
}
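
A possible variant (purely a sketch, not Squarespace's actual implementation) delegates to IntNode.valueOf(int), which can hand back shared instances for small values instead of allocating a new node on every call:

public JsonNode buildNode(int value) {
    // Behaves the same as new IntNode(value), but may reuse canonical instances
    return IntNode.valueOf(value);
}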

From source file:com.squarespace.template.Context.java

/**
 * Obtain the value for 'name' from the given stack frame's node.
 */
private JsonNode resolve(Object name, Frame frame) {
    // Special internal variable @index points to the array index for a
    // given stack frame.
    if (name instanceof String) {
        String strName = (String) name;

        if (strName.startsWith("@")) {
            if (name.equals("@index")) {
                if (frame.currentIndex != -1) {
                    // @index is 1-based
                    return new IntNode(frame.currentIndex + 1);
                }
                return Constants.MISSING_NODE;
            }
            JsonNode node = frame.getVar(strName);
            return (node == null) ? Constants.MISSING_NODE : node;
        }

        // Fall through
    }
    return nodePath(frame.node(), name);
}
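
To make the 1-based convention concrete, here is a tiny hypothetical fragment (variable names invented, Jackson imports assumed):

int currentIndex = 0;                                   // 0-based position within the array frame
JsonNode indexNode = new IntNode(currentIndex + 1);     // templates see @index == 1 for the first element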

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_Buckets.java

@Test
public void test_updateBucket()
        throws JsonProcessingException, IOException, InterruptedException, ExecutionException, ParseException {
    _logger.info("Starting test_updateBucket");

    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_source_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("ingest.source")).get();

    v1_source_db.deleteDatastore().get();

    IManagementCrudService<DataBucketBean> bucket_db = this._service_context.getCoreManagementDbService()
            .getDataBucketStore();
    bucket_db.deleteDatastore().get();

    IManagementCrudService<DataBucketStatusBean> bucket_status_db = this._service_context
            .getCoreManagementDbService().getDataBucketStatusStore();
    bucket_status_db.deleteDatastore().get();

    // Create 2 V1 sources

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_source_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));

    ((ObjectNode) v1_source_2).set("_id", null);
    ((ObjectNode) v1_source_2).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.2"));

    // Create 2 buckets

    final DataBucketBean bucket1 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_1);
    final DataBucketBean bucket2 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_2);

    assertEquals(0L, (long) bucket_db.countObjects().get());
    bucket_db.storeObjects(Arrays.asList(bucket1, bucket2)).get();
    assertEquals(2L, (long) bucket_db.countObjects().get());

    //(store status)

    final DataBucketStatusBean bucket_status1 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket1._id()).with(DataBucketStatusBean::suspended, false)
            .with(DataBucketStatusBean::bucket_path, bucket1.full_name()).done().get();

    final DataBucketStatusBean bucket_status2 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket2._id()).with(DataBucketStatusBean::suspended, true)
            .with(DataBucketStatusBean::bucket_path, bucket2.full_name()).done().get();

    assertEquals(0L, (long) bucket_status_db.countObjects().get());
    bucket_status_db.storeObjects(Arrays.asList(bucket_status1, bucket_status2)).get();
    assertEquals(2L, (long) bucket_status_db.countObjects().get());

    // Mod + save sources

    ((ObjectNode) v1_source_1).set("searchCycle_secs", new IntNode(-1));
    ((ObjectNode) v1_source_1).set("description", new TextNode("NEW DESCRIPTION"));

    assertEquals(0L, (long) v1_source_db.countObjects().get());
    v1_source_db.storeObjects(Arrays.asList(v1_source_1)).get(); // (only source 1; source 2 is used to demo the error case)
    assertEquals(1L, (long) v1_source_db.countObjects().get());

    // Run the function under test

    // Test1 - succeeds

    final ManagementFuture<Supplier<Object>> res_1 = IkanowV1SyncService_Buckets
            .updateBucket("aleph...bucket.Template_V2_data_bucket.", bucket_db, bucket_status_db, v1_source_db);

    assertEquals(bucket1._id(), res_1.get().get());
    assertEquals(0, res_1.getManagementResults().get().size());

    assertEquals(2L, (long) bucket_db.countObjects().get());
    assertEquals(2L, (long) bucket_status_db.countObjects().get());

    final Optional<DataBucketStatusBean> status = bucket_status_db.getObjectById(bucket1._id()).get();
    assertEquals(true, status.get().suspended());

    final Optional<DataBucketBean> updated_bucket = bucket_db.getObjectById(bucket1._id()).get();
    assertEquals("NEW DESCRIPTION", updated_bucket.get().description());
    assertEquals(bucket1.display_name(), updated_bucket.get().display_name());
    assertEquals(bucket1.tags(), updated_bucket.get().tags());
    assertEquals(bucket1.full_name(), updated_bucket.get().full_name());

    // Test 2 - error because source_2 not in DB any more

    final ManagementFuture<Supplier<Object>> res_2 = IkanowV1SyncService_Buckets.updateBucket(
            "aleph...bucket.Template_V2_data_bucket.2", bucket_db, bucket_status_db, v1_source_db);

    try {
        res_2.get();
        fail("Should have errored");
    } catch (Exception e) {
    }
    assertEquals(1, res_2.getManagementResults().get().size());
    assertEquals(false, res_2.getManagementResults().get().iterator().next().success());
}
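
The searchCycle_secs modification above is a general Jackson pattern: parse JSON into a tree, cast to ObjectNode, and overwrite a field with an IntNode. A minimal standalone sketch (using a hypothetical JSON literal instead of the test resource):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class ModifySourceSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode source = (ObjectNode) mapper.readTree("{\"searchCycle_secs\": 3600}");
        source.set("searchCycle_secs", new IntNode(-1));    // the same overwrite the test performs
        System.out.println(source);                         // {"searchCycle_secs":-1}
    }
}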

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_Buckets.java

@Test
public void test_createNewBucket()
        throws JsonProcessingException, IOException, InterruptedException, ExecutionException, ParseException {
    _logger.info("Starting test_createNewBucket");

    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_source_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("ingest.source")).get();

    v1_source_db.deleteDatastore().get();

    IManagementCrudService<DataBucketBean> bucket_db = this._service_context.getCoreManagementDbService()
            .getDataBucketStore();
    bucket_db.deleteDatastore().get();

    IManagementCrudService<DataBucketStatusBean> bucket_status_db = this._service_context
            .getCoreManagementDbService().getDataBucketStatusStore();
    bucket_status_db.deleteDatastore().get();

    // Create 2 V1 sources

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_source_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));

    ((ObjectNode) v1_source_2).set("_id", null);
    ((ObjectNode) v1_source_2).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.2"));

    // Create 2 buckets

    assertEquals(0L, (long) bucket_db.countObjects().get());
    assertEquals(0L, (long) bucket_status_db.countObjects().get());

    // Save sources

    ((ObjectNode) v1_source_1).set("searchCycle_secs", new IntNode(-1));
    ((ObjectNode) v1_source_1).set("description", new TextNode("NEW DESCRIPTION"));

    assertEquals(0L, (long) v1_source_db.countObjects().get());
    v1_source_db.storeObjects(Arrays.asList(v1_source_1, v1_source_2)).get();
    assertEquals(2L, (long) v1_source_db.countObjects().get());

    final ManagementFuture<Supplier<Object>> f_res = IkanowV1SyncService_Buckets.createNewBucket(
            "aleph...bucket.Template_V2_data_bucket.", bucket_db, bucket_status_db, v1_source_db);

    assertEquals(
            IkanowV1SyncService_Buckets.getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket."),
            f_res.get().get());
    assertEquals(0, f_res.getManagementResults().get().size());

    assertEquals("Should have only 1 bucket", 1L, (long) bucket_db
            .countObjectsBySpec(CrudUtils.allOf(DataBucketBean.class).when("_id", f_res.get().get())).get());
    assertEquals("Should have only 1 bucket status", 1L, (long) bucket_status_db
            .countObjectsBySpec(CrudUtils.allOf(DataBucketStatusBean.class).when("_id", f_res.get().get()))
            .get());

    final Optional<DataBucketStatusBean> status = bucket_status_db.getObjectById(
            IkanowV1SyncService_Buckets.getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket."))
            .get();
    assertEquals(true, status.get().suspended());

    final Optional<DataBucketBean> bucket = bucket_db.getObjectById(
            IkanowV1SyncService_Buckets.getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket."))
            .get();

    final DataBucketBean exp_bucket = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_1);
    //(check a couple of fields)
    assertEquals(exp_bucket.description(), bucket.get().description());
    assertEquals(exp_bucket.full_name(), bucket.get().full_name());

    // Error case

    final ManagementFuture<Supplier<Object>> res_2 = IkanowV1SyncService_Buckets.createNewBucket(
            "aleph...bucket.Template_V2_data_bucket.X", bucket_db, bucket_status_db, v1_source_db);
    try {
        res_2.get();
        fail("Should have errored");
    } catch (Exception e) {
    }
    assertEquals(
            "Should only have 1 management result: " + res_2.getManagementResults().get().stream()
                    .map(BasicMessageBean::message).collect(Collectors.joining()),
            1, res_2.getManagementResults().get().size());
    assertEquals(false, res_2.getManagementResults().get().iterator().next().success());
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_Buckets.java

@SuppressWarnings("deprecation")
@Test
public void test_puttingItAllTogether()
        throws JsonProcessingException, IOException, ParseException, InterruptedException, ExecutionException {
    _logger.info("Starting test_puttingItAllTogether");

    // Set up 3 different scenarios:
    // 1 - doc to be deleted
    // 1 - doc to be updated (+1 that is skipped because it is not approved)
    // 1 - doc to be created (+1 that is skipped because it is not approved)

    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_source_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("ingest.source")).get();

    v1_source_db.deleteDatastore().get();

    IManagementCrudService<DataBucketBean> bucket_db = this._service_context.getCoreManagementDbService()
            .getDataBucketStore();
    bucket_db.deleteDatastore().get();

    IManagementCrudService<DataBucketStatusBean> bucket_status_db = this._service_context
            .getCoreManagementDbService().getDataBucketStatusStore();
    bucket_status_db.deleteDatastore().get();

    // Create 3 V1 sources (only going to save 1 of them)

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_source_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_3 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_4 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_5 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));

    ((ObjectNode) v1_source_2).set("_id", null);
    ((ObjectNode) v1_source_2).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.2"));

    // (not saving this one; it's just a template)
    ((ObjectNode) v1_source_3).set("_id", null);
    ((ObjectNode) v1_source_3).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.3"));

    // (disabled one)
    ((ObjectNode) v1_source_4).set("_id", null);
    ((ObjectNode) v1_source_4).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.4"));
    ((ObjectNode) v1_source_4).set("isApproved", BooleanNode.FALSE);

    // (disabled one with matching bucket)
    ((ObjectNode) v1_source_5).set("_id", null);
    ((ObjectNode) v1_source_5).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.5"));
    ((ObjectNode) v1_source_5).set("isApproved", BooleanNode.FALSE);

    // Create 3 buckets

    final DataBucketBean bucket1 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_1);
    final DataBucketBean bucket3 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_3);
    final DataBucketBean bucket5 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_5);

    assertEquals(0L, (long) bucket_db.countObjects().get());
    bucket_db.storeObjects(Arrays.asList(bucket1, bucket3, bucket5)).get();
    assertEquals(3L, (long) bucket_db.countObjects().get());

    //(store status)

    final DataBucketStatusBean bucket_status1 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket1._id()).with(DataBucketStatusBean::suspended, false)
            .with(DataBucketStatusBean::bucket_path, bucket1.full_name()).done().get();

    final DataBucketStatusBean bucket_status3 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket3._id()).with(DataBucketStatusBean::suspended, true)
            .with(DataBucketStatusBean::bucket_path, bucket3.full_name()).done().get();

    final DataBucketStatusBean bucket_status5 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket5._id()).with(DataBucketStatusBean::suspended, true)
            .with(DataBucketStatusBean::bucket_path, bucket5.full_name()).done().get();

    assertEquals(0L, (long) bucket_status_db.countObjects().get());
    bucket_status_db.storeObjects(Arrays.asList(bucket_status1, bucket_status3, bucket_status5)).get();
    assertEquals(3L, (long) bucket_status_db.countObjects().get());

    // Mod + save sources

    ((ObjectNode) v1_source_1).set("modified", new TextNode(new Date().toGMTString()));
    ((ObjectNode) v1_source_1).set("searchCycle_secs", new IntNode(-1));
    ((ObjectNode) v1_source_1).set("description", new TextNode("NEW DESCRIPTION"));

    ((ObjectNode) v1_source_5).set("modified", new TextNode(new Date().toGMTString()));

    assertEquals(0L, (long) v1_source_db.countObjects().get());
    v1_source_db.storeObjects(Arrays.asList(v1_source_1, v1_source_2, v1_source_4, v1_source_5)).get();
    assertEquals(4L, (long) v1_source_db.countObjects().get());

    // OK now fire off an instance of the runner

    IkanowV1SyncService_Buckets s1 = new IkanowV1SyncService_Buckets(
            BeanTemplateUtils.clone(_service_config).with("v1_enabled", true).done(), _service_context);

    int old = IkanowV1SyncService_Buckets._num_leader_changes;
    s1.start();
    for (int i = 0; i < 20; ++i) {
        try {
            Thread.sleep(1000);
        } catch (Exception e) {
        }

        if ((old + 1) == IkanowV1SyncService_Buckets._num_leader_changes)
            break;
    }
    s1.stop();

    assertEquals(old + 1, IkanowV1SyncService_Buckets._num_leader_changes);

    // Now sleep a bit more to let the monitor have time to finish:
    Thread.sleep(3000L);

    // Check a few things have happened:

    // 1) bucket3 has been deleted

    assertEquals(false,
            bucket_db
                    .getObjectById(IkanowV1SyncService_Buckets
                            .getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket.3"))
                    .get().isPresent());

    // 2) bucket2 has been created

    assertEquals(true,
            bucket_db
                    .getObjectById(IkanowV1SyncService_Buckets
                            .getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket.2"))
                    .get().isPresent());

    // 3) bucket1 has been updated

    final Optional<DataBucketStatusBean> status = bucket_status_db.getObjectById(bucket1._id()).get();
    assertEquals(true, status.get().suspended());

    final Optional<DataBucketBean> updated_bucket = bucket_db.getObjectById(bucket1._id()).get();
    assertEquals("NEW DESCRIPTION", updated_bucket.get().description());
    assertEquals(bucket1.display_name(), updated_bucket.get().display_name());
    assertEquals(bucket1.tags(), updated_bucket.get().tags());
    assertEquals(bucket1.full_name(), updated_bucket.get().full_name());

    // 4) Check counts quickly

    assertEquals(4L, (long) bucket_status_db.countObjects().get());
    //(this should be 3 but we're using the wrong db for maven reasons so the proxy doesn't occur)
    assertEquals(3L, (long) bucket_db.countObjects().get());
    assertEquals(4L, (long) v1_source_db.countObjects().get());

    // 5) Check v1 statuses have been updated...
    final Optional<JsonNode> res1 = v1_source_db
            .getObjectBySpec(CrudUtils.anyOf().when("key", "aleph...bucket.Template_V2_data_bucket.")).get();
    assertEquals(
            "{'harvest_status':'success','harvest_message':'[DATE] Bucket synchronization:\\n(no messages)'}",
            res1.get().get("harvest").toString().replace("\"", "'").replaceAll("\\[.*?\\]", "[DATE]"));

    final Optional<JsonNode> res2 = v1_source_db
            .getObjectBySpec(CrudUtils.anyOf().when("key", "aleph...bucket.Template_V2_data_bucket.2")).get();
    assertEquals(
            "{'harvest_status':'success','harvest_message':'[DATE] Bucket synchronization:\\n(no messages)'}",
            res2.get().get("harvest").toString().replace("\"", "'").replaceAll("\\[.*?\\]", "[DATE]"));

}