Example usage for com.fasterxml.jackson.databind.node TextNode TextNode

List of usage examples for com.fasterxml.jackson.databind.node TextNode TextNode

Introduction

On this page you can find example usage of the com.fasterxml.jackson.databind.node TextNode(String) constructor.

Prototype

public TextNode(String paramString) 

Source Link

Usage

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_Buckets.java

/**
 * Verifies {@code IkanowV1SyncService_Buckets.updateBucket}:
 * <ul>
 *   <li>a V1 source present in the source DB updates the matching V2 bucket
 *       (description changed, other fields untouched) and flips its status to
 *       suspended (presumably driven by {@code searchCycle_secs < 0} — TODO confirm
 *       the mapping in the service);</li>
 *   <li>a source key with no backing V1 document yields a future that completes
 *       exceptionally plus exactly one failed management-side message.</li>
 * </ul>
 */
@Test
public void test_updateBucket()
        throws JsonProcessingException, IOException, InterruptedException, ExecutionException, ParseException {
    _logger.info("Starting test_updateBucket");

    // Raw JSON view over the V1 "ingest.source" collection
    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_source_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("ingest.source")).get();

    v1_source_db.deleteDatastore().get();

    IManagementCrudService<DataBucketBean> bucket_db = this._service_context.getCoreManagementDbService()
            .getDataBucketStore();
    bucket_db.deleteDatastore().get();

    IManagementCrudService<DataBucketStatusBean> bucket_status_db = this._service_context
            .getCoreManagementDbService().getDataBucketStatusStore();
    bucket_status_db.deleteDatastore().get();

    // Create 2 V1 sources (both built from the same sample JSON; the second gets a distinct key)

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_source_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));

    ((ObjectNode) v1_source_2).set("_id", null);
    ((ObjectNode) v1_source_2).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.2"));

    // Create 2 buckets

    final DataBucketBean bucket1 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_1);
    final DataBucketBean bucket2 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_2);

    assertEquals(0L, (long) bucket_db.countObjects().get());
    bucket_db.storeObjects(Arrays.asList(bucket1, bucket2)).get();
    assertEquals(2L, (long) bucket_db.countObjects().get());

    //(store status - bucket 1 starts out NOT suspended so the update below is observable)

    final DataBucketStatusBean bucket_status1 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket1._id()).with(DataBucketStatusBean::suspended, false)
            .with(DataBucketStatusBean::bucket_path, bucket1.full_name()).done().get();

    final DataBucketStatusBean bucket_status2 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket2._id()).with(DataBucketStatusBean::suspended, true)
            .with(DataBucketStatusBean::bucket_path, bucket2.full_name()).done().get();

    assertEquals(0L, (long) bucket_status_db.countObjects().get());
    bucket_status_db.storeObjects(Arrays.asList(bucket_status1, bucket_status2)).get();
    assertEquals(2L, (long) bucket_status_db.countObjects().get());

    // Mod + save sources

    ((ObjectNode) v1_source_1).set("searchCycle_secs", new IntNode(-1));
    ((ObjectNode) v1_source_1).set("description", new TextNode("NEW DESCRIPTION"));

    assertEquals(0L, (long) v1_source_db.countObjects().get());
    v1_source_db.storeObjects(Arrays.asList(v1_source_1)).get(); // (only source 1; source 2 used to demo error)
    assertEquals(1L, (long) v1_source_db.countObjects().get());

    // Run the function under test

    // Test1 - succeeds

    final ManagementFuture<Supplier<Object>> res_1 = IkanowV1SyncService_Buckets
            .updateBucket("aleph...bucket.Template_V2_data_bucket.", bucket_db, bucket_status_db, v1_source_db);

    assertEquals(bucket1._id(), res_1.get().get());
    assertEquals(0, res_1.getManagementResults().get().size());

    // No documents gained or lost in either store
    assertEquals(2L, (long) bucket_db.countObjects().get());
    assertEquals(2L, (long) bucket_status_db.countObjects().get());

    final Optional<DataBucketStatusBean> status = bucket_status_db.getObjectById(bucket1._id()).get();
    assertEquals(true, status.get().suspended());

    // Description changed; display_name/tags/full_name preserved
    final Optional<DataBucketBean> updated_bucket = bucket_db.getObjectById(bucket1._id()).get();
    assertEquals("NEW DESCRIPTION", updated_bucket.get().description());
    assertEquals(bucket1.display_name(), updated_bucket.get().display_name());
    assertEquals(bucket1.tags(), updated_bucket.get().tags());
    assertEquals(bucket1.full_name(), updated_bucket.get().full_name());

    // Test 2 - error because source_2 not in DB any more

    final ManagementFuture<Supplier<Object>> res_2 = IkanowV1SyncService_Buckets.updateBucket(
            "aleph...bucket.Template_V2_data_bucket.2", bucket_db, bucket_status_db, v1_source_db);

    try {
        res_2.get();
        fail("Should have errored");
    } catch (Exception e) {
        // expected - future completes exceptionally for the missing source
    }
    assertEquals(1, res_2.getManagementResults().get().size());
    assertEquals(false, res_2.getManagementResults().get().iterator().next().success());
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_LibraryJars.java

/**
 * Exercises the library-jar lifecycle in {@code IkanowV1SyncService_LibraryJars}
 * against a mocked GridFS:
 * <ul>
 *   <li>create a library bean from a V1 share whose binary comes from GridFS;</li>
 *   <li>create one from a file-reference share (jar copied from a temp file);</li>
 *   <li>reject a duplicate create;</li>
 *   <li>update an existing bean (newer "modified" date, new binary contents);</li>
 *   <li>delete beans — the on-disk jar is only removed by the second delete
 *       (presumably because another bean still referenced it — TODO confirm).</li>
 * </ul>
 */
@Test
public void test_createDeleteLibraryBean()
        throws InterruptedException, ExecutionException, JsonProcessingException, IOException, ParseException {
    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;

    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_share_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("social.share")).get();

    // Raw mongo collection behind the share DB - needed to save DBObjects with ObjectId _ids
    final DBCollection dbc = v1_share_db.getUnderlyingPlatformDriver(DBCollection.class, Optional.empty())
            .get();

    v1_share_db.deleteDatastore().get();

    IManagementCrudService<SharedLibraryBean> library_db = this._service_context.getCoreManagementDbService()
            .getSharedLibraryStore();

    library_db.deleteDatastore().get();

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    // Share 1: binary-backed sample share (string _id converted to a real ObjectId for mongo)
    final ObjectNode v1_share_1 = (ObjectNode) mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_share.json"));
    final DBObject v1_share_1_dbo = (DBObject) JSON.parse(v1_share_1.toString());
    v1_share_1_dbo.put("_id", new ObjectId(v1_share_1.get("_id").asText()));

    // Share 1b: file-reference sample share, pointing at this test's temp dir
    final String fileref_share = IOUtils
            .toString(this.getClass().getResourceAsStream("test_v1_sync_sample_share_fileref.json"), "UTF-8")
            .replace("XXX_TEMPDIR_XXX", temp_dir.replace("\\", "\\\\"));

    final ObjectNode v1_share_1b = (ObjectNode) mapper.readTree(fileref_share);
    final DBObject v1_share_1b_dbo = (DBObject) JSON.parse(v1_share_1b.toString());
    v1_share_1b_dbo.put("_id", new ObjectId(v1_share_1b.get("_id").asText()));

    assertEquals(0L, (long) v1_share_db.countObjects().get());
    dbc.save(v1_share_1_dbo);
    dbc.save(v1_share_1b_dbo);
    //v1_share_db.storeObjects(Arrays.asList(v1_share_1)).get();
    assertEquals(2L, (long) v1_share_db.countObjects().get());

    // Share 2: stored straight into the library DB so it can be deleted later
    final ObjectNode v1_share_2 = (ObjectNode) mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_share.json"));
    v1_share_2.set("_id", new TextNode("655d44e3347d336b3e8c4cbe"));
    final SharedLibraryBean share2 = IkanowV1SyncService_LibraryJars.getLibraryBeanFromV1Share(v1_share_2);
    library_db.storeObject(share2).get();
    assertEquals(1L, (long) library_db.countObjects().get());

    // Create directory (and a source jar for the file-reference case)
    FileUtils.forceMkdir(new File(temp_dir + "/library/"));
    FileUtils.deleteQuietly(new File(temp_dir + "/library/misc"));
    assertFalse(new File(temp_dir + "/library/misc").exists());
    FileUtils.write(new File(temp_dir + "/v1_library.jar"), "test12345");

    // Mock GridFS: find() returns a file whose writeTo() emits "test123" into the target stream
    final GridFS share_fs = Mockito.mock(GridFS.class);
    final GridFSDBFile share_file = Mockito.mock(GridFSDBFile.class, new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            if (invocation.getMethod().getName().equals("writeTo")) {
                ByteArrayOutputStream baos = (ByteArrayOutputStream) invocation.getArguments()[0];
                if (null != baos) {
                    try {
                        baos.write("test123".getBytes());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            return null;
        }
    });
    Mockito.when(share_fs.find(Mockito.<ObjectId>any())).thenReturn(share_file);

    // Create

    Date modified_test = null;
    {
        final ManagementFuture<Supplier<Object>> res = IkanowV1SyncService_LibraryJars.createLibraryBean(
                v1_share_1.get("_id").asText(), library_db, _service_context.getStorageService(), true,
                v1_share_db, share_fs, _service_context);

        // V2 id is the V1 id with a "v1_" prefix
        assertEquals("v1_" + v1_share_1.get("_id").asText(), res.get().get().toString());

        // Wrote DB entry
        assertTrue(library_db.getObjectById(res.get().get().toString()).get().isPresent());

        modified_test = library_db.getObjectById(res.get().get().toString()).get().get().modified();

        // Created file:
        final File f = new File(temp_dir + "/library/misc/library.jar");
        assertTrue(f.exists());
        assertEquals("test123", FileUtils.readFileToString(f));
    }

    // Create - use file reference
    {
        final ManagementFuture<Supplier<Object>> res = IkanowV1SyncService_LibraryJars.createLibraryBean(
                v1_share_1b.get("_id").asText(), library_db, _service_context.getStorageService(), true,
                v1_share_db, share_fs, _service_context);

        assertEquals("v1_" + v1_share_1b.get("_id").asText(), res.get().get().toString());

        // Wrote DB entry
        assertTrue(library_db.getObjectById(res.get().get().toString()).get().isPresent());

        modified_test = library_db.getObjectById(res.get().get().toString()).get().get().modified();

        // Created file (contents copied from the temp jar, not from GridFS):
        final File f = new File(temp_dir + "/library/misc/v1_library.jar");
        assertTrue(f.exists());
        assertEquals("test12345", FileUtils.readFileToString(f));
    }

    // Create duplicate

    {
        final ManagementFuture<Supplier<Object>> res = IkanowV1SyncService_LibraryJars.createLibraryBean(
                v1_share_1.get("_id").asText(), library_db, _service_context.getStorageService(), true,
                v1_share_db, share_fs, _service_context);

        try {
            res.get();
            fail("Should have thrown dup error");
        } catch (Exception e) {
        } // expected - duplicate create must fail
    }

    // Update

    {
        // Touch the V1 share's modified date and swap in new binary contents ("test1234")
        v1_share_1_dbo.put("modified", new Date());
        dbc.save(v1_share_1_dbo);

        final GridFSDBFile share_file2 = Mockito.mock(GridFSDBFile.class, new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) throws Throwable {
                if (invocation.getMethod().getName().equals("writeTo")) {
                    ByteArrayOutputStream baos = (ByteArrayOutputStream) invocation.getArguments()[0];
                    if (null != baos) {
                        try {
                            baos.write("test1234".getBytes());
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
                return null;
            }
        });
        Mockito.when(share_fs.find(Mockito.<ObjectId>any())).thenReturn(share_file2);

        // create=false => update path
        final ManagementFuture<Supplier<Object>> res = IkanowV1SyncService_LibraryJars.createLibraryBean(
                v1_share_1.get("_id").asText(), library_db, _service_context.getStorageService(), false,
                v1_share_db, share_fs, _service_context);

        assertEquals("v1_" + v1_share_1.get("_id").asText(), res.get().get().toString());

        // Wrote DB entry
        assertTrue(library_db.getObjectById(res.get().get().toString()).get().isPresent());

        // Created file:
        final File f = new File(temp_dir + "/library/misc/library.jar");
        assertTrue(f.exists());
        assertEquals("test1234", FileUtils.readFileToString(f));

        final Date modified_2 = library_db.getObjectById(res.get().get().toString()).get().get().modified();

        assertTrue("Mod time should change " + modified_test + " < " + modified_2,
                modified_2.getTime() > modified_test.getTime());
    }

    // Delete

    {
        IkanowV1SyncService_LibraryJars.deleteLibraryBean(v1_share_1.get("_id").asText(), library_db,
                _service_context.getStorageService());

        assertFalse(library_db.getObjectById("v1_" + v1_share_1.get("_id").asText()).get().isPresent());

        // Jar still on disk after the first delete...
        final File f = new File(temp_dir + "/library/misc/library.jar");
        assertTrue(f.exists());

        // ...gone after the second
        IkanowV1SyncService_LibraryJars.deleteLibraryBean(v1_share_2.get("_id").asText(), library_db,
                _service_context.getStorageService());
        assertFalse(f.exists());
    }
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_Buckets.java

/**
 * Verifies {@code IkanowV1SyncService_Buckets.deleteBucket}: after seeding two
 * buckets (plus their status beans), deleting by V1 source key succeeds with no
 * management-side messages and the corresponding bucket document is removed.
 * Bucket status is intentionally NOT checked here (see trailing note).
 */
@Test
public void test_deleteBucket()
        throws JsonProcessingException, IOException, InterruptedException, ExecutionException, ParseException {
    _logger.info("Starting test_deleteBucket");

    // Raw JSON view over the V1 "ingest.source" collection
    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_source_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("ingest.source")).get();

    v1_source_db.deleteDatastore().get();

    IManagementCrudService<DataBucketBean> bucket_db = this._service_context.getCoreManagementDbService()
            .getDataBucketStore();
    bucket_db.deleteDatastore().get();

    IManagementCrudService<DataBucketStatusBean> bucket_status_db = this._service_context
            .getCoreManagementDbService().getDataBucketStatusStore();
    bucket_status_db.deleteDatastore().get();

    // Create 2 V1 sources (same sample JSON; second gets a distinct key)

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_source_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));

    ((ObjectNode) v1_source_2).set("_id", null);
    ((ObjectNode) v1_source_2).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.2"));

    // Create 2 buckets

    final DataBucketBean bucket1 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_1);
    final DataBucketBean bucket2 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_2);

    assertEquals(0L, (long) bucket_db.countObjects().get());
    bucket_db.storeObjects(Arrays.asList(bucket1, bucket2)).get();
    assertEquals(2L, (long) bucket_db.countObjects().get());

    //(store status)

    final DataBucketStatusBean bucket_status1 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket1._id()).with(DataBucketStatusBean::suspended, false)
            .with(DataBucketStatusBean::bucket_path, bucket1.full_name()).done().get();

    final DataBucketStatusBean bucket_status2 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket2._id()).with(DataBucketStatusBean::suspended, true)
            .with(DataBucketStatusBean::bucket_path, bucket2.full_name()).done().get();

    assertEquals(0L, (long) bucket_status_db.countObjects().get());
    bucket_status_db.storeObjects(Arrays.asList(bucket_status1, bucket_status2)).get();
    assertEquals(2L, (long) bucket_status_db.countObjects().get());

    // Run the function under test

    final ManagementFuture<Boolean> f_res = IkanowV1SyncService_Buckets
            .deleteBucket("aleph...bucket.Template_V2_data_bucket.", bucket_db);

    assertEquals(true, f_res.get());
    assertEquals(0, f_res.getManagementResults().get().size());

    // Check if got deleted....

    assertEquals(false,
            bucket_db
                    .getObjectById(IkanowV1SyncService_Buckets
                            .getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket."))
                    .get().isPresent());
    // (would normally test bucket status here - but it won't be changed because test uses underlying_mgmt_db as core_mgmt_db for circular dep issues in maven)
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_LibraryJars.java

/**
 * End-to-end run of the {@code IkanowV1SyncService_LibraryJars} monitor thread.
 * Seeds the V1 share DB and V2 library DB so that one sync pass must:
 * create share 1 (in V1, not in V2), update share 2 (present in both, V1 copy
 * newer), and delete share 3 (in V2 only). Starts the service, waits for a
 * leadership change (up to ~20s) plus a settle period, then asserts all three
 * outcomes. NOTE(review): timing-sensitive — relies on the sync pass completing
 * within the 3s settle sleep.
 */
@Test
public void test_puttingItAllTogether()
        throws JsonProcessingException, IOException, ParseException, InterruptedException, ExecutionException {

    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_share_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("social.share")).get();

    v1_share_db.deleteDatastore().get();

    IManagementCrudService<SharedLibraryBean> library_db = this._service_context.getCoreManagementDbService()
            .getSharedLibraryStore();

    library_db.deleteDatastore().get();

    // Create 3 V1 shares from the same sample JSON, with distinct ids/titles for 2 and 3

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_share_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_share.json"));
    final JsonNode v1_share_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_share.json"));
    final JsonNode v1_share_3 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_share.json"));

    ((ObjectNode) v1_share_2).set("_id", new TextNode("655d44e3347d336b3e8c4cbe"));
    ((ObjectNode) v1_share_2).set("title", new TextNode("/app/aleph2/library/misc/library2.jar"));

    ((ObjectNode) v1_share_3).set("_id", new TextNode("755d44e3347d336b3e8c4cbe"));
    ((ObjectNode) v1_share_3).set("title", new TextNode("/app/aleph2/library/misc/library3.jar"));

    //final SharedLibraryBean share1 = IkanowV1SyncService_LibraryJars.getLibraryBeanFromV1Share(v1_share_1);
    final SharedLibraryBean share2 = IkanowV1SyncService_LibraryJars.getLibraryBeanFromV1Share(v1_share_2);
    final SharedLibraryBean share3 = IkanowV1SyncService_LibraryJars.getLibraryBeanFromV1Share(v1_share_3);

    // Save V1 shares via the raw mongo collection (ObjectId _ids; share 2 gets a newer modified date)
    final DBCollection dbc = v1_share_db.getUnderlyingPlatformDriver(DBCollection.class, Optional.empty())
            .get();
    final DBObject v1_share_1_dbo = (DBObject) JSON.parse(v1_share_1.toString());
    v1_share_1_dbo.put("_id", new ObjectId(v1_share_1.get("_id").asText()));
    v1_share_1_dbo.removeField("binaryId");
    final DBObject v1_share_2_dbo = (DBObject) JSON.parse(v1_share_2.toString());
    v1_share_2_dbo.put("_id", new ObjectId(v1_share_2.get("_id").asText()));
    v1_share_2_dbo.put("modified", new Date());
    v1_share_2_dbo.removeField("binaryId");
    final DBObject v1_share_3_dbo = (DBObject) JSON.parse(v1_share_3.toString());
    v1_share_3_dbo.put("_id", new ObjectId(v1_share_3.get("_id").asText()));
    v1_share_3_dbo.removeField("binaryId");

    // Only shares 1 and 2 are stored in V1 (share 3 exists only in V2, so it should be deleted)
    assertEquals(0L, (long) v1_share_db.countObjects().get());
    dbc.save(v1_share_1_dbo);
    dbc.save(v1_share_2_dbo);
    assertEquals(2L, (long) v1_share_db.countObjects().get());

    // Store the V2 library beans (2 and 3; 1 should be created by the sync)

    assertEquals(0L, (long) library_db.countObjects().get());
    library_db.storeObjects(Arrays.asList(share2, share3)).get();
    assertEquals(2L, (long) library_db.countObjects().get());

    // OK now fire off an instance of the runner

    IkanowV1SyncService_LibraryJars s1 = new IkanowV1SyncService_LibraryJars(
            BeanTemplateUtils.clone(_service_config).with("v1_enabled", true).done(), _service_context);

    // Poll (max 20s) until the service has taken leadership once
    int old = IkanowV1SyncService_LibraryJars._num_leader_changes;
    s1.start();
    for (int i = 0; i < 20; ++i) {
        try {
            Thread.sleep(1000);
        } catch (Exception e) {
        }

        if ((old + 1) == IkanowV1SyncService_LibraryJars._num_leader_changes)
            break;
    }
    s1.stop();

    assertEquals(old + 1, IkanowV1SyncService_LibraryJars._num_leader_changes);

    // Now sleep a bit more to let the monitor have time to finish:
    Thread.sleep(3000L);

    // Check a few things have happened:

    // 1) share 1 was created

    assertTrue("share 1 should be created",
            library_db.getObjectById("v1_" + v1_share_1.get("_id").asText()).get().isPresent());

    // 2) share 2 should have been updated

    SharedLibraryBean updated_share_2 = library_db.getObjectById("v1_" + v1_share_2.get("_id").asText()).get()
            .get();
    assertTrue("share 2 should have been updated: ",
            updated_share_2.modified().getTime() > share2.modified().getTime());

    // 3) share 3 was deleted:

    assertFalse("share 3 should be deleted",
            library_db.getObjectById("v1_" + v1_share_3.get("_id").asText()).get().isPresent());
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_Buckets.java

@Test
public void test_createNewBucket()
        throws JsonProcessingException, IOException, InterruptedException, ExecutionException, ParseException {
    _logger.info("Starting test_createNewBucket");

    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_source_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("ingest.source")).get();

    v1_source_db.deleteDatastore().get();

    IManagementCrudService<DataBucketBean> bucket_db = this._service_context.getCoreManagementDbService()
            .getDataBucketStore();//w  w w . j  a  va 2 s  .  c o  m
    bucket_db.deleteDatastore().get();

    IManagementCrudService<DataBucketStatusBean> bucket_status_db = this._service_context
            .getCoreManagementDbService().getDataBucketStatusStore();
    bucket_status_db.deleteDatastore().get();

    // Create 2 V1 sources

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_source_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));

    ((ObjectNode) v1_source_2).set("_id", null);
    ((ObjectNode) v1_source_2).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.2"));

    // Create 2 buckets

    assertEquals(0L, (long) bucket_db.countObjects().get());
    assertEquals(0L, (long) bucket_status_db.countObjects().get());

    // Save sources

    ((ObjectNode) v1_source_1).set("searchCycle_secs", new IntNode(-1));
    ((ObjectNode) v1_source_1).set("description", new TextNode("NEW DESCRIPTION"));

    assertEquals(0L, (long) v1_source_db.countObjects().get());
    v1_source_db.storeObjects(Arrays.asList(v1_source_1, v1_source_2)).get();
    assertEquals(2L, (long) v1_source_db.countObjects().get());

    final ManagementFuture<Supplier<Object>> f_res = IkanowV1SyncService_Buckets.createNewBucket(
            "aleph...bucket.Template_V2_data_bucket.", bucket_db, bucket_status_db, v1_source_db);

    assertEquals(
            IkanowV1SyncService_Buckets.getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket."),
            f_res.get().get());
    assertEquals(0, f_res.getManagementResults().get().size());

    assertEquals("Should have only 1 bucket", 1L, (long) bucket_db
            .countObjectsBySpec(CrudUtils.allOf(DataBucketBean.class).when("_id", f_res.get().get())).get());
    assertEquals("Should have only 1 bucket status", 1L, (long) bucket_status_db
            .countObjectsBySpec(CrudUtils.allOf(DataBucketStatusBean.class).when("_id", f_res.get().get()))
            .get());

    final Optional<DataBucketStatusBean> status = bucket_status_db.getObjectById(
            IkanowV1SyncService_Buckets.getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket."))
            .get();
    assertEquals(true, status.get().suspended());

    final Optional<DataBucketBean> bucket = bucket_db.getObjectById(
            IkanowV1SyncService_Buckets.getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket."))
            .get();

    final DataBucketBean exp_bucket = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_1);
    //(check a couple of fields)
    assertEquals(exp_bucket.description(), bucket.get().description());
    assertEquals(exp_bucket.full_name(), bucket.get().full_name());

    // Error case

    final ManagementFuture<Supplier<Object>> res_2 = IkanowV1SyncService_Buckets.createNewBucket(
            "aleph...bucket.Template_V2_data_bucket.X", bucket_db, bucket_status_db, v1_source_db);
    try {
        res_2.get();
        fail("Should have errored");
    } catch (Exception e) {
    }
    assertEquals(
            "Should only have 1 management result: " + res_2.getManagementResults().get().stream()
                    .map(BasicMessageBean::message).collect(Collectors.joining()),
            1, res_2.getManagementResults().get().size());
    assertEquals(false, res_2.getManagementResults().get().iterator().next().success());
}

From source file:com.ikanow.aleph2.analytics.services.TestDeduplicationService.java

/**
 * Checks {@code DeduplicationService.handleCustomDeduplication}: with no custom
 * handler supplied the enrichment module is never invoked ({@code _called_batch}
 * stays 0); with a handler+context, all three records (2 batch + 1 existing
 * duplicate) are pushed through the module and the returned stream contains a
 * single element (matching the one id the mocked context reports for deletion —
 * presumably; TODO confirm the returned stream is the deletion-id stream).
 */
@Test
public void test_handleCustomDeduplication() {

    final TestDedupEnrichmentModule dedup_module = new TestDedupEnrichmentModule();

    // Mocked enrichment context: swallows state resets, reports a single id ("a") to delete
    final DeduplicationEnrichmentContext dedup_context = Mockito.mock(DeduplicationEnrichmentContext.class);
    Mockito.doNothing().when(dedup_context).resetMutableState(Mockito.any(), Mockito.any());
    Mockito.when(dedup_context.getObjectIdsToDelete())
            .thenReturn(Stream.of((JsonNode) new TextNode("a")));

    _called_batch.set(0);
    assertEquals(0L, _called_batch.get());

    // Two trivial records for the incoming batch
    final ObjectNode json_a = _mapper.createObjectNode();
    json_a.put("field1", "test1");
    final ObjectNode json_b = _mapper.createObjectNode();
    json_b.put("field2", "test2");

    final List<Tuple3<Long, IBatchRecord, ObjectNode>> record_batch = Stream.<JsonNode>of(json_a, json_b)
            .map(json -> Tuples._3T(0L, (IBatchRecord) new BatchRecordUtils.JsonBatchRecord(json),
                    _mapper.createObjectNode()))
            .collect(Collectors.toList());

    // No handler supplied => module never called
    DeduplicationService.handleCustomDeduplication(Optional.empty(),
            record_batch.stream().collect(Collectors.toList()), Arrays.asList(json_b),
            _mapper.createObjectNode());

    assertEquals(0L, _called_batch.get());

    // Handler supplied => 2 batch records + 1 duplicate processed
    final Stream<JsonNode> ret_val = DeduplicationService.handleCustomDeduplication(
            Optional.of(Tuples._2T(dedup_module, dedup_context)),
            record_batch.stream().collect(Collectors.toList()), Arrays.asList(json_b),
            _mapper.createObjectNode());

    assertEquals(3L, _called_batch.get());

    assertEquals(1L, ret_val.count());
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_Buckets.java

@SuppressWarnings("deprecation")
@Test
public void test_puttingItAllTogether()
        throws JsonProcessingException, IOException, ParseException, InterruptedException, ExecutionException {
    _logger.info("Starting test_puttingItAllTogether");

    // End-to-end test of the V1 -> V2 bucket synchronization loop.
    // Set up 3 different scenarios:
    // 1 - doc to be deleted (bucket exists, V1 source does not)
    // 1 - doc to be updated (+1 that would be updated if it were approved - it is disabled)
    // 1 - doc to be created (+1 that would be created if it were approved - it is disabled)

    @SuppressWarnings("unchecked")
    ICrudService<JsonNode> v1_source_db = this._service_context.getCoreManagementDbService()
            .getUnderlyingPlatformDriver(ICrudService.class, Optional.of("ingest.source")).get();

    v1_source_db.deleteDatastore().get();

    IManagementCrudService<DataBucketBean> bucket_db = this._service_context.getCoreManagementDbService()
            .getDataBucketStore();
    bucket_db.deleteDatastore().get();

    IManagementCrudService<DataBucketStatusBean> bucket_status_db = this._service_context
            .getCoreManagementDbService().getDataBucketStatusStore();
    bucket_status_db.deleteDatastore().get();

    // Create 5 V1 sources from the same template (only going to save 4 of them - not #3)

    final ObjectMapper mapper = BeanTemplateUtils.configureMapper(Optional.empty());

    final JsonNode v1_source_1 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_2 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_3 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_4 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));
    final JsonNode v1_source_5 = mapper
            .readTree(this.getClass().getResourceAsStream("test_v1_sync_sample_source.json"));

    // (give each copy a distinct key; _id is nulled so the store assigns one)
    ((ObjectNode) v1_source_2).set("_id", null);
    ((ObjectNode) v1_source_2).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.2"));

    // (not saving this one it's just a template - its bucket should get deleted)
    ((ObjectNode) v1_source_3).set("_id", null);
    ((ObjectNode) v1_source_3).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.3"));

    // (disabled one)
    ((ObjectNode) v1_source_4).set("_id", null);
    ((ObjectNode) v1_source_4).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.4"));
    ((ObjectNode) v1_source_4).set("isApproved", BooleanNode.FALSE);

    // (disabled one with matching bucket)
    ((ObjectNode) v1_source_5).set("_id", null);
    ((ObjectNode) v1_source_5).set("key", new TextNode("aleph...bucket.Template_V2_data_bucket.5"));
    ((ObjectNode) v1_source_5).set("isApproved", BooleanNode.FALSE);

    // Create 3 buckets (for sources 1, 3 and 5)

    final DataBucketBean bucket1 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_1);
    final DataBucketBean bucket3 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_3);
    final DataBucketBean bucket5 = IkanowV1SyncService_Buckets.getBucketFromV1Source(v1_source_5);

    assertEquals(0L, (long) bucket_db.countObjects().get());
    bucket_db.storeObjects(Arrays.asList(bucket1, bucket3, bucket5)).get();
    assertEquals(3L, (long) bucket_db.countObjects().get());

    //(store status - bucket1 active, buckets 3 and 5 suspended)

    final DataBucketStatusBean bucket_status1 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket1._id()).with(DataBucketStatusBean::suspended, false)
            .with(DataBucketStatusBean::bucket_path, bucket1.full_name()).done().get();

    final DataBucketStatusBean bucket_status3 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket3._id()).with(DataBucketStatusBean::suspended, true)
            .with(DataBucketStatusBean::bucket_path, bucket3.full_name()).done().get();

    final DataBucketStatusBean bucket_status5 = BeanTemplateUtils.build(DataBucketStatusBean.class)
            .with(DataBucketStatusBean::_id, bucket5._id()).with(DataBucketStatusBean::suspended, true)
            .with(DataBucketStatusBean::bucket_path, bucket5.full_name()).done().get();

    assertEquals(0L, (long) bucket_status_db.countObjects().get());
    bucket_status_db.storeObjects(Arrays.asList(bucket_status1, bucket_status3, bucket_status5)).get();
    assertEquals(3L, (long) bucket_status_db.countObjects().get());

    // Mod + save sources
    // (toGMTString is deprecated - intentional here, hence the class-level @SuppressWarnings;
    //  bumping "modified" is what marks the source as changed to the sync service)

    ((ObjectNode) v1_source_1).set("modified", new TextNode(new Date().toGMTString()));
    ((ObjectNode) v1_source_1).set("searchCycle_secs", new IntNode(-1));
    ((ObjectNode) v1_source_1).set("description", new TextNode("NEW DESCRIPTION"));

    ((ObjectNode) v1_source_5).set("modified", new TextNode(new Date().toGMTString()));

    assertEquals(0L, (long) v1_source_db.countObjects().get());
    v1_source_db.storeObjects(Arrays.asList(v1_source_1, v1_source_2, v1_source_4, v1_source_5)).get();
    assertEquals(4L, (long) v1_source_db.countObjects().get());

    // OK now fire off an instance of the runner

    IkanowV1SyncService_Buckets s1 = new IkanowV1SyncService_Buckets(
            BeanTemplateUtils.clone(_service_config).with("v1_enabled", true).done(), _service_context);

    // Wait (up to ~20s) for the service to become leader and run one sync pass
    int old = IkanowV1SyncService_Buckets._num_leader_changes;
    s1.start();
    for (int i = 0; i < 20; ++i) {
        try {
            Thread.sleep(1000);
        } catch (Exception e) {
        }

        if ((old + 1) == IkanowV1SyncService_Buckets._num_leader_changes)
            break;
    }
    s1.stop();

    assertEquals(old + 1, IkanowV1SyncService_Buckets._num_leader_changes);

    // Now sleep a bit more to let the monitor have time to finish:
    Thread.sleep(3000L);

    // Check a few things have happened:

    // 1) bucket3 has been deleted (its V1 source was never saved)

    assertEquals(false,
            bucket_db
                    .getObjectById(IkanowV1SyncService_Buckets
                            .getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket.3"))
                    .get().isPresent());

    // 2) bucket2 has been created (source existed with no matching bucket)

    assertEquals(true,
            bucket_db
                    .getObjectById(IkanowV1SyncService_Buckets
                            .getBucketIdFromV1SourceKey("aleph...bucket.Template_V2_data_bucket.2"))
                    .get().isPresent());

    // 3) bucket1 has been updated (suspended via searchCycle_secs=-1, new description)

    final Optional<DataBucketStatusBean> status = bucket_status_db.getObjectById(bucket1._id()).get();
    assertEquals(true, status.get().suspended());

    final Optional<DataBucketBean> updated_bucket = bucket_db.getObjectById(bucket1._id()).get();
    assertEquals("NEW DESCRIPTION", updated_bucket.get().description());
    assertEquals(bucket1.display_name(), updated_bucket.get().display_name());
    assertEquals(bucket1.tags(), updated_bucket.get().tags());
    assertEquals(bucket1.full_name(), updated_bucket.get().full_name());

    // 4) Check counts quickly

    assertEquals(4L, (long) bucket_status_db.countObjects().get());
    //(this should be 3 but we're using the wrong db for maven reasons so the proxy doesn't occur)
    assertEquals(3L, (long) bucket_db.countObjects().get());
    assertEquals(4L, (long) v1_source_db.countObjects().get());

    // 5) Check v1 statuses have been updated (harvest status written back, dates normalized out)
    final Optional<JsonNode> res1 = v1_source_db
            .getObjectBySpec(CrudUtils.anyOf().when("key", "aleph...bucket.Template_V2_data_bucket.")).get();
    assertEquals(
            "{'harvest_status':'success','harvest_message':'[DATE] Bucket synchronization:\\n(no messages)'}",
            res1.get().get("harvest").toString().replace("\"", "'").replaceAll("\\[.*?\\]", "[DATE]"));

    final Optional<JsonNode> res2 = v1_source_db
            .getObjectBySpec(CrudUtils.anyOf().when("key", "aleph...bucket.Template_V2_data_bucket.2")).get();
    assertEquals(
            "{'harvest_status':'success','harvest_message':'[DATE] Bucket synchronization:\\n(no messages)'}",
            res2.get().get("harvest").toString().replace("\"", "'").replaceAll("\\[.*?\\]", "[DATE]"));

}

From source file:com.ikanow.aleph2.analytics.services.TestDeduplicationService.java

@SuppressWarnings("unchecked")
@Test
public void test_handleDuplicateRecord() {

    // Exercises DeduplicationService.handleDuplicateRecord across every
    // deduplication policy (leave / update / overwrite / custom / custom_update),
    // checking in each case: what is emitted, whether the custom module runs
    // (_called_batch), what annotations land on the record (_3()), and whether
    // the record is removed from the mutable key->records map.

    final IEnrichmentModuleContext enrich_context = Mockito.mock(IEnrichmentModuleContext.class);

    Mockito.when(enrich_context.emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class),
            Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)))
            .thenReturn(Validation.success(_mapper.createObjectNode()));

    TestDedupEnrichmentModule test_module = new TestDedupEnrichmentModule();

    final String ts_field = "@timestamp";

    // Existing stored record (t=0)
    final ObjectNode old_json = _mapper.createObjectNode();
    old_json.put("_id", "old_record");
    old_json.put("@timestamp", 0L);
    old_json.put("url", "test");

    // Incoming record, strictly newer than the stored one (t=1)
    final ObjectNode new_json = _mapper.createObjectNode();
    new_json.put("@timestamp", 1L);
    new_json.put("url", "test");

    // Incoming record with the same timestamp as the stored one (t=0)
    final ObjectNode new_json_but_same_time = _mapper.createObjectNode();
    new_json_but_same_time.put("@timestamp", 0L);
    new_json_but_same_time.put("url", "test");

    Tuple3<Long, IBatchRecord, ObjectNode> new_record = Tuples._3T(0L,
            new BatchRecordUtils.JsonBatchRecord(new_json), _mapper.createObjectNode());
    Tuple3<Long, IBatchRecord, ObjectNode> new_record_but_same_time = Tuples._3T(0L,
            new BatchRecordUtils.JsonBatchRecord(new_json_but_same_time), _mapper.createObjectNode());

    new_record._2().getContent(); //(code coverage!)

    final TextNode key = new TextNode("url");

    LinkedHashMap<JsonNode, LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>> mutable_obj_map = new LinkedHashMap<>();

    final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_records = Stream.of(new_record)
            .collect(Collectors.toCollection(LinkedList::new));
    final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_records_but_same_time = Stream
            .of(new_record_but_same_time).collect(Collectors.toCollection(LinkedList::new));

    // Simple case Leave policy: duplicate is silently dropped
    {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.leave).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records,
                Arrays.asList(old_json), key, mutable_obj_map);
        assertEquals(0L, ret_val.count());

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // No custom processing performed
        assertEquals(0, _called_batch.get());
        // No annotations/mutations
        assertEquals("{}", new_record._3().toString());
        // Object removed from mutable map
        assertEquals(1, mutable_obj_map.size());
    }
    // Simple case update policy - newer incoming time wins, record kept for update
    final Consumer<Boolean> test_time_updates = delete_unhandled -> {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.update)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, delete_unhandled).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        // (add the same object twice to test the "return ids to delete" functionality)
        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records,
                Arrays.asList(old_json, old_json), key, mutable_obj_map);
        if (delete_unhandled) {
            assertEquals(Arrays.asList("old_record"), ret_val.sorted()
                    .map(j -> DeduplicationService.jsonToObject(j)).collect(Collectors.toList()));
        } else {
            assertEquals(0L, ret_val.count());
        }

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // No custom processing performed
        assertEquals(0, _called_batch.get());
        // _id annotation copied over from the stored record
        assertEquals("{\"_id\":\"old_record\"}", new_record._3().toString());
        // Object NOT removed from mutable map (still both entries)
        assertEquals(2, mutable_obj_map.size());
    };
    test_time_updates.accept(true);
    test_time_updates.accept(false);

    // Simple case update policy - times the same, so treated as a no-op duplicate
    {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.update)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time,
                Arrays.asList(old_json), key, mutable_obj_map);
        assertEquals(0L, ret_val.count());

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // No custom processing performed
        assertEquals(0, _called_batch.get());
        // No annotations/mutations
        assertEquals("{}", new_record_but_same_time._3().toString());
        // Object removed from mutable map
        assertEquals(1, mutable_obj_map.size());
    }
    // overwrite: record always replaces the stored one
    final Consumer<Boolean> test_overwrites = delete_unhandled -> {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.overwrite)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, delete_unhandled).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records,
                Arrays.asList(old_json, old_json), key, mutable_obj_map);
        if (delete_unhandled) {
            assertEquals(Arrays.asList("old_record"), ret_val.sorted()
                    .map(j -> DeduplicationService.jsonToObject(j)).collect(Collectors.toList()));
        } else {
            assertEquals(0L, ret_val.count());
        }

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // No custom processing performed
        assertEquals(0, _called_batch.get());
        // _id annotation copied over from the stored record
        assertEquals("{\"_id\":\"old_record\"}", new_record._3().toString());
        // Object NOT removed from mutable map (still both entries)
        assertEquals(2, mutable_obj_map.size());
    };
    test_overwrites.accept(true);
    test_overwrites.accept(false);

    //(check overwrite ignores times - same-time record still overwrites)
    {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.overwrite)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time,
                Arrays.asList(old_json), key, mutable_obj_map);
        assertEquals(0L, ret_val.count());

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // No custom processing performed
        assertEquals(0, _called_batch.get());
        // _id annotation copied over from the stored record
        assertEquals("{\"_id\":\"old_record\"}", new_record_but_same_time._3().toString());
        // Object NOT removed from mutable map (still both entries)
        assertEquals(2, mutable_obj_map.size());
    }
    // custom: delegate everything to the custom module
    {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records,
                Arrays.asList(old_json), key, mutable_obj_map);
        assertEquals(0L, ret_val.count());

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // Custom processing performed
        assertEquals(2, _called_batch.get()); //(old + new)
        // No _id annotation
        assertEquals("{}", new_record._3().toString()); // up to the custom code to do this
        // Object removed from mutable map
        assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit)
    }
    //(check custom ignores times - same-time record still goes to the custom module)
    {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time,
                Arrays.asList(old_json), key, mutable_obj_map);
        assertEquals(0L, ret_val.count());

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // Custom processing performed
        assertEquals(2, _called_batch.get()); //(old + new)
        // No _id annotation
        assertEquals("{}", new_record_but_same_time._3().toString()); // up to the custom code to do this
        // Object removed from mutable map
        assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit)
    }
    // Simple case *custom* update policy - newer time, so custom module runs
    {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom_update)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records,
                Arrays.asList(old_json), key, mutable_obj_map);
        assertEquals(0L, ret_val.count());

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // Custom processing performed
        assertEquals(2, _called_batch.get()); //(old + new)
        // No _id annotation
        assertEquals("{}", new_record._3().toString()); // up to the custom code to do this
        // Object removed from mutable map
        assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit)
    }
    // Simple case *custom* update policy - times the same, so treated as a no-op duplicate
    {
        //(reset)
        mutable_obj_map.clear();
        mutable_obj_map.put(new TextNode("never_changed"), new_records);
        mutable_obj_map.put(new TextNode("url"), new_records);
        assertEquals(2, mutable_obj_map.size());
        new_record._3().removeAll();
        new_record_but_same_time._3().removeAll();
        _called_batch.set(0);

        DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class)
                .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom_update)
                .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get();
        DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config,
                j -> Optional.empty());

        final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config,
                Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time,
                Arrays.asList(old_json), key, mutable_obj_map);
        assertEquals(0L, ret_val.count());

        // Nothing emitted
        Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class),
                Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class),
                Mockito.any(Optional.class));
        // No custom processing performed
        assertEquals(0, _called_batch.get());
        // No annotations/mutations
        assertEquals("{}", new_record_but_same_time._3().toString());
        // Object removed from mutable map
        assertEquals(1, mutable_obj_map.size());
    }

}

From source file:de.fhg.fokus.odp.registry.ckan.ODRClientImpl.java

@Override
public boolean deleteMetadata(User user, String metadataName) {
    // Build the CKAN delete parameter object: { "id": <metadataName> }
    ObjectNode deleteParam = OM.createObjectNode();
    deleteParam.set("id", new TextNode(metadataName));

    try {
        String apikey = ((UserImpl) user).getApikey();
        Response response = action.deleteMetadata(apikey, deleteParam);
        // Compare status codes, not StatusType references: getStatusInfo() is not
        // guaranteed to return the Status.OK enum instance, so '==' can be false
        // even for a 200 response.
        if (response.getStatusInfo().getStatusCode() == Status.OK.getStatusCode()) {
            JsonNode node = response.readEntity(JsonNode.class);
            if (isSuccess(node)) {
                return true;
            } else {
                log.error("Delete failed, non-OK response from CKAN. Metadata: " + metadataName);
            }
        }
        // Non-200 response, or CKAN reported failure
        return false;
    } catch (Exception e) {
        // Log the exception with its stack trace instead of printStackTrace(),
        // so the failure cause ends up in the application log.
        log.error("Could not delete metadata: " + metadataName, e);
        return false;
    }
}

From source file:de.fhg.fokus.odp.registry.ckan.ODRClientImpl.java

@Override
public boolean deleteUser(User user) {
    // Build the CKAN delete parameter object: { "id": <user id> }
    ObjectNode deleteParam = OM.createObjectNode();
    deleteParam.set("id", new TextNode(user.getId()));

    try {
        Response response = action.deleteUser(authorizationKey, deleteParam);
        // Compare status codes, not StatusType references: getStatusInfo() is not
        // guaranteed to return the Status.OK enum instance, so '==' can be false
        // even for a 200 response.
        if (response.getStatusInfo().getStatusCode() == Status.OK.getStatusCode()) {
            JsonNode node = response.readEntity(JsonNode.class);
            if (isSuccess(node)) {
                return true;
            } else {
                log.error("Delete failed, non-OK response from CKAN. User: " + user.getName());
            }
        }
        // Non-200 response, or CKAN reported failure
        return false;
    } catch (Exception e) {
        // Log the exception with its stack trace instead of printStackTrace(),
        // so the failure cause ends up in the application log.
        log.error("Could not delete User: " + user.getName(), e);
        return false;
    }
}