Example usage for org.apache.hadoop.fs FileContext getLocalFSFileContext

Introduction

This page collects example usages of org.apache.hadoop.fs.FileContext.getLocalFSFileContext, drawn from open-source projects.

Prototype

public static FileContext getLocalFSFileContext() throws UnsupportedFileSystemException 
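
Before the real-world examples below, here is a minimal, self-contained sketch of the call in isolation (the class name and scratch-directory name are illustrative, not taken from the projects under Usage): getLocalFSFileContext() returns a FileContext bound to the local file system, whose makeQualified() turns a relative path into a fully qualified file:// path, ready for the usual mkdir/delete/util() operations.

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class LocalFsContextSketch {
    public static void main(String[] args) throws Exception {
        // Bind a FileContext to the local file system; this is the call
        // documented on this page - it throws UnsupportedFileSystemException
        // if no local file system implementation is available
        final FileContext fc = FileContext.getLocalFSFileContext();

        // Qualify a relative path against the local FS ("file:///...")
        final Path scratch = fc.makeQualified(
                new Path(System.getProperty("java.io.tmpdir"), "fc_scratch"));

        // Create a scratch directory (createParent = true), then delete it recursively
        fc.mkdir(scratch, FsPermission.getDirDefault(), true);
        System.out.println("Qualified path: " + scratch);
        fc.delete(scratch, true);
    }
}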

Usage

From source file:com.ikanow.aleph2.analytics.hadoop.assets.VerySimpleLocalExample.java

License:Apache License

@SuppressWarnings({ "deprecation", "unchecked", "rawtypes" })
@Test
public void test_localHadoopLaunch()
        throws IOException, IllegalStateException, ClassNotFoundException, InterruptedException {

    // 0) Setup the temp dir 
    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;
    //final Path tmp_path = FileContext.getLocalFSFileContext().makeQualified(new Path(temp_dir));
    final Path tmp_path2 = FileContext.getLocalFSFileContext()
            .makeQualified(new Path(temp_dir + "/tmp_output"));
    try {
        FileContext.getLocalFSFileContext().delete(tmp_path2, true);
    } catch (Exception e) {
        // (ignore - the output directory just doesn't exist yet)
    }

    // 1) Setup config with local mode
    final Configuration config = new Configuration();
    config.setBoolean("mapred.used.genericoptionsparser", true); // (just stops an annoying warning from appearing)
    config.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
    config.set("mapred.job.tracker", "local");
    config.set("fs.defaultFS", "local");
    config.unset("mapreduce.framework.name");

    // If running locally, turn "snappy" off - tomcat isn't pointing its native library path in the right place
    config.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.DefaultCodec");

    // 2) Build job and do more setup using the Job API
    //TODO: not sure why this is deprecated - it doesn't seem to be in v1? We do need to move to JobConf at some point, but I ran into some
    // issues when trying to do everything I needed for V1, so it seems expedient to start here and migrate away later
    final Job hj = new Job(config); // (NOTE: from here, changes to config are ignored)

    // Input format:
    //TODO: fails because of a guava issue - looks like we'll need to move to 2.7 and check it works with a 2.5.x server?
    //TextInputFormat.addInputPath(hj, tmp_path);
    //hj.setInputFormatClass((Class<? extends InputFormat>) Class.forName ("org.apache.hadoop.mapreduce.lib.input.TextInputFormat"));
    hj.setInputFormatClass(TestInputFormat.class);

    // Output format:
    hj.setOutputFormatClass((Class<? extends OutputFormat>) Class
            .forName("org.apache.hadoop.mapreduce.lib.output.TextOutputFormat"));
    TextOutputFormat.setOutputPath(hj, tmp_path2);

    // Mapper etc (combiner/reducer are similar)
    hj.setMapperClass(TestMapper.class);
    hj.setOutputKeyClass(Text.class);
    hj.setOutputValueClass(Text.class);
    hj.setNumReduceTasks(0); // (disable reducer for now)

    hj.setJar("test");

    try {
        hj.submit();
    } catch (UnsatisfiedLinkError e) {
        throw new RuntimeException(
                "This is a windows/hadoop compatibility problem - adding the hadoop-commons in the misc_test_assets subdirectory to the top of the classpath should resolve it (and does in V1), though I haven't yet made that work with Aleph2",
                e);
    }
    //hj.getJobID().toString();
    while (!hj.isComplete()) {
        Thread.sleep(1000);
    }
    assertTrue("Finished successfully", hj.isSuccessful());
}

From source file:com.ikanow.aleph2.analytics.hadoop.services.BeJobLauncher.java

License:Open Source License

/** Cache the system and user classpaths
 * @param job
 * @param bucket
 * @param context
 * @throws IOException 
 * @throws ExecutionException 
 * @throws InterruptedException 
 * @throws IllegalArgumentException 
 */
protected static void cacheJars(final Job job, final DataBucketBean bucket, final IAnalyticsContext context)
        throws IllegalArgumentException, InterruptedException, ExecutionException, IOException {
    final FileContext fc = context.getServiceContext().getStorageService()
            .getUnderlyingPlatformDriver(FileContext.class, Optional.empty()).get();
    final String rootPath = context.getServiceContext().getStorageService().getRootPath();

    // Aleph2 libraries: need to cache them
    context.getAnalyticsContextLibraries(Optional.empty()).stream().map(f -> new File(f))
            .map(f -> Tuples._2T(f, new Path(rootPath + "/" + f.getName()))).map(Lambdas.wrap_u(f_p -> {
                final FileStatus fs = Lambdas.get(() -> {
                    try {
                        return fc.getFileStatus(f_p._2());
                    } catch (Exception e) {
                        return null;
                    }
                });
                if (null == fs) { //cache doesn't exist
                    // Local version
                    Path srcPath = FileContext.getLocalFSFileContext()
                            .makeQualified(new Path(f_p._1().toString()));
                    fc.util().copy(srcPath, f_p._2());
                }
                return f_p._2();
            })).forEach(Lambdas.wrap_consumer_u(path -> job.addFileToClassPath(path)));

    // User libraries: this is slightly easier since one of the 2 keys
    // is the HDFS path (the other is the _id)
    context.getAnalyticsLibraries(Optional.of(bucket), bucket.analytic_thread().jobs()).get().entrySet()
            .stream().map(kv -> kv.getKey()).filter(path -> path.startsWith(rootPath))
            .forEach(Lambdas.wrap_consumer_u(path -> job.addFileToClassPath(new Path(path))));
}

From source file:com.ikanow.aleph2.core.shared.utils.TestClassloaderUtils.java

License:Apache License

@Test
public void test_classLoading_primaryLib() throws UnsupportedFileSystemException {

    try {
        Class.forName("com.ikanow.aleph2.test.example.ExampleHarvestTechnology");
        assertTrue("Should have thrown a ClassNotFoundException", false);
    } catch (ClassNotFoundException e) {
        //expected!
    }

    final String pathname = System.getProperty("user.dir") + "/misc_test_assets/simple-harvest-example.jar";
    final Path path = new Path(pathname);
    final Path path2 = FileContext.getLocalFSFileContext().makeQualified(path);

    final Validation<BasicMessageBean, IHarvestTechnologyModule> ret_val = ClassloaderUtils
            .getFromCustomClasspath(IHarvestTechnologyModule.class,
                    "com.ikanow.aleph2.test.example.ExampleHarvestTechnology", Optional.of(path2.toString()),
                    Collections.emptyList(), "test1", new TestMessageBean());

    if (ret_val.isFail()) {
        System.out.println("About to crash with: " + ret_val.fail().message());
    }
    assertEquals(true, ret_val.success()
            .canRunOnThisNode(BeanTemplateUtils.build(DataBucketBean.class).done().get(), null));

    try {
        Class.forName("com.ikanow.aleph2.test.example.ExampleHarvestTechnology");
        assertTrue("STILL! Should have thrown a ClassNotFoundException", false);
    } catch (ClassNotFoundException e) {
        //expected!
    }

    // Check if it gets cached:

    final Validation<BasicMessageBean, IHarvestTechnologyModule> ret_val_2 = ClassloaderUtils
            .getFromCustomClasspath(IHarvestTechnologyModule.class,
                    "com.ikanow.aleph2.test.example.ExampleHarvestTechnology", Optional.of(path2.toString()),
                    Collections.emptyList(), "test1", new TestMessageBean());

    assertEquals(ret_val.success().getClass().getClassLoader(),
            ret_val_2.success().getClass().getClassLoader());

    // Clear cache and check we get another classloader:

    ClassloaderUtils.clearCache();

    final Validation<BasicMessageBean, IHarvestTechnologyModule> ret_val_3 = ClassloaderUtils
            .getFromCustomClasspath(IHarvestTechnologyModule.class,
                    "com.ikanow.aleph2.test.example.ExampleHarvestTechnology", Optional.of(path2.toString()),
                    Collections.emptyList(), "test1", new TestMessageBean());

    assertNotEquals(ret_val.success().getClass().getClassLoader(),
            ret_val_3.success().getClass().getClassLoader());
}

From source file:com.ikanow.aleph2.core.shared.utils.TestClassloaderUtils.java

License:Apache License

@Test
public void test_classLoading_secondaryLib() throws UnsupportedFileSystemException {

    try {
        Class.forName("com.ikanow.aleph2.test.example.ExampleHarvestTechnology");
        assertTrue("Should have thrown a ClassNotFoundException", false);
    } catch (ClassNotFoundException e) {
        //expected!
    }

    final String pathname = System.getProperty("user.dir") + "/misc_test_assets/simple-harvest-example.jar";
    final Path path = new Path(pathname);
    final Path path2 = FileContext.getLocalFSFileContext().makeQualified(path);

    final Validation<BasicMessageBean, IHarvestTechnologyModule> ret_val = ClassloaderUtils
            .getFromCustomClasspath(IHarvestTechnologyModule.class,
                    "com.ikanow.aleph2.test.example.ExampleHarvestTechnology", Optional.empty(),
                    Arrays.asList(path2.toString()), "test1", new TestMessageBean());

    if (ret_val.isFail()) {
        System.out.println("About to crash with: " + ret_val.fail().message());
    }
    assertEquals(true, ret_val.success()
            .canRunOnThisNode(BeanTemplateUtils.build(DataBucketBean.class).done().get(), null));

    try {
        Class.forName("com.ikanow.aleph2.test.example.ExampleHarvestTechnology");
        assertTrue("STILL! Should have thrown a ClassNotFoundException", false);
    } catch (ClassNotFoundException e) {
        //expected!
    }
}

From source file:com.ikanow.aleph2.core.shared.utils.TestClassloaderUtils.java

License:Apache License

@Test
public void test_classLoading_fails() throws UnsupportedFileSystemException {

    try {
        Class.forName("com.ikanow.aleph2.test.example.ExampleHarvestTechnology");
        assertTrue("Should have thrown a ClassNotFoundException", false);
    } catch (ClassNotFoundException e) {
        //expected!
    }

    final String pathname = System.getProperty("user.dir") + "/simple-harvest-examplee-FAILS.jar";
    final Path path = new Path(pathname);
    final Path path2 = FileContext.getLocalFSFileContext().makeQualified(path);

    final Validation<BasicMessageBean, IHarvestTechnologyModule> ret_val = ClassloaderUtils
            .getFromCustomClasspath(IHarvestTechnologyModule.class,
                    "com.ikanow.aleph2.test.example.ExampleHarvestTechnology", Optional.empty(),
                    Arrays.asList(path2.toString()), "test1", new TestMessageBean());

    if (ret_val.isSuccess()) {
        System.out.println("About to crash, found class?");
    }
    BasicMessageBean error = ret_val.fail();

    assertEquals(error.command(), "TestMessageBean");
    assertEquals((double) error.date().getTime(), (double) ((new Date()).getTime()), 1000.0);
    assertEquals(error.details(), null);
    final String expected_err_fragment = "Error loading class com.ikanow.aleph2.test.example.ExampleHarvestTechnology: [org.xeustechnologies.jcl.exception.JclException: java.lang.ClassNotFoundException: com.ikanow.aleph2.test.example.ExampleHarvestTechnology: RuntimeException]";
    assertTrue("Failed error message, should contain: " + expected_err_fragment + " vs " + error.message(),
            error.message().contains(expected_err_fragment));
    assertEquals(error.message_code(), null);
    assertEquals(error.source(), "test1");
    assertEquals(error.success(), false);

}

From source file:com.ikanow.aleph2.core.shared.utils.TestClassloaderUtils.java

License:Apache License

@Test
public void test_classLoading_wrongInterface() throws UnsupportedFileSystemException {

    try {
        Class.forName("com.ikanow.aleph2.test.example.ExampleHarvestTechnology");
        assertTrue("Should have thrown a ClassNotFoundException", false);
    } catch (ClassNotFoundException e) {
        //expected!
    }

    final String pathname = System.getProperty("user.dir") + "/misc_test_assets/simple-harvest-example.jar";
    final Path path = new Path(pathname);
    final Path path2 = FileContext.getLocalFSFileContext().makeQualified(path);

    final Validation<BasicMessageBean, IHarvestTechnologyModule> ret_val = ClassloaderUtils
            .getFromCustomClasspath(IHarvestTechnologyModule.class, "java.lang.String", Optional.empty(),
                    Arrays.asList(path2.toString()), "test1", new TestMessageBean());

    if (ret_val.isSuccess()) {
        System.out.println("About to crash, found class?");
    }
    BasicMessageBean error = ret_val.fail();

    assertEquals(error.command(), "TestMessageBean");
    assertEquals((double) error.date().getTime(), (double) ((new Date()).getTime()), 1000.0);
    assertEquals(error.details(), null);
    final String expected_err_fragment = "Error: class java.lang.String is not an implementation of interface";
    assertTrue("Failed error message, should contain: " + expected_err_fragment + " vs " + error.message(),
            error.message().contains(expected_err_fragment));
    assertEquals(error.message_code(), null);
    assertEquals(error.source(), "test1");
    assertEquals(error.success(), false);

}

From source file:com.ikanow.aleph2.data_import_manager.analytics.actors.TestDataBucketChangeActor.java

License:Apache License

@Test
public void test_getAnalyticsTechnology()
        throws UnsupportedFileSystemException, InterruptedException, ExecutionException {
    final DataBucketBean bucket = createBucket("test_tech_id_analytics"); //(note this also sets the analytics name in the jobs)   

    final String pathname1 = System.getProperty("user.dir") + "/misc_test_assets/simple-analytics-example.jar";
    final Path path1 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname1));
    final String pathname2 = System.getProperty("user.dir") + "/misc_test_assets/simple-harvest-example2.jar";
    final Path path2 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname2));

    List<SharedLibraryBean> lib_elements = createSharedLibraryBeans_analytics(path1, path2);

    //////////////////////////////////////////////////////

    // 1) Check - if called with an error, then just passes that error along

    final BasicMessageBean error = SharedErrorUtils.buildErrorMessage("test_source", "test_message",
            "test_error");

    final Validation<BasicMessageBean, Tuple2<IAnalyticsTechnologyModule, ClassLoader>> test1 = DataBucketAnalyticsChangeActor
            .getAnalyticsTechnology(bucket, "test_tech_id_analytics", true, Optional.empty(), Optional.empty(),
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source2", _actor_context.getDataImportConfigurationBean(), Validation.fail(error));

    assertTrue("Got error back", test1.isFail());
    assertEquals("test_source", test1.fail().source());
    assertEquals("test_message", test1.fail().command());
    assertEquals("test_error", test1.fail().message());

    //////////////////////////////////////////////////////

    // 2) Check the error handling inside getAnalyticsTechnology

    final ImmutableMap<String, Tuple2<SharedLibraryBean, String>> test2_input = ImmutableMap
            .<String, Tuple2<SharedLibraryBean, String>>builder()
            .put("test_tech_id_analytics_2b", Tuples._2T(null, null)).build();

    final Validation<BasicMessageBean, Tuple2<IAnalyticsTechnologyModule, ClassLoader>> test2a = DataBucketAnalyticsChangeActor
            .getAnalyticsTechnology(createBucket("test_tech_id_analytics_2a"), "test_tech_id_analytics_2a",
                    true, Optional.empty(), Optional.empty(),
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source2a", _actor_context.getDataImportConfigurationBean(),
                    Validation.success(test2_input));

    assertTrue("Got error back", test2a.isFail());
    assertEquals("test_source2a", test2a.fail().source());
    assertEquals("BucketActionOfferMessage", test2a.fail().command());
    assertEquals(
            ErrorUtils.get(SharedErrorUtils.SHARED_LIBRARY_NAME_NOT_FOUND, bucket.full_name(),
                    "test_tech_id_analytics_2a"), // (cloned bucket above)
            test2a.fail().message());

    final Validation<BasicMessageBean, Tuple2<IAnalyticsTechnologyModule, ClassLoader>> test2b = DataBucketAnalyticsChangeActor
            .getAnalyticsTechnology(createBucket("test_tech_id_analytics_2b"), "test_tech_id_analytics_2b",
                    true, Optional.empty(), Optional.empty(),
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source2b", _actor_context.getDataImportConfigurationBean(),
                    Validation.success(test2_input));

    assertTrue("Got error back", test2b.isFail());
    assertEquals("test_source2b", test2b.fail().source());
    assertEquals("BucketActionOfferMessage", test2b.fail().command());
    assertEquals(
            ErrorUtils.get(SharedErrorUtils.SHARED_LIBRARY_NAME_NOT_FOUND, bucket.full_name(),
                    "test_tech_id_analytics_2a"), // (cloned bucket above)
            test2a.fail().message());

    //////////////////////////////////////////////////////

    // 3) OK now it will actually do something 

    final String java_name = _service_context.getGlobalProperties().local_cached_jar_dir() + File.separator
            + "test_tech_id_analytics.cache.jar";

    System.out.println(
            "Needed to delete locally cached file? " + java_name + ": " + new File(java_name).delete());

    // Requires that the file has already been cached:
    final Validation<BasicMessageBean, String> cached_file = JarCacheUtils
            .getCachedJar(_service_context.getGlobalProperties().local_cached_jar_dir(), lib_elements.get(0),
                    _service_context.getStorageService(), "test3", "test3")
            .get();

    if (cached_file.isFail()) {
        fail("About to crash with: " + cached_file.fail().message());
    }

    assertTrue("The cached file should exist: " + java_name, new File(java_name).exists());

    // OK the setup is done and validated now actually test the underlying call:

    final ImmutableMap<String, Tuple2<SharedLibraryBean, String>> test3_input = ImmutableMap
            .<String, Tuple2<SharedLibraryBean, String>>builder()
            .put("test_tech_id_analytics", Tuples._2T(lib_elements.get(0), cached_file.success())).build();

    final Validation<BasicMessageBean, Tuple2<IAnalyticsTechnologyModule, ClassLoader>> test3 = DataBucketAnalyticsChangeActor
            .getAnalyticsTechnology(bucket, "test_tech_id_analytics", true, Optional.empty(), Optional.empty(),
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source3", _actor_context.getDataImportConfigurationBean(),
                    Validation.success(test3_input));

    if (test3.isFail()) {
        fail("About to crash with: " + test3.fail().message());
    }
    assertTrue("getAnalyticsTechnology call succeeded", test3.isSuccess());
    assertTrue("analytics tech created", test3.success() != null);
    assertEquals(lib_elements.get(0).misc_entry_point(), test3.success()._1().getClass().getName());

    // Now check with the "not just the harvest tech" flag set

    final String java_name2 = _service_context.getGlobalProperties().local_cached_jar_dir() + File.separator
            + "test_module_id.cache.jar";

    System.out.println(
            "Needed to delete locally cached file? " + java_name2 + ": " + new File(java_name2).delete());

    // Requires that the file has already been cached:
    final Validation<BasicMessageBean, String> cached_file2 = JarCacheUtils
            .getCachedJar(_service_context.getGlobalProperties().local_cached_jar_dir(), lib_elements.get(1),
                    _service_context.getStorageService(), "test3b", "test3b")
            .get();

    if (cached_file2.isFail()) {
        fail("About to crash with: " + cached_file2.fail().message());
    }

        assertTrue("The cached file exists: " + java_name2, new File(java_name2).exists());

    final ImmutableMap<String, Tuple2<SharedLibraryBean, String>> test3b_input = ImmutableMap
            .<String, Tuple2<SharedLibraryBean, String>>builder()
            .put("test_tech_id_analytics", Tuples._2T(lib_elements.get(0), cached_file.success()))
            .put("test_module_id", Tuples._2T(lib_elements.get(1), cached_file.success())).build();

    final Validation<BasicMessageBean, Tuple2<IAnalyticsTechnologyModule, ClassLoader>> test3b = DataBucketAnalyticsChangeActor
            .getAnalyticsTechnology(bucket, "test_tech_id_analytics", false, Optional.empty(), Optional.empty(),
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source3b", _actor_context.getDataImportConfigurationBean(),
                    Validation.success(test3b_input));

    if (test3b.isFail()) {
        fail("About to crash with: " + test3b.fail().message());
    }
    assertTrue("getAnalyticsTechnology call succeeded", test3b.isSuccess());
    assertTrue("analytics tech created", test3b.success() != null);
    assertEquals(lib_elements.get(0).misc_entry_point(), test3b.success()._1().getClass().getName());
}

From source file:com.ikanow.aleph2.data_import_manager.analytics.actors.TestDataBucketChangeActor.java

License:Apache License

@Test
public void test_cacheJars_streamEnrichment()
        throws UnsupportedFileSystemException, InterruptedException, ExecutionException {
    try {
        // Preamble:
        // 0) Insert 2 library beans into the management db

        final DataBucketBean bucket = DataBucketAnalyticsChangeActor
                .convertEnrichmentToAnalyticBucket(createBucket("test_tech_id_stream"));

        final String pathname1 = System.getProperty("user.dir")
                + "/misc_test_assets/simple-harvest-example.jar";
        final Path path1 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname1));
        final String pathname2 = System.getProperty("user.dir")
                + "/misc_test_assets/simple-harvest-example2.jar";
        final Path path2 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname2));

        List<SharedLibraryBean> lib_elements = createSharedLibraryBeans_streaming(path1, path2);

        final IManagementDbService underlying_db = _service_context
                .getService(IManagementDbService.class, Optional.empty()).get();
        final IManagementCrudService<SharedLibraryBean> library_crud = underlying_db.getSharedLibraryStore();
        library_crud.deleteDatastore();
        assertEquals("Cleansed library store", 0L, (long) library_crud.countObjects().get());
        library_crud.storeObjects(lib_elements).get();

        assertEquals("Should have 4 library beans", 4L, (long) library_crud.countObjects().get());

        // 0a) Check with no streaming, gets nothing
        {
            final DataBucketBean bucket0 = DataBucketAnalyticsChangeActor
                    .convertEnrichmentToAnalyticBucket(createBucket("broken"));

            CompletableFuture<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>> reply_structure = LibraryCacheUtils
                    .cacheJars(bucket0, DataBucketAnalyticsChangeActor.getQuery(bucket0, false),
                            _service_context.getCoreManagementDbService(),
                            _service_context.getGlobalProperties(), _service_context.getStorageService(),
                            _service_context, "test1_source", "test1_command");

            if (reply_structure.get().isFail()) {
                fail("About to crash with: " + reply_structure.get().fail().message());
            }
            assertTrue("cacheJars should return valid reply", reply_structure.get().isSuccess());

            final Map<String, Tuple2<SharedLibraryBean, String>> reply_map = reply_structure.get().success();

            assertEquals(0L, reply_map.size()); // (no libraries matched this bucket, so nothing was cached)
        }

        // 0b) Create the more complex bucket

        final EnrichmentControlMetadataBean enrichment_module = new EnrichmentControlMetadataBean("test_name",
                Collections.emptyList(), null, true, null,
                Arrays.asList("test_tech_id_stream", "test_module_id"), null, new LinkedHashMap<>(), null);

        final DataBucketBean bucket2 = DataBucketAnalyticsChangeActor.convertEnrichmentToAnalyticBucket(
                BeanTemplateUtils.clone(bucket).with(DataBucketBean::analytic_thread, null)
                        .with(DataBucketBean::streaming_enrichment_topology, enrichment_module)
                        .with(DataBucketBean::master_enrichment_type, MasterEnrichmentType.streaming).done());

        // 1) Normal operation

        CompletableFuture<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>> reply_structure = LibraryCacheUtils
                .cacheJars(bucket2, DataBucketAnalyticsChangeActor.getQuery(bucket2, false),
                        _service_context.getCoreManagementDbService(), _service_context.getGlobalProperties(),
                        _service_context.getStorageService(), _service_context, "test1_source",
                        "test1_command");

        if (reply_structure.get().isFail()) {
            fail("About to crash with: " + reply_structure.get().fail().message());
        }
        assertTrue("cacheJars should return valid reply", reply_structure.get().isSuccess());

        final Map<String, Tuple2<SharedLibraryBean, String>> reply_map = reply_structure.get().success();

        assertEquals("Should have 4 beans: " + reply_map.toString(), 4L, reply_map.size()); // (both modules, 1x for _id and 1x for name) 

        // 3) Couple of error cases:

        final EnrichmentControlMetadataBean enrichment_module2 = new EnrichmentControlMetadataBean("test_name",
                Collections.emptyList(), null, true, null,
                Arrays.asList("test_tech_id_stream", "test_module_id", "failtest"), null, new LinkedHashMap<>(),
                new LinkedHashMap<>());

        final DataBucketBean bucket3 = DataBucketAnalyticsChangeActor.convertEnrichmentToAnalyticBucket(
                BeanTemplateUtils.clone(bucket).with(DataBucketBean::analytic_thread, null)
                        .with(DataBucketBean::streaming_enrichment_topology, enrichment_module2)
                        .with(DataBucketBean::master_enrichment_type, MasterEnrichmentType.streaming).done());

        CompletableFuture<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>> reply_structure3 = LibraryCacheUtils
                .cacheJars(bucket3, DataBucketAnalyticsChangeActor.getQuery(bucket3, false),
                        _service_context.getCoreManagementDbService(), _service_context.getGlobalProperties(),
                        _service_context.getStorageService(), _service_context, "test2_source",
                        "test2_command");

        assertTrue("cacheJars should return error", reply_structure3.get().isFail());
    } catch (Exception e) {
        System.out.println(ErrorUtils.getLongForm("guice? {0}", e));
        throw e;
    }
}

From source file:com.ikanow.aleph2.data_import_manager.analytics.actors.TestDataBucketChangeActor.java

License:Apache License

@Test
public void test_cacheJars_batchEnrichment()
        throws UnsupportedFileSystemException, InterruptedException, ExecutionException {
    try {
        // Preamble:
        // 0) Insert 2 library beans into the management db

        final DataBucketBean bucket = DataBucketAnalyticsChangeActor
                .convertEnrichmentToAnalyticBucket(createBucket("test_tech_id_batch"));

        final String pathname1 = System.getProperty("user.dir")
                + "/misc_test_assets/simple-harvest-example.jar";
        final Path path1 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname1));
        final String pathname2 = System.getProperty("user.dir")
                + "/misc_test_assets/simple-harvest-example2.jar";
        final Path path2 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname2));

        List<SharedLibraryBean> lib_elements = createSharedLibraryBeans_batch(path1, path2);

        final IManagementDbService underlying_db = _service_context
                .getService(IManagementDbService.class, Optional.empty()).get();
        final IManagementCrudService<SharedLibraryBean> library_crud = underlying_db.getSharedLibraryStore();
        library_crud.deleteDatastore();
        assertEquals("Cleansed library store", 0L, (long) library_crud.countObjects().get());
        library_crud.storeObjects(lib_elements).get();

        assertEquals("Should have 4 library beans", 4L, (long) library_crud.countObjects().get());

        // 0a) Check with no batch enrichment, gets nothing
        {
            final DataBucketBean bucket0 = DataBucketAnalyticsChangeActor
                    .convertEnrichmentToAnalyticBucket(createBucket("broken"));

            CompletableFuture<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>> reply_structure = LibraryCacheUtils
                    .cacheJars(bucket0, DataBucketAnalyticsChangeActor.getQuery(bucket0, false),
                            _service_context.getCoreManagementDbService(),
                            _service_context.getGlobalProperties(), _service_context.getStorageService(),
                            _service_context, "test1_source", "test1_command");

            if (reply_structure.get().isFail()) {
                fail("About to crash with: " + reply_structure.get().fail().message());
            }
            assertTrue("cacheJars should return valid reply", reply_structure.get().isSuccess());

            final Map<String, Tuple2<SharedLibraryBean, String>> reply_map = reply_structure.get().success();

            assertEquals(0L, reply_map.size()); // (no libraries matched this bucket, so nothing was cached)
        }

        // 0b) Create the more complex bucket

        final EnrichmentControlMetadataBean enrichment_module = new EnrichmentControlMetadataBean("test_name",
                Collections.emptyList(), Collections.emptyList(), true, null,
                Arrays.asList("test_tech_id_batch", "test_module_id"), null, new LinkedHashMap<>(), null);

        final DataBucketBean bucket2 = DataBucketAnalyticsChangeActor.convertEnrichmentToAnalyticBucket(
                BeanTemplateUtils.clone(bucket).with(DataBucketBean::analytic_thread, null)
                        .with(DataBucketBean::batch_enrichment_configs, Arrays.asList(enrichment_module))
                        .with(DataBucketBean::master_enrichment_type, MasterEnrichmentType.batch).done());

        // 1) Normal operation

        CompletableFuture<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>> reply_structure = LibraryCacheUtils
                .cacheJars(bucket2, DataBucketAnalyticsChangeActor.getQuery(bucket2, false),
                        _service_context.getCoreManagementDbService(), _service_context.getGlobalProperties(),
                        _service_context.getStorageService(), _service_context, "test1_source",
                        "test1_command");

        if (reply_structure.get().isFail()) {
            fail("About to crash with: " + reply_structure.get().fail().message());
        }
        assertTrue("cacheJars should return valid reply", reply_structure.get().isSuccess());

        final Map<String, Tuple2<SharedLibraryBean, String>> reply_map = reply_structure.get().success();

        assertEquals("Should have 4 beans: " + reply_map.toString(), 4L, reply_map.size()); // (both modules, 1x for _id and 1x for name) 

        // 3) Couple of error cases:

        final EnrichmentControlMetadataBean enrichment_module2 = new EnrichmentControlMetadataBean("test_name",
                Collections.emptyList(), null, true, null,
                Arrays.asList("test_tech_id_batch", "test_module_id", "failtest"), null, new LinkedHashMap<>(),
                null);

        final DataBucketBean bucket3 = DataBucketAnalyticsChangeActor.convertEnrichmentToAnalyticBucket(
                BeanTemplateUtils.clone(bucket).with(DataBucketBean::analytic_thread, null)
                        .with(DataBucketBean::batch_enrichment_configs, Arrays.asList(enrichment_module2))
                        .with(DataBucketBean::master_enrichment_type, MasterEnrichmentType.batch).done());

        CompletableFuture<Validation<BasicMessageBean, Map<String, Tuple2<SharedLibraryBean, String>>>> reply_structure3 = LibraryCacheUtils
                .cacheJars(bucket3, DataBucketAnalyticsChangeActor.getQuery(bucket3, false),
                        _service_context.getCoreManagementDbService(), _service_context.getGlobalProperties(),
                        _service_context.getStorageService(), _service_context, "test2_source",
                        "test2_command");

        assertTrue("cacheJars should return error", reply_structure3.get().isFail());
    } catch (Exception e) {
        System.out.println(ErrorUtils.getLongForm("guice? {0}", e));
        throw e;
    }
}

From source file:com.ikanow.aleph2.data_import_manager.harvest.actors.TestDataBucketChangeActor.java

License:Apache License

@Test
public void test_getHarvestTechnology()
        throws UnsupportedFileSystemException, InterruptedException, ExecutionException {
    final DataBucketBean bucket = createBucket("test_tech_id_harvest");

    final String pathname1 = System.getProperty("user.dir") + "/misc_test_assets/simple-harvest-example.jar";
    final Path path1 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname1));
    final String pathname2 = System.getProperty("user.dir") + "/misc_test_assets/simple-harvest-example2.jar";
    final Path path2 = FileContext.getLocalFSFileContext().makeQualified(new Path(pathname2));

    List<SharedLibraryBean> lib_elements = createSharedLibraryBeans(path1, path2);

    //////////////////////////////////////////////////////

    // 1) Check - if called with an error, then just passes that error along

    final BasicMessageBean error = SharedErrorUtils.buildErrorMessage("test_source", "test_message",
            "test_error");

    final Validation<BasicMessageBean, IHarvestTechnologyModule> test1 = DataBucketHarvestChangeActor
            .getHarvestTechnology(bucket, true,
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source2", Validation.fail(error));

    assertTrue("Got error back", test1.isFail());
    assertEquals("test_source", test1.fail().source());
    assertEquals("test_message", test1.fail().command());
    assertEquals("test_error", test1.fail().message());

    //////////////////////////////////////////////////////

    // 2) Check the error handling inside getHarvestTechnology

    final ImmutableMap<String, Tuple2<SharedLibraryBean, String>> test2_input = ImmutableMap
            .<String, Tuple2<SharedLibraryBean, String>>builder()
            .put("test_tech_id_harvest_2b", Tuples._2T(null, null)).build();

    final Validation<BasicMessageBean, IHarvestTechnologyModule> test2a = DataBucketHarvestChangeActor
            .getHarvestTechnology(BeanTemplateUtils.clone(bucket)
                    .with(DataBucketBean::harvest_technology_name_or_id, "test_tech_id_harvest_2a").done(),
                    true,
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source2a", Validation.success(test2_input));

    assertTrue("Got error back", test2a.isFail());
    assertEquals("test_source2a", test2a.fail().source());
    assertEquals("BucketActionOfferMessage", test2a.fail().command());
    assertEquals(
            ErrorUtils.get(SharedErrorUtils.SHARED_LIBRARY_NAME_NOT_FOUND, bucket.full_name(),
                    "test_tech_id_harvest_2a"), // (cloned bucket above)
            test2a.fail().message());

    final Validation<BasicMessageBean, IHarvestTechnologyModule> test2b = DataBucketHarvestChangeActor
            .getHarvestTechnology(BeanTemplateUtils.clone(bucket)
                    .with(DataBucketBean::harvest_technology_name_or_id, "test_tech_id_harvest_2b").done(),
                    true,
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source2b", Validation.success(test2_input));

    assertTrue("Got error back", test2b.isFail());
    assertEquals("test_source2b", test2b.fail().source());
    assertEquals("BucketActionOfferMessage", test2b.fail().command());
    assertEquals(
            ErrorUtils.get(SharedErrorUtils.SHARED_LIBRARY_NAME_NOT_FOUND, bucket.full_name(),
                    "test_tech_id_harvest_2a"), // (cloned bucket above)
            test2a.fail().message());

    //////////////////////////////////////////////////////

    // 3) OK now it will actually do something 

    final String java_name = _service_context.getGlobalProperties().local_cached_jar_dir() + File.separator
            + "test_tech_id_harvest.cache.jar";

    System.out.println(
            "Needed to delete locally cached file? " + java_name + ": " + new File(java_name).delete());

    // Requires that the file has already been cached:
    final Validation<BasicMessageBean, String> cached_file = JarCacheUtils
            .getCachedJar(_service_context.getGlobalProperties().local_cached_jar_dir(), lib_elements.get(0),
                    _service_context.getStorageService(), "test3", "test3")
            .get();

    if (cached_file.isFail()) {
        fail("About to crash with: " + cached_file.fail().message());
    }

    assertTrue("The cached file exists: " + java_name, new File(java_name).exists());

    // OK the setup is done and validated now actually test the underlying call:

    final ImmutableMap<String, Tuple2<SharedLibraryBean, String>> test3_input = ImmutableMap
            .<String, Tuple2<SharedLibraryBean, String>>builder()
            .put("test_tech_id_harvest", Tuples._2T(lib_elements.get(0), cached_file.success())).build();

    final Validation<BasicMessageBean, IHarvestTechnologyModule> test3 = DataBucketHarvestChangeActor
            .getHarvestTechnology(
                    BeanTemplateUtils.clone(bucket)
                            .with(DataBucketBean::harvest_technology_name_or_id, "test_tech_id_harvest").done(),
                    true,
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source3", Validation.success(test3_input));

    if (test3.isFail()) {
        fail("About to crash with: " + test3.fail().message());
    }
    assertTrue("getHarvestTechnology call succeeded", test3.isSuccess());
    assertTrue("harvest tech created: ", test3.success() != null);
    assertEquals(lib_elements.get(0).misc_entry_point(), test3.success().getClass().getName());

    // Now check with the "not just the harvest tech" flag set

    final String java_name2 = _service_context.getGlobalProperties().local_cached_jar_dir() + File.separator
            + "test_module_id.cache.jar";

    System.out.println(
            "Needed to delete locally cached file? " + java_name2 + ": " + new File(java_name2).delete());

    // Requires that the file has already been cached:
    final Validation<BasicMessageBean, String> cached_file2 = JarCacheUtils
            .getCachedJar(_service_context.getGlobalProperties().local_cached_jar_dir(), lib_elements.get(1),
                    _service_context.getStorageService(), "test3b", "test3b")
            .get();

    if (cached_file2.isFail()) {
        fail("About to crash with: " + cached_file2.fail().message());
    }

        assertTrue("The cached file exists: " + java_name2, new File(java_name2).exists());

    final ImmutableMap<String, Tuple2<SharedLibraryBean, String>> test3b_input = ImmutableMap
            .<String, Tuple2<SharedLibraryBean, String>>builder()
            .put("test_tech_id_harvest", Tuples._2T(lib_elements.get(0), cached_file.success()))
            .put("test_module_id", Tuples._2T(lib_elements.get(1), cached_file.success())).build();

    final HarvestControlMetadataBean harvest_module = new HarvestControlMetadataBean("test_tech_name", true,
            null, Arrays.asList("test_module_id"), null, null);

    final Validation<BasicMessageBean, IHarvestTechnologyModule> test3b = DataBucketHarvestChangeActor
            .getHarvestTechnology(
                    BeanTemplateUtils.clone(bucket)
                            .with(DataBucketBean::harvest_technology_name_or_id, "test_tech_id_harvest")
                            .with(DataBucketBean::harvest_configs, Arrays.asList(harvest_module)).done(),
                    false,
                    new BucketActionMessage.BucketActionOfferMessage(bucket, null, Collections.emptySet()),
                    "test_source3b", Validation.success(test3b_input));

    if (test3b.isFail()) {
        fail("About to crash with: " + test3b.fail().message());
    }
    assertTrue("getHarvestTechnology call succeeded", test3b.isSuccess());
    assertTrue("harvest tech created: ", test3b.success() != null);
    assertEquals(lib_elements.get(0).misc_entry_point(), test3b.success().getClass().getName());
}