List of usage examples for org.apache.commons.lang.ArrayUtils.toString
public static String toString(Object array)
Outputs an array as a String, treating null as an empty array.
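Before the project examples below, a minimal standalone sketch of the method in isolation (the class name and sample values are illustrative assumptions, not taken from any of the projects cited here):

import org.apache.commons.lang.ArrayUtils;

public class ArrayUtilsToStringDemo {
    public static void main(String[] args) {
        int[] numbers = { 1, 2, 3 }; // hypothetical sample data
        // Arrays are rendered in curly-brace form.
        System.out.println(ArrayUtils.toString(numbers)); // prints: {1,2,3}
        // A null argument is treated as an empty array.
        System.out.println(ArrayUtils.toString(null)); // prints: {}
    }
}

Unlike Arrays.toString, this method accepts any array type (primitive or object, including nested arrays) through a single Object parameter, which is why the examples below use it for logging and assertion messages.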
From source file:org.codehaus.groovy.grails.orm.hibernate.query.HibernateCriterionAdapter.java
/**
 * Utility method that generically returns a criterion using methods in Restrictions.
 *
 * @param constraintName - the criteria
 */
private static Criterion callRestrictionsMethod(String constraintName, Class<?>[] paramTypes, Object[] params) {
    final Method restrictionsMethod = ReflectionUtils.findMethod(Restrictions.class, constraintName, paramTypes);
    Assert.notNull(restrictionsMethod, "Could not find method: " + constraintName
            + " in class Restrictions for parameters: " + ArrayUtils.toString(params)
            + " with types: " + ArrayUtils.toString(paramTypes));
    return (Criterion) ReflectionUtils.invokeMethod(restrictionsMethod, null, params);
}
From source file:org.codehaus.groovy.grails.plugins.DefaultGrailsPlugin.java
private void evaluateOnChangeListener() {
    if (pluginBean.isReadableProperty(ON_SHUTDOWN)) {
        onShutdownListener = (Closure) GrailsClassUtils.getPropertyOrStaticPropertyOrFieldValue(plugin, ON_SHUTDOWN);
    }
    if (pluginBean.isReadableProperty(ON_CONFIG_CHANGE)) {
        onConfigChangeListener = (Closure) GrailsClassUtils.getPropertyOrStaticPropertyOrFieldValue(plugin, ON_CONFIG_CHANGE);
    }
    if (pluginBean.isReadableProperty(ON_CHANGE)) {
        onChangeListener = (Closure) GrailsClassUtils.getPropertyOrStaticPropertyOrFieldValue(plugin, ON_CHANGE);
    }

    final boolean warDeployed = Metadata.getCurrent().isWarDeployed();
    final boolean reloadEnabled = Environment.getCurrent().isReloadEnabled();
    if (!((reloadEnabled || !warDeployed) && onChangeListener != null)) {
        return;
    }

    Object referencedResources = GrailsClassUtils.getPropertyOrStaticPropertyOrFieldValue(plugin, WATCHED_RESOURCES);
    try {
        List resourceList = null;
        if (referencedResources instanceof String) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Configuring plugin " + this + " to watch resources with pattern: " + referencedResources);
            }
            resourceList = Collections.singletonList(referencedResources.toString());
        } else if (referencedResources instanceof List) {
            resourceList = (List) referencedResources;
        }

        if (resourceList == null) {
            return;
        }

        List<String> resourceListTmp = new ArrayList<String>();
        PluginBuildSettings pluginBuildSettings = GrailsPluginUtils.getPluginBuildSettings();
        if (pluginBuildSettings == null) {
            return;
        }

        final org.codehaus.groovy.grails.io.support.Resource[] pluginDirs = pluginBuildSettings
                .getPluginDirectories();
        final Environment env = Environment.getCurrent();
        final String baseLocation = env.getReloadLocation();

        for (Object ref : resourceList) {
            String stringRef = ref.toString();
            if (warDeployed) {
                addBaseLocationPattern(resourceListTmp, baseLocation, stringRef);
            } else {
                for (org.codehaus.groovy.grails.io.support.Resource pluginDir : pluginDirs) {
                    if (pluginDir == null)
                        continue;
                    String pluginResources = getResourcePatternForBaseLocation(
                            pluginDir.getFile().getCanonicalPath(), stringRef);
                    resourceListTmp.add(pluginResources);
                }
                addBaseLocationPattern(resourceListTmp, baseLocation, stringRef);
            }
        }

        watchedResourcePatternReferences = new String[resourceListTmp.size()];
        for (int i = 0; i < watchedResourcePatternReferences.length; i++) {
            String resRef = resourceListTmp.get(i);
            watchedResourcePatternReferences[i] = resRef;
        }

        watchedResourcePatterns = new WatchPatternParser()
                .getWatchPatterns(Arrays.asList(watchedResourcePatternReferences));
    } catch (IllegalArgumentException e) {
        if (GrailsUtil.isDevelopmentEnv()) {
            LOG.debug("Cannot load plug-in resource watch list from ["
                    + ArrayUtils.toString(watchedResourcePatternReferences) + "]. This means that the plugin "
                    + this + ", will not be able to auto-reload changes effectively. Try running grails upgrade.: "
                    + e.getMessage());
        }
    } catch (IOException e) {
        if (GrailsUtil.isDevelopmentEnv()) {
            LOG.debug("Cannot load plug-in resource watch list from ["
                    + ArrayUtils.toString(watchedResourcePatternReferences) + "]. This means that the plugin "
                    + this + ", will not be able to auto-reload changes effectively. Try running grails upgrade.: "
                    + e.getMessage());
        }
    }
}
From source file:org.codehaus.groovy.grails.plugins.DefaultGrailsPluginManager.java
/**
 * This method will attempt to load those plugins not loaded in the first pass.
 */
private void loadDelayedPlugins() {
    while (!delayedLoadPlugins.isEmpty()) {
        GrailsPlugin plugin = delayedLoadPlugins.remove(0);
        if (areDependenciesResolved(plugin)) {
            if (!hasValidPluginsToLoadBefore(plugin)) {
                registerPlugin(plugin);
            } else {
                delayedLoadPlugins.add(plugin);
            }
        } else {
            // ok, it still hasn't resolved the dependency after the initial
            // load of all plugins. All hope is not lost, however, so lets first
            // look inside the remaining delayed loads before giving up
            boolean foundInDelayed = false;
            for (GrailsPlugin remainingPlugin : delayedLoadPlugins) {
                if (isDependentOn(plugin, remainingPlugin)) {
                    foundInDelayed = true;
                    break;
                }
            }
            if (foundInDelayed) {
                delayedLoadPlugins.add(plugin);
            } else {
                failedPlugins.put(plugin.getName(), plugin);
                LOG.warn("WARNING: Plugin [" + plugin.getName() + "] cannot be loaded because its dependencies ["
                        + ArrayUtils.toString(plugin.getDependencyNames()) + "] cannot be resolved");
            }
        }
    }
}
From source file:org.dashbuilder.dataprovider.backend.elasticsearch.ElasticSearchDataSetProvider.java
public DataSetMetadata getDataSetMetadata(DataSetDef def) throws Exception {
    // Type casting.
    ElasticSearchDataSetDef elasticSearchDataSetDef = (ElasticSearchDataSetDef) def;

    // Check if metadata already exists in cache.
    ElasticSearchDataSetMetadata result = (ElasticSearchDataSetMetadata) _metadataMap
            .get(elasticSearchDataSetDef.getUUID());
    if (result != null)
        return result;

    // Data Set parameters.
    String[] index = elasticSearchDataSetDef.getIndex();
    String[] type = elasticSearchDataSetDef.getType();

    // Get the row count.
    long rowCount = getRowCount(elasticSearchDataSetDef);

    // Obtain the index mappings.
    MappingsResponse mappingsResponse = clientFactory.newClient(elasticSearchDataSetDef).getMappings(index);
    if (mappingsResponse == null || mappingsResponse.getStatus() != RESPONSE_CODE_OK)
        throw new IllegalArgumentException(
                "Cannot retrieve index mappings for index: [" + index[0] + "]. See previous errors.");

    // Obtain the columns (ids and types).
    List<String> columnIds = new LinkedList<String>();
    List<ColumnType> columnTypes = new LinkedList<ColumnType>();

    // Check if custom columns have been configured in the dataset definition, or whether we have to
    // query the index mappings and retrieve the column information from them.
    Map<String, Object[]> columns = parseColumns(mappingsResponse.getIndexMappings(), elasticSearchDataSetDef);
    if (columns == null || columns.isEmpty())
        throw new RuntimeException("There are no columns for index [" + index[0] + "] and type ["
                + ArrayUtils.toString(type) + "].");

    boolean isAllColumns = elasticSearchDataSetDef.isAllColumnsEnabled();
    List<DataColumn> dataSetColumns = elasticSearchDataSetDef.getDataSet().getColumns();
    if (isAllColumns) {
        // Use the columns given by the EL index mapping.
        for (Map.Entry<String, Object[]> entry : columns.entrySet()) {
            String columnId = entry.getKey();
            ColumnType columnType = (ColumnType) entry.getValue()[0];

            // Check if there is any column definition override.
            DataColumn definitionColumn = getColumn(dataSetColumns, columnId);
            if (definitionColumn != null) {
                ColumnType definitionColumnType = definitionColumn.getColumnType();
                if (columnType.equals(ColumnType.TEXT) && definitionColumnType.equals(ColumnType.LABEL))
                    throw new IllegalArgumentException("The column [" + columnId
                            + "] is defined in dataset definition as LABEL, but the column in the index ["
                            + index[0] + "] and type [" + ArrayUtils.toString(type)
                            + "] is using ANALYZED index, you cannot use it as a label.");
                columnType = definitionColumnType;
            }

            columnIds.add(columnId);
            columnTypes.add(columnType);
        }
    } else {
        // Use the given columns from the dataset definition.
        if (dataSetColumns != null && !dataSetColumns.isEmpty()) {
            for (DataColumn column : dataSetColumns) {
                String columnId = column.getId();
                ColumnType columnType = column.getColumnType();
                ColumnType indexColumnType = (ColumnType) columns.get(columnId)[0];
                String format = (String) columns.get(columnId)[1];

                // Check that the user-defined column exists in the index/type.
                if (indexColumnType == null)
                    throw new IllegalArgumentException("The column [" + columnId
                            + "] defined in dataset definition does not exist for the index [" + index[0]
                            + "] and type [" + ArrayUtils.toString(type) + "].");

                // Check that analyzed fields in the EL index definition are analyzed too in the dataset definition.
                if (indexColumnType.equals(ColumnType.TEXT) && columnType.equals(ColumnType.LABEL))
                    throw new IllegalArgumentException("The column [" + columnId
                            + "] is defined in dataset definition as LABEL, but the column in the index ["
                            + index[0] + "] and type [" + ArrayUtils.toString(type)
                            + "] is using ANALYZED index, you cannot use it as a label.");

                columnIds.add(columnId);
                columnTypes.add(columnType);
            }
        }
    }

    int _rowCount = (int) rowCount;
    int estimatedSize = estimateSize(columnTypes, _rowCount);

    // Build the metadata instance.
    result = new ElasticSearchDataSetMetadata(def, def.getUUID(), _rowCount, columnIds.size(), columnIds,
            columnTypes, estimatedSize);

    // Set the index field patterns from the EL server.
    for (Map.Entry<String, Object[]> entry : columns.entrySet()) {
        String pattern = (String) entry.getValue()[1];
        if (pattern != null && pattern.trim().length() > 0)
            result.setFieldPattern(entry.getKey(), pattern);
    }

    // Put into cache.
    _metadataMap.put(def.getUUID(), result);

    return result;
}
From source file:org.datacleaner.configuration.JaxbConfigurationReaderTest.java
public void testReadComplexDataInPojoDatastore() throws Exception {
    DataCleanerConfiguration configuration = reader
            .create(new File("src/test/resources/example-configuration-pojo-datastore-with-complex-data.xml"));
    Datastore datastore = configuration.getDatastoreCatalog().getDatastore("pojo");
    assertNotNull(datastore);

    DatastoreConnection con = datastore.openConnection();
    DataContext dc = con.getDataContext();

    Table table = dc.getDefaultSchema().getTable(0);

    Column[] columns = table.getColumns();
    assertEquals("[Column[name=Foo,columnNumber=0,type=VARCHAR,nullable=true,nativeType=null,columnSize=null], "
            + "Column[name=Bar,columnNumber=1,type=MAP,nullable=true,nativeType=null,columnSize=null], "
            + "Column[name=Baz,columnNumber=2,type=LIST,nullable=true,nativeType=null,columnSize=null], "
            + "Column[name=bytes,columnNumber=3,type=BINARY,nullable=true,nativeType=null,columnSize=null]]",
            Arrays.toString(columns));

    DataSet ds = dc.query().from(table).select(columns).execute();

    assertTrue(ds.next());
    assertEquals("Hello", ds.getRow().getValue(0).toString());
    assertEquals("{greeting=hello, person=world}", ds.getRow().getValue(1).toString());
    assertEquals("[hello, world]", ds.getRow().getValue(2).toString());
    assertEquals("{1,2,3,4,5}", ArrayUtils.toString(ds.getRow().getValue(3)));
    assertTrue(ds.getRow().getValue(1) instanceof Map);
    assertTrue(ds.getRow().getValue(2) instanceof List);
    assertTrue(ds.getRow().getValue(3) instanceof byte[]);

    assertTrue(ds.next());
    assertEquals("There", ds.getRow().getValue(0).toString());
    assertEquals("{greeting=hi, there you!, person={Firstname=Kasper, Lastname=Sørensen}}",
            ds.getRow().getValue(1).toString());
    assertEquals(null, ds.getRow().getValue(2));
    assertEquals(null, ds.getRow().getValue(3));
    assertTrue(ds.getRow().getValue(1) instanceof Map);

    assertTrue(ds.next());
    assertEquals("World", ds.getRow().getValue(0).toString());
    assertEquals(null, ds.getRow().getValue(1));
    assertEquals("[Sørensen, Kasper]", ds.getRow().getValue(2).toString());
    assertEquals("{-1,-2,-3,-4,-5}", ArrayUtils.toString(ds.getRow().getValue(3)));
    assertTrue(ds.getRow().getValue(2) instanceof List);
    assertTrue(ds.getRow().getValue(3) instanceof byte[]);
}
From source file:org.datacleaner.widgets.properties.MultipleMappedEnumsPropertyWidgetTest.java
public void testRestoreEnumValuesFromFile() throws Exception {
    final DCModule dcModule = new DCModuleImpl();
    final FileObject file = VFS.getManager().resolveFile("src/test/resources/mapped_columns_job.analysis.xml");
    final Injector injector1 = Guice.createInjector(dcModule);
    final DataCleanerConfiguration configuration = injector1.getInstance(DataCleanerConfiguration.class);
    final Injector injector2 = OpenAnalysisJobActionListener.open(file, configuration, injector1);

    final List<AnalyzerComponentBuilder<?>> analyzers;
    if (GraphicsEnvironment.isHeadless()) {
        analyzers = injector2.getInstance(AnalysisJobBuilder.class).getAnalyzerComponentBuilders();
    } else {
        final AnalysisJobBuilderWindow window = injector2.getInstance(AnalysisJobBuilderWindow.class);
        analyzers = window.getAnalysisJobBuilder().getAnalyzerComponentBuilders();
    }

    assertEquals(2, analyzers.size());

    final AnalyzerComponentBuilder<?> completenessAnalyzer = analyzers.get(0);
    assertEquals("Completeness analyzer", completenessAnalyzer.getDescriptor().getDisplayName());

    final Set<ConfiguredPropertyDescriptor> enumProperties = completenessAnalyzer.getDescriptor()
            .getConfiguredPropertiesByType(CompletenessAnalyzer.Condition[].class, false);
    assertEquals(1, enumProperties.size());

    final Set<ConfiguredPropertyDescriptor> inputProperties = completenessAnalyzer.getDescriptor()
            .getConfiguredPropertiesForInput(false);
    assertEquals(1, inputProperties.size());

    final ConfiguredPropertyDescriptor enumProperty = enumProperties.iterator().next();
    final Enum<?>[] enumValue = (Enum<?>[]) completenessAnalyzer.getConfiguredProperty(enumProperty);
    assertEquals("{NOT_NULL,NOT_BLANK_OR_NULL}", ArrayUtils.toString(enumValue));

    final ConfiguredPropertyDescriptor inputProperty = inputProperties.iterator().next();
    final InputColumn<?>[] inputValue = (InputColumn<?>[]) completenessAnalyzer
            .getConfiguredProperty(inputProperty);

    final MultipleMappedEnumsPropertyWidget inputWidget = new MultipleMappedEnumsPropertyWidget(
            completenessAnalyzer, inputProperty, enumProperty);
    final PropertyWidget<Object[]> enumWidget = inputWidget.getMappedEnumsPropertyWidget();
    enumWidget.initialize(EnumerationValue.fromArray(enumValue));
    inputWidget.initialize(inputValue);
    inputWidget.onValueTouched(inputValue);
    enumWidget.onValueTouched(EnumerationValue.fromArray(enumValue));

    assertEquals("{NOT_NULL,NOT_BLANK_OR_NULL}", ArrayUtils.toString(enumWidget.getValue()));
}
From source file:org.datacleaner.widgets.properties.PropertyWidgetFactoryTest.java
private void performAssertions(final PropertyWidgetFactory propertyWidgetFactory, final String propertyName,
        final Class<? extends PropertyWidget<?>> widgetClass, final Object initialValue, final Object setValue) {
    @SuppressWarnings("unchecked")
    PropertyWidget<Object> widget = (PropertyWidget<Object>) propertyWidgetFactory.create(propertyName);
    assertNotNull(widget);
    assertEquals(widgetClass, widget.getClass());

    widget.initialize(null);
    assertEquals(propertyName, widget.getPropertyDescriptor().getName());

    final boolean equals = EqualsBuilder.equals(initialValue, widget.getValue());
    if (!equals) {
        assertEquals(ArrayUtils.toString(initialValue), ArrayUtils.toString(widget.getValue()));
    }
    assertTrue("Expected: " + initialValue + ", actual: " + widget.getValue(), equals);

    widget.onValueTouched(setValue);
    assertTrue(widget.isSet());
    assertTrue("Expected: " + ArrayUtils.toString(setValue) + ", actual: "
            + ArrayUtils.toString(widget.getValue()), EqualsBuilder.equals(setValue, widget.getValue()));
}
From source file:org.datacleaner.widgets.table.DCTableCellRenderer.java
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus,
        int row, int column) {
    logger.debug("getTableCellRendererComponent({},{})", row, column);

    if (value != null) {
        if (value.getClass().isArray()) {
            // arrays are printed nicely this way
            value = ArrayUtils.toString(value);
        }
    }

    // icons are displayed as labels
    if (value instanceof Icon) {
        final JLabel label = new JLabel((Icon) value);
        label.setOpaque(true);
        value = label;
    }

    final Component result;

    // render components directly
    if (value instanceof JComponent) {
        final JComponent component = (JComponent) value;
        component.setOpaque(true);
        if (component.getMouseListeners().length == 0) {
            component.addMouseListener(new MouseAdapter() {
                @Override
                public void mouseClicked(MouseEvent e) {
                    MouseEvent newEvent = SwingUtilities.convertMouseEvent(component, e, _table);
                    _table.consumeMouseClick(newEvent);
                }
            });
        }
        result = component;
    } else {
        result = _delegate.getTableCellRendererComponent(_table, value, isSelected, hasFocus, row, column);
        assert result instanceof JLabel;
    }

    // alignment is applied to all labels or panels (with FlowLayout)
    Alignment alignment = _alignmentOverrides.get(column);
    if (alignment == null) {
        alignment = Alignment.LEFT;
    }

    // set alignment
    if (value instanceof JPanel) {
        final LayoutManager layout = ((JPanel) value).getLayout();
        if (layout instanceof FlowLayout) {
            final FlowLayout flowLayout = (FlowLayout) layout;
            flowLayout.setAlignment(alignment.getFlowLayoutAlignment());
        }
    } else if (result instanceof JLabel) {
        final JLabel label = (JLabel) result;
        label.setHorizontalAlignment(alignment.getSwingContstantsAlignment());
        WidgetUtils.setAppropriateFont(label);
    }

    return result;
}
From source file:org.deeplearning4j.models.word2vec.WordVectorSerializerTest.java
@Test
public void testFullModelSerialization() throws Exception {
    File inputFile = new ClassPathResource("/big/raw_sentences.txt").getFile();
    SentenceIterator iter = UimaSentenceIterator.createWithPath(inputFile.getAbsolutePath());

    // Split on white spaces in the line to get words
    TokenizerFactory t = new DefaultTokenizerFactory();
    t.setTokenPreProcessor(new CommonPreprocessor());

    InMemoryLookupCache cache = new InMemoryLookupCache(false);
    WeightLookupTable table = new InMemoryLookupTable.Builder().vectorLength(100).useAdaGrad(false)
            .negative(5.0).cache(cache).lr(0.025f).build();

    Word2Vec vec = new Word2Vec.Builder().minWordFrequency(5).iterations(1).epochs(1).layerSize(100)
            .lookupTable(table).stopWords(new ArrayList<String>()).useAdaGrad(false).negativeSample(5)
            .vocabCache(cache).seed(42)
            // .workers(6)
            .windowSize(5).iterate(iter).tokenizerFactory(t).build();

    assertEquals(new ArrayList<String>(), vec.getStopWords());

    vec.fit();

    logger.info("Original word 0: " + cache.wordFor(cache.wordAtIndex(0)));
    logger.info("Closest Words:");
    Collection<String> lst = vec.wordsNearest("day", 10);
    System.out.println(lst);

    WordVectorSerializer.writeFullModel(vec, "tempModel.txt");

    File modelFile = new File("tempModel.txt");
    assertTrue(modelFile.exists());
    assertTrue(modelFile.length() > 0);

    Word2Vec vec2 = WordVectorSerializer.loadFullModel("tempModel.txt");
    assertNotEquals(null, vec2);

    assertEquals(vec.getConfiguration(), vec2.getConfiguration());

    logger.info("Source ExpTable: " + ArrayUtils.toString(((InMemoryLookupTable) table).getExpTable()));
    logger.info("Dest ExpTable: "
            + ArrayUtils.toString(((InMemoryLookupTable) vec2.getLookupTable()).getExpTable()));
    assertTrue(ArrayUtils.isEquals(((InMemoryLookupTable) table).getExpTable(),
            ((InMemoryLookupTable) vec2.getLookupTable()).getExpTable()));

    InMemoryLookupTable restoredTable = (InMemoryLookupTable) vec2.lookupTable();

    /*
    logger.info("Restored word 1: " + restoredTable.getVocab().wordFor(restoredTable.getVocab().wordAtIndex(1)));
    logger.info("Restored word 'it': " + restoredTable.getVocab().wordFor("it"));
    logger.info("Original word 1: " + cache.wordFor(cache.wordAtIndex(1)));
    logger.info("Original word 'i': " + cache.wordFor("i"));
    logger.info("Original word 0: " + cache.wordFor(cache.wordAtIndex(0)));
    logger.info("Restored word 0: " + restoredTable.getVocab().wordFor(restoredTable.getVocab().wordAtIndex(0)));
    */

    assertEquals(cache.wordAtIndex(1), restoredTable.getVocab().wordAtIndex(1));
    assertEquals(cache.wordAtIndex(7), restoredTable.getVocab().wordAtIndex(7));
    assertEquals(cache.wordAtIndex(15), restoredTable.getVocab().wordAtIndex(15));

    /* these tests needed only to make sure INDArray equality is working properly */
    double[] array1 = new double[] { 0.323232325, 0.65756575, 0.12315, 0.12312315, 0.1232135, 0.12312315,
            0.4343423425, 0.15 };
    double[] array2 = new double[] { 0.423232325, 0.25756575, 0.12375, 0.12311315, 0.1232035, 0.12318315,
            0.4343493425, 0.25 };
    assertNotEquals(Nd4j.create(array1), Nd4j.create(array2));
    assertEquals(Nd4j.create(array1), Nd4j.create(array1));

    INDArray rSyn0_1 = restoredTable.getSyn0().slice(1);
    INDArray oSyn0_1 = ((InMemoryLookupTable) table).getSyn0().slice(1);

    logger.info("Restored syn0: " + rSyn0_1);
    logger.info("Original syn0: " + oSyn0_1);
    assertEquals(oSyn0_1, rSyn0_1);

    // just checking $^###! syn0/syn1 order
    int cnt = 0;
    for (VocabWord word : cache.vocabWords()) {
        INDArray rSyn0 = restoredTable.getSyn0().slice(word.getIndex());
        INDArray oSyn0 = ((InMemoryLookupTable) table).getSyn0().slice(word.getIndex());
        assertEquals(rSyn0, oSyn0);
        assertEquals(1.0, arraysSimilarity(rSyn0, oSyn0), 0.001);

        INDArray rSyn1 = restoredTable.getSyn1().slice(word.getIndex());
        INDArray oSyn1 = ((InMemoryLookupTable) table).getSyn1().slice(word.getIndex());
        assertEquals(rSyn1, oSyn1);
        if (arraysSimilarity(rSyn1, oSyn1) < 0.98) {
            logger.info("Restored syn1: " + rSyn1);
            logger.info("Original syn1: " + oSyn1);
        }
        // we exclude word 222 since it has syn1 full of zeroes
        if (cnt != 222)
            assertEquals(1.0, arraysSimilarity(rSyn1, oSyn1), 0.001);

        if (((InMemoryLookupTable) table).getSyn1Neg() != null) {
            INDArray rSyn1Neg = restoredTable.getSyn1Neg().slice(word.getIndex());
            INDArray oSyn1Neg = ((InMemoryLookupTable) table).getSyn1Neg().slice(word.getIndex());
            assertEquals(rSyn1Neg, oSyn1Neg);
            // assertEquals(1.0, arraysSimilarity(rSyn1Neg, oSyn1Neg), 0.001);
        }
        assertEquals(word.getHistoricalGradient(),
                restoredTable.getVocab().wordFor(word.getWord()).getHistoricalGradient());
        cnt++;
    }

    // at this moment we can assume that the whole model is transferred, and we can call fit over the new model
    // iter.reset();
    iter = UimaSentenceIterator.createWithPath(inputFile.getAbsolutePath());
    vec2.setTokenizerFactory(t);
    vec2.setSentenceIter(iter);
    vec2.fit();

    INDArray day1 = vec.getWordVectorMatrix("day");
    INDArray day2 = vec2.getWordVectorMatrix("day");
    INDArray night1 = vec.getWordVectorMatrix("night");
    INDArray night2 = vec2.getWordVectorMatrix("night");

    double simD = arraysSimilarity(day1, day2);
    double simN = arraysSimilarity(night1, night2);

    logger.info("Vec1 day: " + day1);
    logger.info("Vec2 day: " + day2);
    logger.info("Vec1 night: " + night1);
    logger.info("Vec2 night: " + night2);
    logger.info("Day/day cross-model similarity: " + simD);
    logger.info("Night/night cross-model similarity: " + simN);
    logger.info("Vec1 day/night similarity: " + vec.similarity("day", "night"));
    logger.info("Vec2 day/night similarity: " + vec2.similarity("day", "night"));

    // check if cross-model values are not the same
    assertNotEquals(1.0, simD, 0.001);
    assertNotEquals(1.0, simN, 0.001);

    // check if cross-model values are still close to each other
    assertTrue(simD > 0.70);
    assertTrue(simN > 0.70);

    modelFile.delete();
}
From source file:org.diffkit.diff.sns.DKSpreadSheetFileSource.java
public DKSpreadSheetFileSource(String filePath_, String sheetName_, DKTableModel requestedModel_,
        String[] keyColumnNames_, int[] readColumnIdxs_, boolean isSorted_, boolean hasHeader_,
        boolean validateLazily_) {
    if (_isDebugEnabled) {
        _log.debug("filePath_->{}", filePath_);
        _log.debug("sheetName_->{}", sheetName_);
        _log.debug("requestedModel_->{}", requestedModel_);
        _log.debug("keyColumnNames_->{}", ArrayUtils.toString(keyColumnNames_));
        _log.debug("readColumnIdxs_->{}", ArrayUtils.toString(readColumnIdxs_));
        _log.debug("isSorted_->{}", isSorted_);
        _log.debug("hasHeader_->{}", hasHeader_);
        _log.debug("validateLazily_->{}", validateLazily_);
    }
    DKValidate.notNull(filePath_);
    _sheet = createSheet(filePath_, sheetName_, isSorted_, hasHeader_, validateLazily_);
    DKValidate.notNull(_sheet);
    _requestedModel = requestedModel_ == null ? null : requestedModel_.copy();
    if (!ArrayUtils.isEmpty(readColumnIdxs_))
        throw new NotImplementedException("readColumnIdxs_ not yet supported!");
    _requestedKeyColumnNames = keyColumnNames_;
}