Example usage for java.util.concurrent TimeUnit HOURS

Introduction

This page collects example usages of java.util.concurrent.TimeUnit.HOURS drawn from open-source projects.

Prototype

TimeUnit HOURS

Document

Time unit representing sixty minutes.
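
Before the project examples below, here is a minimal, self-contained sketch of the conversions TimeUnit.HOURS supports (the class name HoursDemo is ours, not taken from any of the quoted projects); each printed value follows from one hour being 60 minutes, 3,600 seconds, or 3,600,000 milliseconds:

import java.util.concurrent.TimeUnit;

public class HoursDemo {
    public static void main(String[] args) {
        System.out.println(TimeUnit.HOURS.toMinutes(1)); // 60
        System.out.println(TimeUnit.HOURS.toSeconds(1)); // 3600
        System.out.println(TimeUnit.HOURS.toMillis(1)); // 3600000
        System.out.println(TimeUnit.MILLISECONDS.toHours(7200000)); // 2
        System.out.println(TimeUnit.HOURS.convert(90, TimeUnit.MINUTES)); // 1 (conversions truncate)
        System.out.println(TimeUnit.HOURS.name()); // HOURS
    }
}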

Usage

From source file: org.ethereum.rpc.Web3Impl.java

public String eth_netHashrate() {
    BigInteger hashesPerHour = this.worldManager.getHashRateCalculator().calculateNetHashRate(1L,
            TimeUnit.HOURS);
    BigDecimal hashesPerSecond = new BigDecimal(hashesPerHour)
            .divide(new BigDecimal(TimeUnit.HOURS.toSeconds(1)), 3, RoundingMode.HALF_UP);

    String result = hashesPerSecond.toString();

    if (logger.isDebugEnabled())
        logger.debug("eth_netHashrate(): " + result);

    return result;
}
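
In this example TimeUnit.HOURS plays two roles: it tells the hash rate calculator to aggregate over a one-hour window, and TimeUnit.HOURS.toSeconds(1) supplies the 3,600-second divisor that converts hashes per hour into hashes per second (rounded to three decimal places).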

From source file: org.dcache.util.histograms.HistogramModelTest.java

@Test
public void updateOnTimeframeHistogramShouldRotateBufferToMaximum() throws NoSuchMethodException,
        InstantiationException, IllegalAccessException, InvocationTargetException {
    givenTimeframeHistogram();
    givenQueueCountValuesFor(48);
    givenBinUnitOf((double) TimeUnit.HOURS.toMillis(1));
    givenBinCountOf(48);
    givenBinLabelOf(TimeUnit.HOURS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("Queued Movers");
    givenHighestBinOf(getHoursInThePastFromNow(53));
    whenConfigureIsCalled();
    assertThatUpdateRotatesBuffer(53);
}
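
Here the hour unit defines the histogram geometry: TimeUnit.HOURS.toMillis(1) (3,600,000) is the bin width in milliseconds, and TimeUnit.HOURS.name() supplies the string "HOURS" as the bin label for a 48-bin, two-day window.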

From source file: org.bimserver.geometry.StreamingGeometryGenerator.java

@SuppressWarnings("unchecked")
public GenerateGeometryResult generateGeometry(long uoid, final DatabaseSession databaseSession,
        QueryContext queryContext, long nrObjects)
        throws BimserverDatabaseException, GeometryGeneratingException {
    GenerateGeometryResult generateGeometryResult = new GenerateGeometryResult();
    packageMetaData = queryContext.getPackageMetaData();
    productClass = packageMetaData.getEClass("IfcProduct");
    geometryFeature = productClass.getEStructuralFeature("geometry");
    representationFeature = productClass.getEStructuralFeature("Representation");
    representationsFeature = packageMetaData.getEClass("IfcProductDefinitionShape")
            .getEStructuralFeature("Representations");
    itemsFeature = packageMetaData.getEClass("IfcShapeRepresentation").getEStructuralFeature("Items");
    mappingSourceFeature = packageMetaData.getEClass("IfcMappedItem").getEStructuralFeature("MappingSource");

    GregorianCalendar now = new GregorianCalendar();
    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
    debugIdentifier = dateFormat.format(now.getTime()) + " (" + report.getOriginalIfcFileName() + ")";

    long start = System.nanoTime();
    String pluginName = "";
    if (queryContext.getPackageMetaData().getSchema() == Schema.IFC4) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc4StepStreamingSerializerPlugin";
    } else if (queryContext.getPackageMetaData().getSchema() == Schema.IFC2X3TC1) {
        pluginName = "org.bimserver.ifc.step.serializer.Ifc2x3tc1StepStreamingSerializerPlugin";
    } else {
        throw new GeometryGeneratingException(
                "Unknown schema " + queryContext.getPackageMetaData().getSchema());
    }

    reuseGeometry = bimServer.getServerSettingsCache().getServerSettings().isReuseGeometry();
    optimizeMappedItems = bimServer.getServerSettingsCache().getServerSettings().isOptimizeMappedItems();

    report.setStart(new GregorianCalendar());
    report.setIfcSchema(queryContext.getPackageMetaData().getSchema());
    report.setUseMappingOptimization(optimizeMappedItems);
    report.setReuseGeometry(reuseGeometry);

    try {
        final StreamingSerializerPlugin ifcSerializerPlugin = (StreamingSerializerPlugin) bimServer
                .getPluginManager().getPlugin(pluginName, true);
        if (ifcSerializerPlugin == null) {
            throw new UserException("No IFC serializer found");
        }

        User user = (User) databaseSession.get(uoid, org.bimserver.database.OldQuery.getDefault());
        UserSettings userSettings = user.getUserSettings();

        report.setUserName(user.getName());
        report.setUserUserName(user.getUsername());

        RenderEnginePluginConfiguration renderEngine = null;
        if (eoid != -1) {
            renderEngine = databaseSession.get(eoid, OldQuery.getDefault());
        } else {
            renderEngine = userSettings.getDefaultRenderEngine();
        }
        if (renderEngine == null) {
            throw new UserException("No default render engine has been selected for this user");
        }
        renderEngineName = renderEngine.getName();

        int availableProcessors = Runtime.getRuntime().availableProcessors();
        report.setAvailableProcessors(availableProcessors);

        int maxSimultanousThreads = Math.min(
                bimServer.getServerSettingsCache().getServerSettings().getRenderEngineProcesses(),
                availableProcessors);
        if (maxSimultanousThreads < 1) {
            maxSimultanousThreads = 1;
        }

        final RenderEngineSettings settings = new RenderEngineSettings();
        settings.setPrecision(Precision.SINGLE);
        settings.setIndexFormat(IndexFormat.AUTO_DETECT);
        settings.setGenerateNormals(true);
        settings.setGenerateTriangles(true);
        settings.setGenerateWireFrame(false);

        final RenderEngineFilter renderEngineFilter = new RenderEngineFilter();

        RenderEnginePool renderEnginePool = bimServer.getRenderEnginePools().getRenderEnginePool(
                packageMetaData.getSchema(), renderEngine.getPluginDescriptor().getPluginClassName(),
                bimServer.getPluginSettingsCache().getPluginSettings(renderEngine.getOid()));

        report.setRenderEngineName(renderEngine.getName());
        report.setRenderEnginePluginVersion(
                renderEngine.getPluginDescriptor().getPluginBundleVersion().getVersion());

        try (RenderEngine engine = renderEnginePool.borrowObject()) {
            VersionInfo versionInfo = renderEnginePool.getRenderEngineFactory().getVersionInfo();
            report.setRenderEngineVersion(versionInfo);
            applyLayerSets = engine.isApplyLayerSets();
            report.setApplyLayersets(applyLayerSets);
            calculateQuantities = engine.isCalculateQuantities();
            report.setCalculateQuantities(calculateQuantities);
        }

        // TODO reuse, pool the pools :) Or something smarter
        // TODO reuse queue, or try to determine a realistic size, or don't use a fixed-size queue
        ThreadPoolExecutor executor = new ThreadPoolExecutor(maxSimultanousThreads, maxSimultanousThreads, 24,
                TimeUnit.HOURS, new ArrayBlockingQueue<Runnable>(10000000));

        JsonQueryObjectModelConverter jsonQueryObjectModelConverter = new JsonQueryObjectModelConverter(
                packageMetaData);
        String queryNameSpace = packageMetaData.getSchema().name().toLowerCase() + "-stdlib";

        // All references should already be direct, since this is now done in BimServer on startup, quite the hack...
        Include objectPlacement = jsonQueryObjectModelConverter
                .getDefineFromFile(queryNameSpace + ":ObjectPlacement", true);

        Set<EClass> classes = null;
        if (queryContext.getOidCounters() != null) {
            classes = queryContext.getOidCounters().keySet();
        } else {
            classes = packageMetaData.getEClasses();
        }

        float multiplierToMm = processUnits(databaseSession, queryContext);
        generateGeometryResult.setMultiplierToMm(multiplierToMm);

        // Phase 1 (mapped item detection) sometimes detects that mapped items have invalid (unsupported) RepresentationIdentifier values, this set keeps track of objects to skip in Phase 2 because of that
        Set<Long> toSkip = new HashSet<>();

        for (EClass eClass : classes) {
            if (packageMetaData.getEClass("IfcProduct").isSuperTypeOf(eClass)) {
                int nrObjectsForType = 0;

                Query query2 = new Query(eClass.getName() + "Main query", packageMetaData);
                QueryPart queryPart2 = query2.createQueryPart();
                queryPart2.addType(eClass, false);
                Include representationInclude = queryPart2.createInclude();
                representationInclude.addType(eClass, false);
                representationInclude.addFieldDirect("Representation");
                Include representationsInclude = representationInclude.createInclude();
                representationsInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
                representationsInclude.addFieldDirect("Representations");
                Include itemsInclude = representationsInclude.createInclude();
                itemsInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), false);
                itemsInclude.addFieldDirect("Items");
                itemsInclude.addFieldDirect("ContextOfItems");
                Include mappingSourceInclude = itemsInclude.createInclude();
                mappingSourceInclude.addType(packageMetaData.getEClass("IfcMappedItem"), false);
                mappingSourceInclude.addFieldDirect("MappingSource");
                mappingSourceInclude.addFieldDirect("MappingTarget");
                Include representationMap = mappingSourceInclude.createInclude();
                representationMap.addType(packageMetaData.getEClass("IfcRepresentationMap"), false);
                representationMap.addFieldDirect("MappedRepresentation");
                Include createInclude = representationMap.createInclude();
                createInclude.addType(packageMetaData.getEClass("IfcShapeRepresentation"), true);

                Include targetInclude = mappingSourceInclude.createInclude();
                targetInclude.addType(packageMetaData.getEClass("IfcCartesianTransformationOperator3D"), false);
                targetInclude.addFieldDirect("Axis1");
                targetInclude.addFieldDirect("Axis2");
                targetInclude.addFieldDirect("Axis3");
                targetInclude.addFieldDirect("LocalOrigin");

                queryPart2.addInclude(objectPlacement);

                Map<Long, Map<Long, ProductDef>> representationMapToProduct = new HashMap<>();

                QueryObjectProvider queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer,
                        query2, Collections.singleton(queryContext.getRoid()), packageMetaData);
                HashMapVirtualObject next = queryObjectProvider2.next();
                int nrProductsWithRepresentation = 0;
                while (next != null) {
                    if (next.eClass() == eClass) {
                        AbstractHashMapVirtualObject representation = next
                                .getDirectFeature(representationFeature);
                        if (representation != null) {
                            Set<HashMapVirtualObject> representations = representation
                                    .getDirectListFeature(representationsFeature);
                            if (representations != null) {
                                boolean foundValidContext = false;
                                for (HashMapVirtualObject representationItem : representations) {
                                    if (usableContext(representationItem)) {
                                        foundValidContext = true;
                                    }
                                }
                                if (foundValidContext) {
                                    nrProductsWithRepresentation++;
                                }
                                for (HashMapVirtualObject representationItem : representations) {
                                    if (!usableContext(representationItem) && foundValidContext) {
                                        continue;
                                    }
                                    if (hasValidRepresentationIdentifier(representationItem)) {
                                        Set<HashMapVirtualObject> items = representationItem
                                                .getDirectListFeature(itemsFeature);
                                        if (items == null || items.size() > 1) {
                                            // Only if there is just one item, we'll store this for reuse
                                            continue;
                                        }
                                        // So this next loop always results in 1 (or no) loops
                                        for (HashMapVirtualObject item : items) {
                                            report.addRepresentationItem(item.eClass().getName());
                                            if (!packageMetaData.getEClass("IfcMappedItem")
                                                    .isSuperTypeOf(item.eClass())) {
                                                nrObjectsForType++;
                                                continue; // All non IfcMappedItem objects will be done in phase 2
                                            }
                                            AbstractHashMapVirtualObject mappingTarget = item
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcMappedItem", "MappingTarget"));
                                            AbstractHashMapVirtualObject mappingSourceOfMappedItem = item
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcMappedItem", "MappingSource"));
                                            if (mappingSourceOfMappedItem == null) {
                                                LOGGER.info("No mapping source");
                                                continue;
                                            }
                                            AbstractHashMapVirtualObject mappedRepresentation = mappingSourceOfMappedItem
                                                    .getDirectFeature(packageMetaData.getEReference(
                                                            "IfcRepresentationMap", "MappedRepresentation"));

                                            if (!hasValidRepresentationIdentifier(mappedRepresentation)) {
                                                // Skip this mapping, we should store somewhere that this object should also be skipped in the normal way
                                                // TODO too many log statements, should log only 1 line for the complete model
                                                //                                       LOGGER.info("Skipping because of invalid RepresentationIdentifier in mapped item (" + (String) mappedRepresentation.get("RepresentationIdentifier") + ")");
                                                report.addSkippedBecauseOfInvalidRepresentationIdentifier(
                                                        (String) mappedRepresentation
                                                                .get("RepresentationIdentifier"));
                                                toSkip.add(next.getOid());
                                                continue;
                                            }
                                            double[] mappingMatrix = Matrix.identity();
                                            double[] productMatrix = Matrix.identity();
                                            if (mappingTarget != null) {
                                                AbstractHashMapVirtualObject axis1 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis1"));
                                                AbstractHashMapVirtualObject axis2 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis2"));
                                                AbstractHashMapVirtualObject axis3 = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator", "Axis3"));
                                                AbstractHashMapVirtualObject localOrigin = mappingTarget
                                                        .getDirectFeature(packageMetaData.getEReference(
                                                                "IfcCartesianTransformationOperator",
                                                                "LocalOrigin"));

                                                double[] a1 = null;
                                                double[] a2 = null;
                                                double[] a3 = null;

                                                if (axis3 != null) {
                                                    List<Double> list = (List<Double>) axis3
                                                            .get("DirectionRatios");
                                                    a3 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                } else {
                                                    a3 = new double[] { 0, 0, 1, 1 };
                                                    Vector.normalize(a3);
                                                }

                                                if (axis1 != null) {
                                                    List<Double> list = (List<Double>) axis1
                                                            .get("DirectionRatios");
                                                    a1 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                    Vector.normalize(a1);
                                                } else {
                                                    //                                          if (a3[0] == 1 && a3[1] == 0 && a3[2] == 0) {
                                                    a1 = new double[] { 1, 0, 0, 1 };
                                                    //                                          } else {
                                                    //                                             a1 = new double[]{0, 1, 0, 1};
                                                    //                                          }
                                                }

                                                double[] xVec = Vector.scalarProduct(Vector.dot(a1, a3), a3);
                                                double[] xAxis = Vector.subtract(a1, xVec);
                                                Vector.normalize(xAxis);

                                                if (axis2 != null) {
                                                    List<Double> list = (List<Double>) axis2
                                                            .get("DirectionRatios");
                                                    a2 = new double[] { list.get(0), list.get(1), list.get(2) };
                                                    Vector.normalize(a2);
                                                } else {
                                                    a2 = new double[] { 0, 1, 0, 1 };
                                                }

                                                double[] tmp = Vector.scalarProduct(Vector.dot(a2, a3), a3);
                                                double[] yAxis = Vector.subtract(a2, tmp);
                                                tmp = Vector.scalarProduct(Vector.dot(a2, xAxis), xAxis);
                                                yAxis = Vector.subtract(yAxis, tmp);
                                                Vector.normalize(yAxis);

                                                a2 = yAxis;
                                                a1 = xAxis;

                                                List<Double> t = (List<Double>) localOrigin.get("Coordinates");
                                                mappingMatrix = new double[] { a1[0], a1[1], a1[2], 0, a2[0],
                                                        a2[1], a2[2], 0, a3[0], a3[1], a3[2], 0,
                                                        t.get(0).doubleValue(), t.get(1).doubleValue(),
                                                        t.get(2).doubleValue(), 1 };
                                            }

                                            AbstractHashMapVirtualObject placement = next
                                                    .getDirectFeature(packageMetaData
                                                            .getEReference("IfcProduct", "ObjectPlacement"));
                                            if (placement != null) {
                                                productMatrix = placementToMatrix(placement);
                                            }

                                            AbstractHashMapVirtualObject mappingSource = item
                                                    .getDirectFeature(mappingSourceFeature);
                                            if (mappingSource != null) {
                                                Map<Long, ProductDef> map = representationMapToProduct
                                                        .get(((HashMapVirtualObject) mappingSource).getOid());
                                                if (map == null) {
                                                    map = new LinkedHashMap<>();
                                                    representationMapToProduct.put(
                                                            ((HashMapVirtualObject) mappingSource).getOid(),
                                                            map);
                                                }
                                                ProductDef pd = new ProductDef(next.getOid());
                                                pd.setMappedItemOid(item.getOid());
                                                pd.setObject(next);

                                                pd.setProductMatrix(productMatrix);
                                                pd.setMappingMatrix(mappingMatrix);
                                                map.put(next.getOid(), pd);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    next = queryObjectProvider2.next();
                }

                Set<Long> done = new HashSet<>();

                for (Long repMapId : representationMapToProduct.keySet()) {
                    Map<Long, ProductDef> map = representationMapToProduct.get(repMapId);

                    // When there is more than one instance using this mapping
                    if (map.size() > 1) {
                        Query query = new Query("Reuse query " + eClass.getName(), packageMetaData);
                        QueryPart queryPart = query.createQueryPart();
                        //                     QueryPart queryPart3 = query.createQueryPart();
                        queryPart.addType(eClass, false);
                        //                     queryPart3.addType(packageMetaData.getEClass("IfcMappedItem"), false);

                        long masterOid = map.values().iterator().next().getOid();

                        double[] inverted = Matrix.identity();
                        ProductDef masterProductDef = map.get(masterOid);
                        if (!Matrix.invertM(inverted, 0, masterProductDef.getMappingMatrix(), 0)) {
                            LOGGER.debug("No inverse, this mapping will be skipped and processed as normal");
                            // This is probably because of mirroring of something funky

                            // TODO we should however be able to squeeze out a little more reuse by finding another master...
                            continue;
                        }

                        for (ProductDef pd : map.values()) {
                            done.add(pd.getOid());
                            if (!optimizeMappedItems) {
                                queryPart.addOid(pd.getOid());

                                // In theory these should be fused together during querying
                                //                           queryPart3.addOid(pd.getMappedItemOid());
                            } else {
                                pd.setMasterOid(masterOid);
                            }
                        }
                        if (optimizeMappedItems) {
                            queryPart.addOid(masterOid);
                        }

                        LOGGER.debug("Running " + map.size()
                                + " objects in one batch because of reused geometry " + (eClass.getName()));

                        //                     queryPart3.addInclude(jsonQueryObjectModelConverter.getDefineFromFile("ifc2x3tc1-stdlib:IfcMappedItem"));

                        processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin,
                                settings, renderEngineFilter, renderEnginePool, executor, eClass, query,
                                queryPart, true, map, map.size());
                    }
                }

                Query query3 = new Query("Remaining " + eClass.getName(), packageMetaData);
                QueryPart queryPart3 = query3.createQueryPart();
                queryPart3.addType(eClass, false);
                Include include3 = queryPart3.createInclude();
                include3.addType(eClass, false);
                include3.addFieldDirect("Representation");
                Include rInclude = include3.createInclude();
                rInclude.addType(packageMetaData.getEClass("IfcProductRepresentation"), true);
                rInclude.addFieldDirect("Representations");
                Include representationsInclude2 = rInclude.createInclude();
                representationsInclude2.addType(packageMetaData.getEClass("IfcShapeModel"), true);
                representationsInclude2.addFieldDirect("ContextOfItems");

                queryObjectProvider2 = new QueryObjectProvider(databaseSession, bimServer, query3,
                        Collections.singleton(queryContext.getRoid()), packageMetaData);
                next = queryObjectProvider2.next();

                Query query = new Query("Main " + eClass.getName(), packageMetaData);
                QueryPart queryPart = query.createQueryPart();
                int written = 0;

                int maxObjectsPerFile = 0;
                if (nrProductsWithRepresentation <= 100) {
                    maxObjectsPerFile = 1;
                } else if (nrProductsWithRepresentation < 10000) {
                    maxObjectsPerFile = (int) (nrProductsWithRepresentation / 100);
                } else {
                    maxObjectsPerFile = 100;
                }

                //               LOGGER.info(report.getOriginalIfcFileName());
                //               LOGGER.info("Max objects per file: " + maxObjectsPerFile + " (" + eClass.getName() + ": " + nrProductsWithRepresentation + ")");

                report.setMaxPerFile(maxObjectsPerFile);

                while (next != null) {
                    if (next.eClass() == eClass && !done.contains(next.getOid())
                            && !toSkip.contains(next.getOid())) {
                        AbstractHashMapVirtualObject representation = next
                                .getDirectFeature(representationFeature);
                        if (representation != null) {
                            Set<HashMapVirtualObject> list = representation.getDirectListFeature(packageMetaData
                                    .getEReference("IfcProductRepresentation", "Representations"));
                            boolean goForIt = goForIt(list);
                            if (goForIt) {
                                if (next.eClass() == eClass && !done.contains(next.getOid())) {
                                    representation = next.getDirectFeature(representationFeature);
                                    if (representation != null) {
                                        list = representation.getDirectListFeature(packageMetaData
                                                .getEReference("IfcProductRepresentation", "Representations"));
                                        boolean goForIt2 = goForIt(list);
                                        if (goForIt2) {
                                            queryPart.addOid(next.getOid());
                                            written++;
                                            if (written >= maxObjectsPerFile) {
                                                processQuery(databaseSession, queryContext,
                                                        generateGeometryResult, ifcSerializerPlugin, settings,
                                                        renderEngineFilter, renderEnginePool, executor, eClass,
                                                        query, queryPart, false, null, written);
                                                query = new Query("Main " + eClass.getName(), packageMetaData);
                                                queryPart = query.createQueryPart();
                                                written = 0;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    next = queryObjectProvider2.next();
                }
                if (written > 0) {
                    processQuery(databaseSession, queryContext, generateGeometryResult, ifcSerializerPlugin,
                            settings, renderEngineFilter, renderEnginePool, executor, eClass, query, queryPart,
                            false, null, written);
                }
            }
        }

        allJobsPushed = true;

        executor.shutdown();
        executor.awaitTermination(24, TimeUnit.HOURS);

        // Need total bounds
        //         float[] quantizationMatrix = createQuantizationMatrixFromBounds(boundsMm);
        //         ByteBuffer verticesQuantized = quantizeVertices(vertices, quantizationMatrix, generateGeometryResult.getMultiplierToMm());
        //         geometryData.setAttribute(GeometryPackage.eINSTANCE.getGeometryData_VerticesQuantized(), verticesQuantized.array());

        LOGGER.debug("Generating quantized vertices");
        double[] quantizationMatrix = createQuantizationMatrixFromBounds(
                generateGeometryResult.getBoundsUntransformed(), multiplierToMm);
        for (Long id : geometryDataMap.keySet()) {
            Tuple<HashMapVirtualObject, ByteBuffer> tuple = geometryDataMap.get(id);

            HashMapVirtualObject buffer = new HashMapVirtualObject(queryContext,
                    GeometryPackage.eINSTANCE.getBuffer());
            //            Buffer buffer = databaseSession.create(Buffer.class);
            buffer.set("data",
                    quantizeVertices(tuple.getB().asDoubleBuffer(), quantizationMatrix, multiplierToMm)
                            .array());
            //            buffer.setData(quantizeVertices(tuple.getB(), quantizationMatrix, multiplierToMm).array());
            //            databaseSession.store(buffer);
            buffer.save();
            HashMapVirtualObject geometryData = tuple.getA();
            geometryData.set("verticesQuantized", buffer.getOid());
            int reused = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_Reused());
            int nrTriangles = (int) geometryData.eGet(GeometryPackage.eINSTANCE.getGeometryData_NrIndices())
                    / 3;
            int saveableTriangles = Math.max(0, (reused - 1)) * nrTriangles;
            geometryData.set("saveableTriangles", saveableTriangles);
            //            if (saveableTriangles > 0) {
            //               System.out.println("Saveable triangles: " + saveableTriangles);
            //            }
            geometryData.saveOverwrite();
        }

        long end = System.nanoTime();
        long total = totalBytes.get()
                - (bytesSavedByHash.get() + bytesSavedByTransformation.get() + bytesSavedByMapping.get());
        LOGGER.info("Rendertime: " + Formatters.nanosToString(end - start) + ", " + "Reused (by hash): "
                + Formatters.bytesToString(bytesSavedByHash.get()) + ", Reused (by transformation): "
                + Formatters.bytesToString(bytesSavedByTransformation.get()) + ", Reused (by mapping): "
                + Formatters.bytesToString(bytesSavedByMapping.get()) + ", Total: "
                + Formatters.bytesToString(totalBytes.get()) + ", Final: " + Formatters.bytesToString(total));
        if (report.getNumberOfDebugFiles() > 0) {
            LOGGER.error("Number of erroneous files: " + report.getNumberOfDebugFiles());
        }
        Map<String, Integer> skipped = report.getSkippedBecauseOfInvalidRepresentationIdentifier();
        if (skipped.size() > 0) {
            LOGGER.error("Number of representations skipped:");
            for (String identifier : skipped.keySet()) {
                LOGGER.error("\t" + identifier + ": " + skipped.get(identifier));
            }
        }
        String dump = geometryGenerationDebugger.dump();
        if (dump != null) {
            LOGGER.info(dump);
        }
    } catch (Exception e) {
        running = false;
        LOGGER.error("", e);
        report.setEnd(new GregorianCalendar());
        throw new GeometryGeneratingException(e);
    }
    report.setEnd(new GregorianCalendar());
    try {
        if (report.getNumberOfDebugFiles() > 0) {
            writeDebugFile();
        }
    } catch (IOException e) {
        LOGGER.debug("", e);
    }
    return generateGeometryResult;
}
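
TimeUnit.HOURS appears twice in this generator: the ThreadPoolExecutor is constructed with a 24-hour keep-alive time for idle threads, and executor.awaitTermination(24, TimeUnit.HOURS) then gives the submitted geometry jobs up to a day to drain before the results are post-processed.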

From source file: org.dcache.util.histograms.HistogramModelTest.java

@Test
public void updateOnTimeframeHistogramShouldSumLastValue() throws NoSuchMethodException, InstantiationException,
        IllegalAccessException, InvocationTargetException {
    givenTimeframeHistogram();
    givenQueueCountValuesFor(48);
    givenBinUnitOf((double) TimeUnit.HOURS.toMillis(1));
    givenBinCountOf(48);
    givenBinLabelOf(TimeUnit.HOURS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("Queued Movers");
    givenHighestBinOf(getHoursInThePastFromNow(0));
    whenConfigureIsCalled();
    assertThatUpdateSumsLastValue();
}
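
As in the rotation test above, TimeUnit.HOURS supplies both the one-hour bin width in milliseconds and the "HOURS" bin label; only the highest bin differs, so the update sums into the last value instead of rotating the buffer.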

From source file: org.lilyproject.repository.impl.AbstractSchemaCache.java

/**
 * Refresh the caches and put the cacheWatcher back on the cache
 * invalidation ZooKeeper node.
 */
private void refreshAll() throws InterruptedException, RepositoryException {

    watchPathsForExistence();

    // Set a watch on the parent path, in case everything needs to be
    // refreshed
    try {
        Stat stat = new Stat();
        ZkUtil.getData(zooKeeper, CACHE_INVALIDATION_PATH, parentWatcher, stat);
        if (parentVersion == null || (stat.getVersion() != parentVersion)) {
            // An explicit refresh was triggered
            parentVersion = stat.getVersion();
            bucketVersions.clear();
        }
    } catch (KeeperException e) {
        if (Thread.currentThread().isInterrupted()) {
            if (log.isDebugEnabled()) {
                log.debug(
                        "Failed to put parent watcher on " + CACHE_INVALIDATION_PATH + " : thread interrupted");
            }
        } else {
            log.warn("Failed to put parent watcher on " + CACHE_INVALIDATION_PATH, e);
            // Failed to put our watcher.
            // Relying on the ConnectionWatcher to put it again and
            // initialize the caches.
        }
    }

    if (bucketVersions.isEmpty()) {
        // All buckets need to be refreshed

        if (log.isDebugEnabled()) {
            log.debug("Refreshing all types in the schema cache, no bucket versions known yet");
        }
        // Set a watch again on all buckets
        final ExecutorService sixteenThreads = Executors.newFixedThreadPool(50);
        for (final CacheWatcher watcher : cacheWatchers) {
            sixteenThreads.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    String bucketId = watcher.getBucket();
                    String bucketPath = bucketPath(bucketId);
                    Stat stat = new Stat();
                    try {
                        ZkUtil.getData(zooKeeper, bucketPath, watcher, stat);
                        bucketVersions.put(bucketId, stat.getVersion());
                    } catch (KeeperException e) {
                        if (Thread.currentThread().isInterrupted()) {
                            if (log.isDebugEnabled()) {
                                log.debug("Failed to put watcher on bucket " + bucketPath
                                        + " : thread interrupted");
                            }
                        } else {
                            log.warn("Failed to put watcher on bucket " + bucketPath
                                    + " - Relying on connection watcher to reinitialize cache", e);
                            // Failed to put our watcher.
                            // Relying on the ConnectionWatcher to put it again and
                            // initialize the caches.
                        }
                    }

                    return null;
                }
            });
        }
        sixteenThreads.shutdown();
        sixteenThreads.awaitTermination(1, TimeUnit.HOURS);

        // Read all types in one go
        Pair<List<FieldType>, List<RecordType>> types = getTypeManager().getTypesWithoutCache();
        fieldTypesCache.refreshFieldTypes(types.getV1());
        updatedFieldTypes = true;
        recordTypes.refreshRecordTypes(types.getV2());
    } else {
        // Only the changed buckets need to be refreshed.
        // Upon a re-connection event it could be that some updates were
        // missed and the watches were not triggered.
        // By checking the version number of the buckets we know which
        // buckets to refresh.

        Map<String, Integer> newBucketVersions = new HashMap<String, Integer>();
        // Set a watch again on all buckets
        for (CacheWatcher watcher : cacheWatchers) {
            String bucketId = watcher.getBucket();
            String bucketPath = bucketPath(bucketId);
            Stat stat = new Stat();
            try {
                ZkUtil.getData(zooKeeper, bucketPath, watcher, stat);
                Integer oldVersion = bucketVersions.get(bucketId);
                if (oldVersion == null || (oldVersion != stat.getVersion())) {
                    newBucketVersions.put(bucketId, stat.getVersion());
                }
            } catch (KeeperException e) {
                if (Thread.currentThread().isInterrupted()) {
                    if (log.isDebugEnabled()) {
                        log.debug("Failed to put watcher on bucket " + bucketPath + " : thread is interrupted");
                    }
                } else {
                    log.warn("Failed to put watcher on bucket " + bucketPath
                            + " - Relying on connection watcher to reinitialize cache", e);
                    // Failed to put our watcher.
                    // Relying on the ConnectionWatcher to put it again and
                    // initialize the caches.
                }
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("Refreshing all types in the schema cache, limiting to buckets"
                    + newBucketVersions.keySet());
        }
        for (Entry<String, Integer> entry : newBucketVersions.entrySet()) {
            bucketVersions.put(entry.getKey(), entry.getValue());
            TypeBucket typeBucket = getTypeManager().getTypeBucketWithoutCache(entry.getKey());
            fieldTypesCache.refreshFieldTypeBucket(typeBucket);
            updatedFieldTypes = true;
            recordTypes.refreshRecordTypeBucket(typeBucket);
        }
    }
}
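
The bounded wait sixteenThreads.awaitTermination(1, TimeUnit.HOURS) caps the parallel re-watching of cache buckets at one hour rather than blocking indefinitely before the types are re-read.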

From source file: com.linkedin.pinot.tools.admin.command.CreateSegmentCommand.java

@Override
public boolean execute() throws Exception {
    LOGGER.info("Executing command: {}", toString());

    // Load generator config if exist.
    final SegmentGeneratorConfig segmentGeneratorConfig;
    if (_generatorConfigFile != null) {
        segmentGeneratorConfig = new ObjectMapper().readValue(new File(_generatorConfigFile),
                SegmentGeneratorConfig.class);
    } else {
        segmentGeneratorConfig = new SegmentGeneratorConfig();
    }

    // Load config from segment generator config.
    String configDataDir = segmentGeneratorConfig.getDataDir();
    if (_dataDir == null) {
        if (configDataDir == null) {
            throw new RuntimeException("Must specify dataDir.");
        }
        _dataDir = configDataDir;
    } else {
        if (configDataDir != null && !configDataDir.equals(_dataDir)) {
            LOGGER.warn("Find dataDir conflict in command line and config file, use config in command line: {}",
                    _dataDir);
        }
    }

    FileFormat configFormat = segmentGeneratorConfig.getFormat();
    if (_format == null) {
        if (configFormat == null) {
            throw new RuntimeException("Format cannot be null in config file.");
        }
        _format = configFormat;
    } else {
        if (configFormat != _format && configFormat != FileFormat.AVRO) {
            LOGGER.warn("Find format conflict in command line and config file, use config in command line: {}",
                    _format);
        }
    }

    String configOutDir = segmentGeneratorConfig.getOutDir();
    if (_outDir == null) {
        if (configOutDir == null) {
            throw new RuntimeException("Must specify outDir.");
        }
        _outDir = configOutDir;
    } else {
        if (configOutDir != null && !configOutDir.equals(_outDir)) {
            LOGGER.warn("Find outDir conflict in command line and config file, use config in command line: {}",
                    _outDir);
        }
    }

    if (segmentGeneratorConfig.isOverwrite()) {
        _overwrite = true;
    }

    String configTableName = segmentGeneratorConfig.getTableName();
    if (_tableName == null) {
        if (configTableName == null) {
            throw new RuntimeException("Must specify tableName.");
        }
        _tableName = configTableName;
    } else {
        if (configTableName != null && !configTableName.equals(_tableName)) {
            LOGGER.warn(
                    "Find tableName conflict in command line and config file, use config in command line: {}",
                    _tableName);
        }
    }

    String configSegmentName = segmentGeneratorConfig.getSegmentName();
    if (_segmentName == null) {
        if (configSegmentName == null) {
            throw new RuntimeException("Must specify segmentName.");
        }
        _segmentName = configSegmentName;
    } else {
        if (configSegmentName != null && !configSegmentName.equals(_segmentName)) {
            LOGGER.warn(
                    "Find segmentName conflict in command line and config file, use config in command line: {}",
                    _segmentName);
        }
    }

    // Filter out all input files.
    File dir = new File(_dataDir);
    if (!dir.exists() || !dir.isDirectory()) {
        throw new RuntimeException("Data directory " + _dataDir + " not found.");
    }

    File[] files = dir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.toLowerCase().endsWith(_format.toString().toLowerCase());
        }
    });

    if ((files == null) || (files.length == 0)) {
        throw new RuntimeException("Data directory " + _dataDir + " does not contain "
                + _format.toString().toUpperCase() + " files.");
    }

    // Make sure output directory does not already exist, or can be overwritten.
    File outDir = new File(_outDir);
    if (outDir.exists()) {
        if (!_overwrite) {
            throw new IOException("Output directory " + _outDir + " already exists.");
        } else {
            FileUtils.deleteDirectory(outDir);
        }
    }

    // Set other generator configs from command line.
    segmentGeneratorConfig.setDataDir(_dataDir);
    segmentGeneratorConfig.setFormat(_format);
    segmentGeneratorConfig.setOutDir(_outDir);
    segmentGeneratorConfig.setOverwrite(_overwrite);
    segmentGeneratorConfig.setTableName(_tableName);
    segmentGeneratorConfig.setSegmentName(_segmentName);
    if (_schemaFile != null) {
        if (segmentGeneratorConfig.getSchemaFile() != null
                && !segmentGeneratorConfig.getSchemaFile().equals(_schemaFile)) {
            LOGGER.warn(
                    "Find schemaFile conflict in command line and config file, use config in command line: {}",
                    _schemaFile);
        }
        segmentGeneratorConfig.setSchemaFile(_schemaFile);
    }
    if (_readerConfigFile != null) {
        if (segmentGeneratorConfig.getReaderConfigFile() != null
                && !segmentGeneratorConfig.getReaderConfigFile().equals(_readerConfigFile)) {
            LOGGER.warn(
                    "Find readerConfigFile conflict in command line and config file, use config in command line: {}",
                    _readerConfigFile);
        }
        segmentGeneratorConfig.setReaderConfigFile(_readerConfigFile);
    }
    if (_enableStarTreeIndex) {
        segmentGeneratorConfig.setEnableStarTreeIndex(true);
    }
    if (_starTreeIndexSpecFile != null) {
        if (segmentGeneratorConfig.getStarTreeIndexSpecFile() != null
                && !segmentGeneratorConfig.getStarTreeIndexSpecFile().equals(_starTreeIndexSpecFile)) {
            LOGGER.warn(
                    "Find starTreeIndexSpecFile conflict in command line and config file, use config in command line: {}",
                    _starTreeIndexSpecFile);
        }
        segmentGeneratorConfig.setStarTreeIndexSpecFile(_starTreeIndexSpecFile);
    }

    ExecutorService executor = Executors.newFixedThreadPool(_numThreads);
    int cnt = 0;
    for (final File file : files) {
        final int segCnt = cnt;

        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    SegmentGeneratorConfig config = new SegmentGeneratorConfig(segmentGeneratorConfig);
                    config.setInputFilePath(file.getAbsolutePath());
                    config.setSegmentName(_segmentName + "_" + segCnt);
                    config.loadConfigFiles();

                    final SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
                    driver.init(config);
                    driver.build();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
        cnt += 1;
    }

    executor.shutdown();
    return executor.awaitTermination(1, TimeUnit.HOURS);
}
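
The command's success is tied directly to the timeout: executor.awaitTermination(1, TimeUnit.HOURS) returns false if the segment-creation workers have not finished within an hour, and that boolean is what execute() reports.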

From source file: org.perfcake.util.Utils.java

/**
 * Converts time in milliseconds to H:MM:SS format, where H is unbound.
 *
 * @param time
 *       The timestamp in milliseconds.
 * @return The string representing the timestamp in H:MM:SS format.
 */
public static String timeToHMS(final long time) {
    final long hours = TimeUnit.MILLISECONDS.toHours(time);
    final long minutes = TimeUnit.MILLISECONDS.toMinutes(time - TimeUnit.HOURS.toMillis(hours));
    final long seconds = TimeUnit.MILLISECONDS
            .toSeconds(time - TimeUnit.HOURS.toMillis(hours) - TimeUnit.MINUTES.toMillis(minutes));

    final StringBuilder sb = new StringBuilder();
    sb.append(hours).append(":").append(String.format("%02d", minutes)).append(":")
            .append(String.format("%02d", seconds));

    return sb.toString();
}
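
For example, timeToHMS(3723000) returns "1:02:03": 3,723,000 ms contains one full hour (TimeUnit.HOURS.toMillis(1) = 3,600,000 ms), and the remaining 123,000 ms breaks down into two minutes and three seconds.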

From source file: org.hawkular.alerter.elasticsearch.ElasticsearchAlerter.java

public static TimeUnit getIntervalUnit(String interval) {
    if (interval == null || interval.isEmpty()) {
        interval = INTERVAL_DEFAULT;
    }
    char unit = interval.charAt(interval.length() - 1);
    switch (unit) {
    case 'h':
        return TimeUnit.HOURS;
    case 's':
        return TimeUnit.SECONDS;
    case 'm':
    default:
        return TimeUnit.MINUTES;
    }
}
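
Given strings such as "2h", "30s", or "5m", this returns TimeUnit.HOURS, TimeUnit.SECONDS, or TimeUnit.MINUTES respectively; a null or empty interval first falls back to INTERVAL_DEFAULT, and any unrecognized suffix is treated as minutes via the default case.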

From source file: com.ibm.mil.readyapps.physio.fragments.ProgressFragment.java

private void fetchData(final TimeFilter filter) {
    fetchPainData(filter);

    Calendar calendar = Calendar.getInstance();
    Date endDate = new Date();
    calendar.setTime(endDate);

    TimeUnit unit;
    int interval;
    int bound;
    switch (filter) {
    case DAY:
        calendar.add(Calendar.DAY_OF_YEAR, -1);
        calendar.add(Calendar.HOUR, 1);
        unit = TimeUnit.HOURS;
        interval = 1;
        bound = DAY_BOUND;
        break;
    case WEEK:
        calendar.add(Calendar.WEEK_OF_YEAR, -1);
        unit = TimeUnit.DAYS;
        interval = 1;
        bound = WEEK_BOUND;
        break;
    case MONTH:
        calendar.add(Calendar.MONTH, -1);
        unit = TimeUnit.DAYS;
        interval = 7;
        bound = MONTH_BOUND;
        break;
    case YEAR:
    default:
        calendar.add(Calendar.YEAR, -1);
        calendar.add(Calendar.MONTH, 1);
        unit = TimeUnit.DAYS;
        interval = 30;
        bound = YEAR_BOUND;
        break;
    }

    Date startDate = calendar.getTime();

    HealthDataRetriever.Builder builder = new HealthDataRetriever.Builder().startDate(startDate)
            .endDate(endDate).timeUnit(unit).timeInterval(interval);

    HealthDataRetriever stepsRetriever = builder.dataType(HealthDataRetriever.DataType.STEPS)
            .handler(new HealthDataRetriever.Handler() {
                @Override
                public void handle(List<Integer> data) {
                    switch (filter) {
                    case DAY:
                        stepsDay = data;
                        break;
                    case WEEK:
                        stepsWeek = data;
                        break;
                    case MONTH:
                        stepsMonth = data;
                        break;
                    case YEAR:
                        stepsYear = data;
                        break;
                    }

                    verifyDataIntegrity();
                }
            }).build();
    stepsRetriever.retrieve(mClient);

    HealthDataRetriever heartRetriever = builder.dataType(HealthDataRetriever.DataType.HEART_RATE)
            .handler(new HealthDataRetriever.Handler() {
                @Override
                public void handle(List<Integer> data) {
                    switch (filter) {
                    case DAY:
                        heartDay = data;
                        break;
                    case WEEK:
                        heartWeek = data;
                        break;
                    case MONTH:
                        heartMonth = data;
                        break;
                    case YEAR:
                        heartYear = data;
                        break;
                    }

                    verifyDataIntegrity();
                }
            }).build();
    heartRetriever.retrieve(mClient);

    HealthDataRetriever weightRetriever = builder.dataType(HealthDataRetriever.DataType.WEIGHT)
            .handler(new HealthDataRetriever.Handler() {
                @Override
                public void handle(List<Integer> data) {
                    switch (filter) {
                    case DAY:
                        weightDay = data;
                        break;
                    case WEEK:
                        weightWeek = data;
                        break;
                    case MONTH:
                        weightMonth = data;
                        break;
                    case YEAR:
                        weightYear = data;
                        break;
                    }

                    verifyDataIntegrity();
                }
            }).build();
    weightRetriever.retrieve(mClient);

    calendar.setTime(endDate);
    int caloriesInterval = bound == DAY_BOUND ? Calendar.HOUR : Calendar.DAY_OF_YEAR;

    for (int i = 0; i < bound; i++) {
        endDate = calendar.getTime();
        calendar.add(caloriesInterval, -interval);
        startDate = calendar.getTime();

        HealthDataRetriever caloriesRetriever = builder.startDate(startDate).endDate(endDate)
                .dataType(HealthDataRetriever.DataType.CALORIES).handler(new HealthDataRetriever.Handler() {
                    @Override
                    public void handle(List<Integer> data) {
                        int sum = 0;
                        for (Integer datum : data) {
                            sum += datum;
                        }

                        switch (filter) {
                        case DAY:
                            if (caloriesDay == null) {
                                caloriesDay = new ArrayList<>();
                            }
                            caloriesDay.add(0, sum);
                            break;
                        case WEEK:
                            if (caloriesWeek == null) {
                                caloriesWeek = new ArrayList<>();
                            }
                            caloriesWeek.add(0, sum);
                            break;
                        case MONTH:
                            if (caloriesMonth == null) {
                                caloriesMonth = new ArrayList<>();
                            }
                            caloriesMonth.add(0, sum);
                            break;
                        case YEAR:
                            if (caloriesYear == null) {
                                caloriesYear = new ArrayList<>();
                            }
                            caloriesYear.add(0, sum);
                            break;
                        }

                        verifyDataIntegrity();
                    }
                }).build();
        caloriesRetriever.retrieve(mClient);
    }
}
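
Only the DAY filter uses TimeUnit.HOURS, sampling the past 24 hours of health data at one-hour intervals; the coarser WEEK, MONTH, and YEAR filters switch to TimeUnit.DAYS with wider intervals.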

From source file: gobblin.data.management.conversion.hive.validation.ValidationJob.java

private void runCountValidation() throws InterruptedException {
    try {
        // Validation results
        this.successfulConversions = Maps.newConcurrentMap();
        this.failedConversions = Maps.newConcurrentMap();
        this.warnConversions = Maps.newConcurrentMap();
        this.dataValidationFailed = Maps.newConcurrentMap();
        this.dataValidationSuccessful = Maps.newConcurrentMap();

        // Find datasets to validate
        Iterator<HiveDataset> iterator = this.datasetFinder.getDatasetsIterator();
        EventSubmitter.submit(Optional.of(this.eventSubmitter),
                EventConstants.VALIDATION_FIND_HIVE_TABLES_EVENT);

        while (iterator.hasNext()) {
            ConvertibleHiveDataset hiveDataset = (ConvertibleHiveDataset) iterator.next();
            try (AutoReturnableObject<IMetaStoreClient> client = hiveDataset.getClientPool().getClient()) {

                // Validate dataset
                log.info(String.format("Validating dataset: %s", hiveDataset));
                if (HiveUtils.isPartitioned(hiveDataset.getTable())) {
                    processPartitionedTable(hiveDataset, client);
                } else {
                    processNonPartitionedTable(hiveDataset);
                }
            }
        }

        // Wait for all validation queries to finish
        log.info(String.format("Waiting for %d futures to complete", this.futures.size()));

        this.exec.shutdown();
        this.exec.awaitTermination(4, TimeUnit.HOURS);

        boolean oneFutureFailure = false;
        // Check if there were any exceptions
        for (Future<Void> future : this.futures) {
            try {
                future.get();
            } catch (Throwable t) {
                log.error("getValidationOutputFromHive failed", t);
                oneFutureFailure = true;
            }
        }

        // Log validation results:
        // Validation results are consolidated into the successfulConversions and failedConversions
        // These are then converted into log lines in the Azkaban logs as done below
        for (Map.Entry<String, String> successfulConversion : this.successfulConversions.entrySet()) {
            log.info(String.format("Successful conversion: %s [%s]", successfulConversion.getKey(),
                    successfulConversion.getValue()));
        }
        for (Map.Entry<String, String> successfulConversion : this.warnConversions.entrySet()) {
            log.warn(String.format("No conversion found for: %s [%s]", successfulConversion.getKey(),
                    successfulConversion.getValue()));
        }
        for (Map.Entry<String, String> failedConverion : this.failedConversions.entrySet()) {
            log.error(String.format("Failed conversion: %s [%s]", failedConverion.getKey(),
                    failedConverion.getValue()));
        }

        for (Map.Entry<String, String> success : this.dataValidationSuccessful.entrySet()) {
            log.info(
                    String.format("Data validation successful: %s [%s]", success.getKey(), success.getValue()));
        }

        for (Map.Entry<String, String> failed : this.dataValidationFailed.entrySet()) {
            log.error(String.format("Data validation failed: %s [%s]", failed.getKey(), failed.getValue()));
        }

        if (!this.failedConversions.isEmpty() || !this.dataValidationFailed.isEmpty()) {
            throw new RuntimeException(String.format(
                    "Validation failed for %s conversions. See previous logs for exact validation failures",
                    failedConversions.size()));
        }
        if (oneFutureFailure) {
            throw new RuntimeException("At least one hive ddl failed. Check previous logs");
        }

    } catch (IOException e) {
        Throwables.propagate(e);
    }
}
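
Here this.exec.awaitTermination(4, TimeUnit.HOURS) gives the queued Hive validation queries up to four hours to complete before the futures are inspected for failures and the results are logged.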