List of usage examples for com.fasterxml.jackson.databind ObjectMapper getTypeFactory
public TypeFactory getTypeFactory()
From source file:com.ethlo.geodata.restdocs.AbstractJacksonFieldSnippet.java
/**
 * Generates field documentation for {@code type} and merges the resulting descriptors
 * into {@code fieldDescriptors}, keeping the first descriptor registered for any path.
 *
 * @param fieldDescriptors accumulator keyed by field path; existing entries are never overwritten
 * @param type             the Java type whose fields are to be documented
 * @param objectMapper     supplies the writer, deserialization config and type factory
 * @param javadocReader    source of Javadoc-derived field descriptions
 * @param constraintReader source of bean-validation constraint descriptions
 * @throws JsonMappingException if the type cannot be introspected by Jackson
 */
private void resolveFieldDescriptors(Map<String, FieldDescriptor> fieldDescriptors, Type type,
        ObjectMapper objectMapper, JavadocReader javadocReader, ConstraintReader constraintReader)
        throws JsonMappingException {
    FieldDocumentationGenerator generator = new FieldDocumentationGenerator(objectMapper.writer(),
            objectMapper.getDeserializationConfig(), javadocReader, constraintReader);
    List<FieldDescriptor> descriptors = generator.generateDocumentation(type, objectMapper.getTypeFactory());
    for (FieldDescriptor descriptor : descriptors) {
        // putIfAbsent == "only add when missing (or mapped to null)", exactly the
        // original get()==null check, without the double lookup.
        fieldDescriptors.putIfAbsent(descriptor.getPath(), descriptor);
    }
}
From source file:aasdntool.AASDNTool.java
/**
 * Action handler that fetches the network topology and attached devices from the
 * Floodlight-style controller REST API and renders them as an interactive JUNG graph
 * in a new JFrame.
 *
 * Reads the instance fields {@code controllerIP} (controller address) and
 * {@code networkGraph} (graph being built) — presumably declared on the enclosing
 * class; TODO confirm.
 *
 * @param evt the triggering Swing action event (unused in the body)
 */
@SuppressWarnings("unchecked")
private void topologyActionPerformed(java.awt.event.ActionEvent evt) {
    // --- Phase 1: fetch switch-to-switch links and build the graph vertices/edges ---
    try {
        StringBuffer response = new StringBuffer();
        URL obj = new URL("http://" + controllerIP + ":8080/wm/topology/links/json");
        HttpURLConnection con = (HttpURLConnection) obj.openConnection();
        // optional, default is already GET
        con.setRequestMethod("GET");
        int responseCode = con.getResponseCode();
        System.out.println("\nSending 'GET' request to URL : " + "http://" + controllerIP
                + ":8080/wm/topology/links/json");
        System.out.println("Response Code : " + responseCode);
        // NOTE(review): reader is not closed if readLine() throws — consider try-with-resources.
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        String inputLine;
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
        }
        in.close();
        // Deserialize the JSON array of links into Topology beans.
        ObjectMapper mapper = new ObjectMapper();
        List<Topology> topology_links = mapper.readValue(response.toString(),
                mapper.getTypeFactory().constructCollectionType(List.class, Topology.class));
        // Create a fresh graph of the topology (replaces any previous one).
        networkGraph = new SparseMultigraph<String, String>();
        // Add both endpoints of every link as vertices (addVertex is idempotent for duplicates).
        for (Topology topo : topology_links) {
            networkGraph.addVertex(topo.getSrc_switch());
        }
        for (Topology topo : topology_links) {
            networkGraph.addVertex(topo.getDst_switch());
        }
        // Add links between switches; edge names use the last 4 chars of each DPID.
        for (Topology topo : topology_links) {
            try {
                networkGraph.addEdge(
                        topo.getSrc_switch().substring(topo.getSrc_switch().length() - 4) + " to "
                                + topo.getDst_switch().substring(topo.getDst_switch().length() - 4),
                        topo.getSrc_switch(), topo.getDst_switch());
            } catch (java.lang.IllegalArgumentException e) {
                // SparseMultigraph rejects a duplicate edge name — treat as an already-known link.
                System.out.println("Double link detected");
            }
        }
    } catch (Exception e) {
        System.out.println("Exception occured:" + e);
    }
    // --- Phase 2: fetch attached devices, add them to the graph, then display it ---
    try {
        StringBuffer response = new StringBuffer();
        URL obj = new URL("http://" + controllerIP + ":8080/wm/device/");
        HttpURLConnection con = (HttpURLConnection) obj.openConnection();
        // optional, default is already GET
        con.setRequestMethod("GET");
        int responseCode = con.getResponseCode();
        System.out.println("\nSending 'GET' request to URL : " + "http://" + controllerIP + ":8080/wm/device/");
        System.out.println("Response Code : " + responseCode);
        // NOTE(review): same unclosed-on-exception reader pattern as above.
        BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()));
        String inputLine;
        while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
        }
        in.close();
        ObjectMapper deviceMapper = new ObjectMapper();
        List<Device> topology_links_devices = deviceMapper.readValue(response.toString(),
                deviceMapper.getTypeFactory().constructCollectionType(List.class, Device.class));
        // Link each device (identified by its first MAC) to its first attachment-point switch.
        for (Device deviceLinks : topology_links_devices) {
            networkGraph.addVertex(deviceLinks.getMac().get(0));
            ArrayList<AttachmentPoint> deviceLinkAttachementPoint = deviceLinks.getAttachmentPoint();
            if (deviceLinkAttachementPoint != null && deviceLinkAttachementPoint.size() > 0) {
                networkGraph.addEdge(
                        deviceLinks.getMac().get(0) + deviceLinkAttachementPoint.get(0).getSwitchDPID(),
                        deviceLinks.getMac().get(0), deviceLinkAttachementPoint.get(0).getSwitchDPID());
            } else {
                System.out.println("Device " + deviceLinks.getMac().get(0) + " is not connected to any switch");
            }
        }
        // The Layout<V, E> is parameterized by the vertex and edge types.
        // NOTE(review): raw CircleLayout and Integer type parameter don't match the
        // String-vertex graph — works via erasure, but worth cleaning up.
        Layout<Integer, String> layout = new CircleLayout(networkGraph);
        layout.setSize(new Dimension(800, 800)); // initial size of the layout space
        VisualizationViewer<Integer, String> vv = new VisualizationViewer<Integer, String>(layout);
        vv.setPreferredSize(new Dimension(1000, 1000)); // viewing area size
        vv.getRenderContext().setVertexLabelTransformer(new ToStringLabeller());
        vv.getRenderContext().setEdgeLabelTransformer(new ToStringLabeller());
        // Enable node picking/dragging with the mouse.
        final DefaultModalGraphMouse graphMouse = new DefaultModalGraphMouse();
        vv.setGraphMouse(graphMouse);
        graphMouse.setMode(ModalGraphMouse.Mode.PICKING);
        vv.addGraphMouseListener(new GraphMouseListener() {
            // Double-clicking a vertex opens the Login dialog for that node.
            public void graphClicked(Object vertex, MouseEvent mouseEvent) {
                if (mouseEvent.getClickCount() == 2) {
                    System.out.println("Double clicked " + vertex);
                    Login login = new Login();
                    login.initialize(vertex.toString());
                }
            }

            public void graphPressed(Object arg0, MouseEvent arg1) {
                // intentionally unused
            }

            public void graphReleased(Object arg0, MouseEvent arg1) {
                // intentionally unused
            }
        });
        JFrame frame = new JFrame("Topology");
        //frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.getContentPane().add(vv);
        frame.pack();
        frame.setVisible(true);
    } catch (Exception e) {
        System.out.println("Exception occured:" + e);
    }
}
From source file:org.wso2.emm.agent.services.operation.OperationProcessor.java
/**
 * Applies a policy bundle to the device: revokes any previously applied policy,
 * persists the new payload, and executes each operation contained in it.
 *
 * On any failure (or if device admin is inactive) the operation status is set to the
 * error value, a response message is recorded, and the result is reported through
 * {@code operationManager.setPolicyBundle(operation)} before an exception is thrown.
 *
 * @param operation - Operation object carrying the policy payload.
 * @throws AndroidAgentException if the payload cannot be parsed or the device
 *         administrator is not activated
 */
public void setPolicyBundle(org.wso2.emm.agent.beans.Operation operation) throws AndroidAgentException {
    if (isDeviceAdminActive()) {
        // A previously applied policy must be revoked before the new one takes effect.
        if (Preference.getString(context, Constants.PreferenceFlag.APPLIED_POLICY) != null) {
            operationManager.revokePolicy(operation);
        }
        // NOTE(review): getPayLoad().toString() will NPE if the payload is absent, so the
        // later "payload != null" check can never be false — confirm intended behavior.
        String payload = operation.getPayLoad().toString();
        if (Constants.DEBUG_MODE_ENABLED) {
            Log.d(TAG, "Policy payload: " + payload);
        }
        PolicyOperationsMapper operationsMapper = new PolicyOperationsMapper();
        ObjectMapper mapper = new ObjectMapper();
        // Be lenient: unknown fields in the payload and empty beans are tolerated.
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        try {
            // Persist the payload first so the applied policy can be revoked/compared later.
            if (payload != null) {
                Preference.putString(context, Constants.PreferenceFlag.APPLIED_POLICY, payload);
            }
            // The payload is a JSON array of operations; map each to its concrete
            // operation type and execute it.
            List<Operation> operations = mapper.readValue(payload, mapper.getTypeFactory()
                    .constructCollectionType(List.class, org.wso2.emm.agent.beans.Operation.class));
            for (org.wso2.emm.agent.beans.Operation op : operations) {
                op = operationsMapper.getOperation(op);
                this.doTask(op);
            }
            operation.setStatus(context.getResources().getString(R.string.operation_value_completed));
            operationManager.setPolicyBundle(operation);
            if (Constants.DEBUG_MODE_ENABLED) {
                Log.d(TAG, "Policy applied");
            }
        } catch (IOException e) {
            // Parsing failed: report the error back before propagating.
            operation.setStatus(context.getResources().getString(R.string.operation_value_error));
            operation.setOperationResponse("Error occurred while parsing policy bundle stream.");
            operationManager.setPolicyBundle(operation);
            throw new AndroidAgentException("Error occurred while parsing stream", e);
        }
    } else {
        // Without device-admin rights no policy can be enforced; report and abort.
        operation.setStatus(context.getResources().getString(R.string.operation_value_error));
        operation.setOperationResponse("Device administrator is not activated, hence cannot execute policies.");
        operationManager.setPolicyBundle(operation);
        throw new AndroidAgentException("Device administrator is not activated, hence cannot execute policies");
    }
}
From source file:org.wso2.iot.agent.services.operation.OperationProcessor.java
/**
 * Applies a policy bundle to the device: revokes any previously applied policy,
 * persists the new payload, and executes each operation contained in it.
 *
 * On any failure (or if device admin is inactive) the operation status is set to the
 * error value, a response message is recorded, and the result is reported through
 * {@code operationManager.setPolicyBundle(operation)} before an exception is thrown.
 *
 * @param operation - Operation object carrying the policy payload.
 * @throws AndroidAgentException if the payload cannot be parsed or the device
 *         administrator is not activated
 */
public void setPolicyBundle(org.wso2.iot.agent.beans.Operation operation) throws AndroidAgentException {
    if (isDeviceAdminActive()) {
        // A previously applied policy must be revoked before the new one takes effect.
        if (Preference.getString(context, Constants.PreferenceFlag.APPLIED_POLICY) != null) {
            operationManager.revokePolicy(operation);
        }
        // NOTE(review): getPayLoad().toString() will NPE if the payload is absent, so the
        // later "payload != null" check can never be false — confirm intended behavior.
        String payload = operation.getPayLoad().toString();
        if (Constants.DEBUG_MODE_ENABLED) {
            Log.d(TAG, "Policy payload: " + payload);
        }
        PolicyOperationsMapper operationsMapper = new PolicyOperationsMapper();
        ObjectMapper mapper = new ObjectMapper();
        // Be lenient: unknown fields in the payload and empty beans are tolerated.
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        try {
            // Persist the payload first so the applied policy can be revoked/compared later.
            if (payload != null) {
                Preference.putString(context, Constants.PreferenceFlag.APPLIED_POLICY, payload);
            }
            // The payload is a JSON array of operations; map each to its concrete
            // operation type and execute it.
            List<Operation> operations = mapper.readValue(payload, mapper.getTypeFactory()
                    .constructCollectionType(List.class, org.wso2.iot.agent.beans.Operation.class));
            for (org.wso2.iot.agent.beans.Operation op : operations) {
                op = operationsMapper.getOperation(op);
                this.doTask(op);
            }
            operation.setStatus(context.getResources().getString(R.string.operation_value_completed));
            operationManager.setPolicyBundle(operation);
            if (Constants.DEBUG_MODE_ENABLED) {
                Log.d(TAG, "Policy applied");
            }
        } catch (IOException e) {
            // Parsing failed: report the error back before propagating.
            operation.setStatus(context.getResources().getString(R.string.operation_value_error));
            operation.setOperationResponse("Error occurred while parsing policy bundle stream.");
            operationManager.setPolicyBundle(operation);
            throw new AndroidAgentException("Error occurred while parsing stream", e);
        }
    } else {
        // Without device-admin rights no policy can be enforced; report and abort.
        operation.setStatus(context.getResources().getString(R.string.operation_value_error));
        operation.setOperationResponse("Device administrator is not activated, hence cannot execute policies.");
        operationManager.setPolicyBundle(operation);
        throw new AndroidAgentException("Device administrator is not activated, hence cannot execute policies");
    }
}
From source file:com.fer.hr.web.rest.resources.Query2Resource.java
/** * Drill across on a result set/*from ww w . ja v a 2 s.c o m*/ * @summary Drill across * @param queryName The query name * @param position The drill position * @param returns The dimensions and levels returned * @return The new thin query object. */ @POST @Produces({ "application/json" }) @Path("/{queryname}/drillacross") public ThinQuery drillacross(@PathParam("queryname") String queryName, @FormParam("position") String position, @FormParam("drill") String returns) { if (log.isDebugEnabled()) { log.debug("TRACK\t" + "\t/query/" + queryName + "/drillacross\tPOST"); } try { String[] positions = position.split(":"); List<Integer> cellPosition = new ArrayList<>(); for (String p : positions) { Integer pInt = Integer.parseInt(p); cellPosition.add(pInt); } ObjectMapper mapper = new ObjectMapper(); CollectionType ct = mapper.getTypeFactory().constructCollectionType(ArrayList.class, String.class); JavaType st = mapper.getTypeFactory().uncheckedSimpleType(String.class); Map<String, List<String>> levels = mapper.readValue(returns, mapper.getTypeFactory().constructMapType(Map.class, st, ct)); return thinQueryService.drillacross(queryName, cellPosition, levels); } catch (Exception e) { log.error("Cannot execute query (" + queryName + ")", e); String error = ExceptionUtils.getRootCauseMessage(e); throw new WebApplicationException(Response.serverError().entity(error).build()); } }
From source file:org.efaps.esjp.admin.update.UpdatePack.java
/**
 * Check revisions: unpacks an uploaded (optionally gzipped) tar "pack", reads its
 * revisions.json manifest, determines which items must be (re)installed, and runs
 * the resulting installs — first the directly changed items, then items that depend
 * on them.
 *
 * @param _parameter Parameter as passed by the eFaps API; must carry a file
 *        parameter named "pack"
 * @return the return
 * @throws EFapsException on error
 * @throws InstallationException on error
 */
public Return execute(final Parameter _parameter) throws EFapsException, InstallationException {
    final Context context = Context.getThreadContext();
    final Context.FileParameter fileItem = context.getFileParameters().get("pack");
    // Decide by filename whether the upload is a .tar.gz or plain .tar.
    final boolean compress = GzipUtils.isCompressedFilename(fileItem.getName());
    try (final TarArchiveInputStream tarInput = new TarArchiveInputStream(
            compress ? new GzipCompressorInputStream(fileItem.getInputStream()) : fileItem.getInputStream());) {
        // Resolve a temp folder: configured one, or the JVM default (probed via a
        // throwaway temp file).
        File tmpfld = AppConfigHandler.get().getTempFolder();
        if (tmpfld == null) {
            final File temp = File.createTempFile("eFaps", ".tmp");
            tmpfld = temp.getParentFile();
            temp.delete();
        }
        final File updateFolder = new File(tmpfld, Update.TMPFOLDERNAME);
        if (!updateFolder.exists()) {
            updateFolder.mkdirs();
        }
        // Each run extracts into a unique, timestamp-named subfolder.
        final File dateFolder = new File(updateFolder, ((Long) new Date().getTime()).toString());
        dateFolder.mkdirs();
        // Extract every tar entry to disk and remember entry-name -> file URL.
        final Map<String, URL> files = new HashMap<>();
        TarArchiveEntry currentEntry = tarInput.getNextTarEntry();
        while (currentEntry != null) {
            final byte[] bytess = new byte[(int) currentEntry.getSize()];
            // NOTE(review): a single read() may return fewer bytes than the entry size —
            // consider a read loop (e.g. IOUtils.readFully) to be safe.
            tarInput.read(bytess);
            final File file = new File(dateFolder.getAbsolutePath() + "/" + currentEntry.getName());
            file.getParentFile().mkdirs();
            // NOTE(review): not try-with-resources; the stream leaks if write() throws.
            final FileOutputStream output = new FileOutputStream(file);
            output.write(bytess);
            output.close();
            files.put(currentEntry.getName(), file.toURI().toURL());
            currentEntry = tarInput.getNextTarEntry();
        }
        final Map<RevItem, InstallFile> installFiles = new HashMap<>();
        // revisions.json is the manifest describing every item in the pack.
        final URL json = files.get("revisions.json");
        final ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(new JodaModule());
        final List<RevItem> items = mapper.readValue(new File(json.toURI()),
                mapper.getTypeFactory().constructCollectionType(List.class, RevItem.class));
        // Keep a full copy: getInstallFiles(...) below appears to consume "items".
        final List<RevItem> allItems = new ArrayList<>();
        allItems.addAll(items);
        // Match manifest items against the different CI object kinds.
        installFiles.putAll(getInstallFiles(files, items, CIAdmin.Abstract));
        installFiles.putAll(getInstallFiles(files, items, CIAdminUser.Abstract));
        installFiles.putAll(getInstallFiles(files, items, CIAdminAccess.AccessSet));
        installFiles.putAll(getInstallFiles(files, items, CICommon.DBPropertiesBundle));
        // Whatever is still left in "items" was not matched above; install it as-is.
        final Iterator<RevItem> iter = items.iterator();
        int i = 0;
        while (iter.hasNext()) {
            final RevItem item = iter.next();
            LOG.info("Adding unfound Item {} / {}: {}", i, items.size(), item.getIdentifier());
            final InstallFile installFile = new InstallFile().setName(item.getName4InstallFile())
                    .setURL(item.getURL(files)).setType(item.getFileType().getType())
                    .setRevision(item.getRevision()).setDate(item.getDate());
            installFiles.put(item, installFile);
            i++;
        }
        // Install in deterministic (name) order.
        final List<InstallFile> installFileList = new ArrayList<>(installFiles.values());
        Collections.sort(installFileList, new Comparator<InstallFile>() {
            @Override
            public int compare(final InstallFile _installFile0, final InstallFile _installFile1) {
                return _installFile0.getName().compareTo(_installFile1.getName());
            }
        });
        final List<InstallFile> dependendFileList = new ArrayList<>();
        // Check if an object that depends on another object must be added to the update.
        final Map<String, String> depenMap = getDependendMap();
        final Set<String> tobeAdded = new HashSet<>();
        for (final RevItem item : installFiles.keySet()) {
            if (depenMap.containsKey(item.getIdentifier())) {
                tobeAdded.add(depenMap.get(item.getIdentifier()));
            }
        }
        if (!tobeAdded.isEmpty()) {
            // Drop dependents that are already part of the main install list.
            for (final RevItem item : installFiles.keySet()) {
                final Iterator<String> tobeiter = tobeAdded.iterator();
                while (tobeiter.hasNext()) {
                    final String ident = tobeiter.next();
                    if (item.getIdentifier().equals(ident)) {
                        tobeiter.remove();
                    }
                }
            }
        }
        if (!tobeAdded.isEmpty()) {
            i = 1;
            // Add the dependent objects to the list that are still missing.
            for (final RevItem item : allItems) {
                if (tobeAdded.contains(item.getIdentifier())) {
                    LOG.info("Adding releated Item {} / {}: {}", i, tobeAdded.size(), item);
                    final InstallFile installFile = new InstallFile().setName(item.getName4InstallFile())
                            .setURL(item.getURL(files)).setType(item.getFileType().getType())
                            .setRevision(item.getRevision()).setDate(item.getDate());
                    dependendFileList.add(installFile);
                    i++;
                }
            }
        }
        // Run the main install, then a second pass for the dependent items.
        if (!installFileList.isEmpty()) {
            final Install install = new Install(true);
            for (final InstallFile installFile : installFileList) {
                LOG.info("...Adding to Update: '{}' ", installFile.getName());
                install.addFile(installFile);
            }
            install.updateLatest(null);
        }
        if (!dependendFileList.isEmpty()) {
            LOG.info("Update for related Items");
            final Install install = new Install(true);
            for (final InstallFile installFile : dependendFileList) {
                LOG.info("...Adding to Update: '{}' ", installFile.getName());
                install.addFile(installFile);
            }
            install.updateLatest(null);
        }
        LOG.info("Terminated update.");
    } catch (final IOException e) {
        // Errors are logged, not propagated — the UI call returns normally regardless.
        LOG.error("Catched", e);
    } catch (final URISyntaxException e) {
        LOG.error("Catched", e);
    }
    return new Return();
}
From source file:com.fer.hr.web.rest.resources.Query2Resource.java
/** * Zoom into a query result table./*from ww w. j ava 2 s. c o m*/ * @summary Zoom in. * @param queryName The query name * @param positionListString The zoom position * @return A new thin query model with a reduced table. */ @POST @Consumes("application/x-www-form-urlencoded") @Path("/{queryname}/zoomin") public ThinQuery zoomIn(@PathParam("queryname") String queryName, @FormParam("selections") String positionListString) { try { if (log.isDebugEnabled()) { log.debug("TRACK\t" + "\t/query/" + queryName + "/zoomIn\tPUT"); } List<List<Integer>> realPositions = new ArrayList<>(); if (StringUtils.isNotBlank(positionListString)) { ObjectMapper mapper = new ObjectMapper(); String[] positions = mapper.readValue(positionListString, mapper.getTypeFactory().constructArrayType(String.class)); if (positions != null && positions.length > 0) { for (String position : positions) { String[] rPos = position.split(":"); List<Integer> cellPosition = new ArrayList<>(); for (String p : rPos) { Integer pInt = Integer.parseInt(p); cellPosition.add(pInt); } realPositions.add(cellPosition); } } } return thinQueryService.zoomIn(queryName, realPositions); } catch (Exception e) { log.error("Cannot zoom in on query (" + queryName + ")", e); throw new WebApplicationException(e); } }
From source file:org.hawkular.metrics.clients.ptrans.fullstack.CollectdITest.java
private List<Point> getServerData() throws Exception { ObjectMapper objectMapper = new ObjectMapper(); HttpURLConnection urlConnection = (HttpURLConnection) new URL(findNumericMetricsUrl).openConnection(); urlConnection.connect();//from w ww . j a va 2 s .co m int responseCode = urlConnection.getResponseCode(); if (responseCode != HttpURLConnection.HTTP_OK) { String msg = "Could not get metrics list from server: %s, %d"; fail(String.format(Locale.ROOT, msg, findNumericMetricsUrl, responseCode)); } List<String> metricNames; try (InputStream inputStream = urlConnection.getInputStream()) { TypeFactory typeFactory = objectMapper.getTypeFactory(); CollectionType valueType = typeFactory.constructCollectionType(List.class, MetricName.class); List<MetricName> value = objectMapper.readValue(inputStream, valueType); metricNames = value.stream().map(MetricName::getId).collect(toList()); } Stream<Point> points = Stream.empty(); for (String metricName : metricNames) { String[] split = metricName.split("\\."); String type = split[split.length - 1]; urlConnection = (HttpURLConnection) new URL(findNumericDataUrl(metricName)).openConnection(); urlConnection.connect(); responseCode = urlConnection.getResponseCode(); if (responseCode != HttpURLConnection.HTTP_OK) { fail("Could not load metric data from server: " + responseCode); } try (InputStream inputStream = urlConnection.getInputStream()) { TypeFactory typeFactory = objectMapper.getTypeFactory(); CollectionType valueType = typeFactory.constructCollectionType(List.class, MetricData.class); List<MetricData> data = objectMapper.readValue(inputStream, valueType); Stream<Point> metricPoints = data.stream() .map(metricData -> new Point(type, metricData.timestamp, metricData.value)); points = Stream.concat(points, metricPoints); } } return points.sorted(Comparator.comparing(Point::getType).thenComparing(Point::getTimestamp)) .collect(toList()); }
From source file:uk.ac.ebi.metabolights.webservice.client.MetabolightsWsClient.java
private <T> RestResponse<T> deserializeJSONString(String response, Class<T> valueType) { logger.debug("Parsing json response into MetaboLights model: " + response); // Parse response (json) into Study entity... // Add guava serialization for multimaps (Table.Fields is a multimap now). ObjectMapper mapper = new ObjectMapper(); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); try {/*from w ww . j a v a2s .c o m*/ JavaType type = mapper.getTypeFactory().constructParametricType(RestResponse.class, valueType); return mapper.readValue(response, type); } catch (IOException e) { logger.error("Can't parse ws response (json) back into " + valueType.getName() + ": " + e.getMessage()); logger.debug("Response is: " + response); } return null; }
From source file:org.flockdata.integration.FileProcessor.java
private int processJsonTags(String fileName) throws FlockException { Collection<TagInputBean> tags; ObjectMapper mapper = FdJsonObjectMapper.getObjectMapper(); int processed = 0; try {//from www . ja va2s .com File file = new File(fileName); InputStream stream = null; if (!file.exists()) { // Try as a resource stream = ClassLoader.class.getResourceAsStream(fileName); if (stream == null) { logger.error("{} does not exist", fileName); throw new FlockException(fileName + " Does not exist"); } } TypeFactory typeFactory = mapper.getTypeFactory(); CollectionType collType = typeFactory.constructCollectionType(ArrayList.class, TagInputBean.class); if (file.exists()) tags = mapper.readValue(file, collType); else tags = mapper.readValue(stream, collType); for (TagInputBean tag : tags) { getPayloadWriter().writeTag(tag, "JSON Tag Importer"); processed++; } } catch (IOException e) { logger.error("Error writing exceptions with {} [{}]", fileName, e.getMessage()); throw new RuntimeException("IO Exception ", e); } finally { if (processed > 0L) getPayloadWriter().flush(); } return tags.size(); }