// NOTE(review): stray text "Java tutorial" removed — bare prose before the license header is not valid Java and breaks compilation.
/**
 * Flamingo HDFS File Uploader - a tool to upload from datasource to datasource and schedule jobs
 *
 * Copyright (C) 2011-2012 Cloudine.
 *
 * This file is part of Flamingo HDFS File Uploader.
 *
 * Flamingo HDFS File Uploader is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Flamingo HDFS File Uploader is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.openflamingo.uploader.handler;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.mortbay.jetty.HttpStatus;
import org.openflamingo.uploader.JobContext;
import org.openflamingo.uploader.exception.SystemException;
import org.openflamingo.uploader.jaxb.*;
import org.openflamingo.uploader.util.ExceptionUtils;
import org.openflamingo.uploader.util.FileSystemScheme;
import org.openflamingo.uploader.util.FileSystemUtils;
import org.openflamingo.uploader.util.StringUtils;
import org.slf4j.Logger;
import org.springframework.http.client.ClientHttpRequest;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.util.FileCopyUtils;

import java.nio.charset.Charset;
import java.util.List;

import static org.openflamingo.uploader.util.FileSystemUtils.*;
import static org.springframework.http.HttpMethod.GET;
import static org.springframework.http.HttpMethod.POST;

/**
 * Handler that requests an HTTP URL and stores the response body as a file
 * on either the local filesystem or HDFS, depending on the configured target.
 *
 * @author Edward KIM
 * @since 0.1
 */
public class HttpToLocalHandler implements Handler {

    /**
     * Job Logger used to trace per-job progress.
     */
    private Logger jobLogger;

    /**
     * Suffix appended to a file while it is being processed, so that
     * downstream consumers can skip in-flight files.
     */
    public static final String PROCESSING_FILE_QUALIFIER = ".processing"; // FIXME

    /**
     * Hadoop Configuration key for the HDFS URL.
     */
    public static final String HDFS_URL = "fs.default.name";

    /**
     * Hadoop Configuration key for the Job Tracker address.
     */
    public static final String JOB_TRACKER = "mapred.job.tracker";

    /**
     * HDFS File Uploader Job Context.
     */
    private JobContext jobContext;

    /**
     * HDFS File Uploader Job.
     */
    private Job job;

    /**
     * Http Ingress definition of this job (URL, method, headers, target).
     */
    private Http http;

    /**
     * Default HTTP Content Type.
     */
    private static final String DEFAULT_CONTENT_TYPE = "plain/text";

    /**
     * Character set used to decode the HTTP response body and to encode it
     * when writing to the target filesystem.
     */
    private static final String DEFAULT_CHAR_SET = "UTF-8";

    /**
     * Constructor.
     *
     * @param jobContext Flamingo HDFS File Uploader Job Context
     * @param job        Job
     * @param http       Http Ingress
     * @param jobLogger  Job Logger for trace
     */
    public HttpToLocalHandler(JobContext jobContext, Job job, Http http, Logger jobLogger) {
        this.jobContext = jobContext;
        this.job = job;
        this.http = http;
        this.jobLogger = jobLogger;
    }

    /**
     * Requests the configured HTTP URL and writes the response body to the
     * configured target (local filesystem or HDFS).
     *
     * @throws Exception if the HTTP request or the filesystem write fails
     */
    @Override
    public void execute() throws Exception {
        String response = getResponse(http);
        String type = jobContext.getValue(http.getTarget().getType());
        String filename = jobContext.getValue(http.getTarget().getFilename());
        String target = jobContext.getValue(http.getTarget().getDirectory());
        if ("LOCAL".equals(type)) {
            String targetPath = correctPath(target);
            FileSystem fs = FileSystemUtils.getFileSystem(targetPath);
            saveResponseToFS(response, fs, targetPath, filename);
        } else {
            // Non-LOCAL targets are resolved against the named Hadoop cluster's HDFS URL.
            String cluster = assertNotEmpty(jobContext.getValue(http.getTarget().getCluster()));
            Configuration configuration = getConfiguration(cluster);
            String targetPath = jobContext.getValue(configuration.get(HDFS_URL) + target);
            FileSystem fs = FileSystemUtils.getFileSystem(targetPath);
            saveResponseToFS(response, fs, targetPath, filename);
        }
    }

    /**
     * Executes the HTTP request described by the given Http ingress and
     * returns the response body as a string.
     *
     * @param http HTTP ingress definition
     * @return HTTP response body as a string
     * @throws Exception if the request fails or a non-200 status is returned
     */
    private String getResponse(Http http) throws Exception {
        jobLogger.info("Requesting the HTTP URL.");
        String url = jobContext.getValue(http.getUrl());
        String method = jobContext.getValue(http.getMethod().getType());
        String body = jobContext.getValue(http.getBody());
        jobLogger.info("HTTP URL Information :");
        jobLogger.info("\tURL = {}", url);
        jobLogger.info("\tMethod = {}", method);

        HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory();
        ClientHttpRequest request = null;
        if ("POST".equals(method)) {
            request = factory.createRequest(new java.net.URI(url), POST);
            // FIXME: 'body' is resolved above but never written to the request,
            // so POST requests are sent without a request body. Confirm intent.
        } else {
            request = factory.createRequest(new java.net.URI(url), GET);
        }

        if (http.getHeaders() != null && http.getHeaders().getHeader().size() > 0) {
            List<Header> header = http.getHeaders().getHeader();
            jobLogger.info("HTTP Header :");
            for (Header h : header) {
                String name = h.getName();
                String value = jobContext.getValue(h.getValue());
                request.getHeaders().add(name, value);
                jobLogger.info("\t{} = {}", name, value);
            }
        }

        String responseBodyAsString = null;
        ClientHttpResponse response = null;
        try {
            response = request.execute();
            // Decode with the declared charset instead of the platform default
            // so behavior does not vary across machines.
            responseBodyAsString = new String(
                FileCopyUtils.copyToByteArray(response.getBody()), Charset.forName(DEFAULT_CHAR_SET));
            jobLogger.debug("Received the HTTP response body.\n{}", responseBodyAsString);
            jobLogger.info("HTTP request finished with status '{}({})'.",
                response.getStatusText(), response.getRawStatusCode());
            if (response.getRawStatusCode() != HttpStatus.ORDINAL_200_OK) {
                throw new SystemException(ExceptionUtils.getMessage(
                    "Failed to request the HTTP URL. Expected OK but the status was '{}({})'.",
                    response.getStatusText(), response.getRawStatusCode()));
            }
        } catch (Exception ex) {
            // Log through the job logger instead of printStackTrace(); the cause
            // is preserved in the rethrown SystemException.
            jobLogger.error("Failed to request the HTTP URL.", ex);
            throw new SystemException(ExceptionUtils.getMessage(
                "Failed to request the HTTP URL. Cause: {}",
                ExceptionUtils.getRootCause(ex).getMessage()), ex);
        } finally {
            // 'response' is null when request.execute() itself threw.
            if (response != null) {
                try {
                    response.close();
                } catch (Exception ignored) {
                    // Best-effort close; nothing useful to do here.
                }
            }
        }
        return responseBodyAsString;
    }

    /**
     * Writes the HTTP response body to a file on the given filesystem.
     *
     * @param response        HTTP response body
     * @param fs              target FileSystem (local or HDFS)
     * @param targetDirectory target directory
     * @param filename        target filename
     * @throws SystemException if the file cannot be written
     */
    private void saveResponseToFS(String response, FileSystem fs, String targetDirectory, String filename) {
        FSDataOutputStream dos = null;
        Path path = new Path(targetDirectory, filename);
        try {
            dos = fs.create(path);
            // Encode with the same declared charset used to decode the response.
            org.apache.commons.io.IOUtils.write(
                response.getBytes(Charset.forName(DEFAULT_CHAR_SET)), dos);
            jobLogger.info("Saved the HTTP response to '{}'.", path);
        } catch (Exception ex) {
            throw new SystemException(
                ExceptionUtils.getMessage("Cannot save the HTTP response to '{}'.", path), ex);
        } finally {
            if (dos != null) {
                org.apache.commons.io.IOUtils.closeQuietly(dos);
            }
        }
    }

    /**
     * Validates the job definition: required URL/method/target values must be
     * present and the target directory must be creatable on its filesystem.
     *
     * @throws SystemException if a required value is missing or invalid
     */
    @Override
    public void validate() {
        assertNotEmpty(jobContext.getValue(http.getUrl()));
        String method = assertNotEmpty(jobContext.getValue(http.getMethod().getType()));
        if ("POST".equals(method)) {
            assertNotEmpty(jobContext.getValue(http.getBody()));
        }
        String type = jobContext.getValue(http.getTarget().getType());
        String directory = assertNotEmpty(jobContext.getValue(http.getTarget().getDirectory()));
        assertNotEmpty(jobContext.getValue(http.getTarget().getFilename()));
        if ("LOCAL".equals(type)) {
            String targetPath = correctPath(directory);
            checkScheme(targetPath, FileSystemScheme.LOCAL);
            testCreateDir(new Path(targetPath));
        } else {
            String cluster = assertNotEmpty(jobContext.getValue(http.getTarget().getCluster()));
            Configuration configuration = getConfiguration(cluster);
            String targetPath = jobContext.getValue(configuration.get(HDFS_URL) + directory);
            checkScheme(targetPath, FileSystemScheme.HDFS);
            testCreateDir(new Path(targetPath));
        }
    }

    /**
     * Builds a Hadoop Configuration for the named cluster from the job model.
     *
     * @param clusterName Hadoop cluster name
     * @return {@link org.apache.hadoop.conf.Configuration} populated with the
     *         cluster's fs.default.name, job tracker, and extra properties
     */
    public Configuration getConfiguration(String clusterName) {
        Configuration configuration = new Configuration();
        List<Cluster> clusters = jobContext.getModel().getClusters().getCluster();
        for (Cluster cluster : clusters) {
            if (clusterName.equals(cluster.getName())) {
                configuration.set(HDFS_URL, cluster.getFsDefaultName());
                configuration.set(JOB_TRACKER, cluster.getMapredJobTracker());
                List<Property> properties = cluster.getProperties().getProperty();
                for (Property property : properties) {
                    configuration.set(property.getName(), property.getValue());
                }
            }
        }
        return configuration;
    }

    /**
     * Asserts that the given value is neither null nor empty.
     *
     * @param value value to check
     * @return the value, unchanged, when it is non-empty
     * @throws SystemException if the value is null or empty
     */
    protected String assertNotEmpty(String value) {
        if (StringUtils.isEmpty(value)) {
            throw new SystemException(ExceptionUtils
                .getMessage("A null or empty value is not allowed but was '{}'.", value));
        }
        return value;
    }
}