Monday, 20 October 2014

SSH connection from your client machine to cloud server

This program connects from your local client machine to the cloud server's file system over SSH and uploads a file from the local machine to the cloud server using SFTP (via the JSch library).
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;

public class ConnectSSH {

 public void connect(String dnsName, String privKey) throws IOException, SftpException {
     JSch jSch = new JSch();
     Session session = null;
     ChannelSftp channel = null;
     try {
         // Authenticate with the private key file (e.g. the .pem key of the cloud instance)
         jSch.addIdentity(privKey);

         // Open a session for the remote user on the given DNS name, port 22
         session = jSch.getSession("huser", dnsName, 22);

         // Skip host-key verification; required if the server is not a trusted host
         java.util.Properties config = new java.util.Properties();
         config.put("StrictHostKeyChecking", "no");
         session.setConfig(config);

         System.out.println("Connecting SSH to " + dnsName + " - please wait...");
         session.connect();
         System.out.println(session.getHost());

         // Open an SFTP channel and upload the local file into /home/hadoop on the server
         channel = (ChannelSftp) session.openChannel("sftp");
         channel.connect();
         channel.cd("/home/hadoop");
         File localFile = new File("/home/hadoop/work/hivedata.txt");
         channel.put(new FileInputStream(localFile), localFile.getName());
         System.out.println("--------File has been copied----------");
     } catch (JSchException e) {
         e.printStackTrace();
     } finally {
         // Always release the channel and session, even if the upload fails
         if (channel != null) {
             channel.disconnect();
         }
         if (session != null) {
             session.disconnect();
         }
     }
 }

 public static void main(String[] args) throws SftpException {
     ConnectSSH ssh = new ConnectSSH();
     String privKey = "/home/hadoop/hadoop-key.pem";
     try {
         ssh.connect("xxx-xx-xx-xxx-xx.compute-1.amazonaws.com", privKey);
     } catch (IOException e) {
         e.printStackTrace();
     }
 }

}
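
The same approach works in the other direction. As a minimal sketch (the class name DownloadSSH and the file paths are my own placeholders; the user, key and host follow the example above), ChannelSftp.get() copies a file from the cloud server back to the local machine:

import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpException;

public class DownloadSSH {

    public void download(String dnsName, String privKey) throws JSchException, SftpException {
        JSch jSch = new JSch();
        jSch.addIdentity(privKey);
        Session session = jSch.getSession("huser", dnsName, 22);

        java.util.Properties config = new java.util.Properties();
        config.put("StrictHostKeyChecking", "no");
        session.setConfig(config);
        session.connect();

        ChannelSftp channel = (ChannelSftp) session.openChannel("sftp");
        channel.connect();
        // get(remotePath, localPath) copies the remote file to the local file system
        channel.get("/home/hadoop/hivedata.txt", "/home/hadoop/work/hivedata-copy.txt");
        channel.disconnect();
        session.disconnect();
    }

    public static void main(String[] args) throws JSchException, SftpException {
        new DownloadSSH().download("xxx-xx-xx-xxx-xx.compute-1.amazonaws.com", "/home/hadoop/hadoop-key.pem");
    }
}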

Hive JDBC Connection with your localhost or cloud hosted cluster

import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;

public class HiveJdbcClient {
  private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

  /**
   * @param args
   * @throws SQLException
   */
  public static void main(String[] args) throws SQLException {
    // Load the Hive JDBC driver class before opening the connection
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      e.printStackTrace();
      System.exit(1);
    }
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    String tableName = "testHiveDriverTable";
    stmt.executeQuery("drop table " + tableName);
    ResultSet res = stmt.executeQuery("create table " + tableName + " (key int, value string)" + " ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ");
    // show tables
    String sql = "show tables '" + tableName + "'";
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    if (res.next()) {
      System.out.println(res.getString(1));
    }
    // describe table
    sql = "describe " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(res.getString(1) + "\t" + res.getString(2));
    }

    // load data into table
    // NOTE: filepath has to be local to the hive server
    String filepath = "/home/hadoop/work/hivedata.txt";
    sql = "load data local inpath '" + filepath + "' into table " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);

    // select * query
    sql = "select * from " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(String.valueOf(res.getInt(1)) + "\t" + res.getString(2));
    }

    // regular hive query
    sql = "select count(1) from " + tableName;
    System.out.println("Running: " + sql);
    res = stmt.executeQuery(sql);
    while (res.next()) {
      System.out.println(res.getString(1));
    }
    // Release JDBC resources when finished
    stmt.close();
    con.close();
  }
}
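
The client above uses the original HiveServer and its org.apache.hadoop.hive.jdbc.HiveDriver. If your cluster runs HiveServer2 instead, the driver class and JDBC URL change; a minimal sketch of the equivalent connection (the class name HiveServer2JdbcClient is my own) looks like this:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveServer2JdbcClient {
  public static void main(String[] args) throws SQLException, ClassNotFoundException {
    // HiveServer2 ships a different driver class and uses the "hive2" URL scheme
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("show tables");
    while (res.next()) {
      System.out.println(res.getString(1));
    }
    con.close();
  }
}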

Note: To make the Hive JDBC connection work against an Amazon cloud server, open an SSH tunnel in PuTTY that forwards local port 10000 to port 10000 on the server (Connection > SSH > Tunnels), then connect to jdbc:hive://localhost:10000/default exactly as above. A JSch-based alternative to the PuTTY tunnel is sketched below.
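
If you would rather open the tunnel from Java than from PuTTY, JSch can do the same local port forwarding. This is only a sketch under the assumptions of the ConnectSSH example above (user huser, the same .pem key and server DNS name):

import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;

public class HiveTunnel {
    public static void main(String[] args) throws JSchException, java.io.IOException {
        String privKey = "/home/hadoop/hadoop-key.pem";
        String dnsName = "xxx-xx-xx-xxx-xx.compute-1.amazonaws.com";

        JSch jSch = new JSch();
        jSch.addIdentity(privKey);
        Session session = jSch.getSession("huser", dnsName, 22);

        java.util.Properties config = new java.util.Properties();
        config.put("StrictHostKeyChecking", "no");
        session.setConfig(config);
        session.connect();

        // Forward local port 10000 to port 10000 on the server, so that
        // jdbc:hive://localhost:10000/default reaches the remote Hive server
        session.setPortForwardingL(10000, "localhost", 10000);
        System.out.println("Tunnel open - run HiveJdbcClient, then press Enter to close it");
        System.in.read();
        session.disconnect();
    }
}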