
How to Batch Read and Write HBase with Spark

Published: 2021-12-08 13:56:55  Source: 億速云  Reads: 286  Author: 小新  Category: Cloud Computing

This article walks through how to batch write to and read from HBase with Spark. The editor finds it quite practical and shares it here for reference; hopefully you will take something away from it.

FileAna.scala

import java.net.URI
import java.util.UUID

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Put, Scan}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.{Base64, Bytes}
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.{SparkConf, SparkContext}
// Also needed: the project's MAVLink classes (MAVLinkMessage, msg_mission_item,
// QuickParser, ByteAndHex); their package paths depend on your MAVLink library.

object FileAna {

  val hdfsPath = "hdfs://master:9000"
  val hdfs = FileSystem.get(new URI(hdfsPath), new Configuration())

  def main(args: Array[String]): Unit = {
    // The jars listed here are shipped to the executors so the HBase
    // classes are available cluster-wide.
    val conf = new SparkConf().setAppName("FileAna").setMaster("spark://master:7077").
      set("spark.driver.host", "192.168.1.127").
      setJars(List("/home/pang/woozoomws/spark-service.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-common-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-client-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-protocol-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/htrace-core-3.1.0-incubating.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-server-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/metrics-core-2.2.0.jar"))
    val sc = new SparkContext(conf)

    // Parse each hex-encoded MAVLink line and convert it into an HBase Put.
    val rdd = sc.textFile("hdfs://master:9000/woozoom/msgfile.txt")
    val rdd2 = rdd.map(x => convertToHbase(anaMavlink(x)))

    // Load the cluster settings and point the job at the target table.
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.addResource("/home/hadoop/software/hbase-1.2.2/conf/hbase-site.xml")

    val jobConf = new JobConf(hbaseConf, this.getClass)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, "MissionItem")

    // Batch-write the whole RDD to HBase through the old mapred API.
    rdd2.saveAsHadoopDataset(jobConf)

    sc.stop()
  }

  // Serialize a Scan to a Base64 string so it can travel inside a Hadoop
  // Configuration (used by the read side; kept here as in the original).
  def convertScanToString(scan: Scan): String = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }

  // Turn a parsed MAVLink message into a Put keyed by a random UUID.
  // Mission items store their x/y/z coordinates in the "data" column family.
  def convertToHbase(msg: MAVLinkMessage): (ImmutableBytesWritable, Put) = {
    val p = new Put(Bytes.toBytes(UUID.randomUUID().toString))
    msg match {
      case missionItem: msg_mission_item =>
        p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("x"), Bytes.toBytes(missionItem.x))
        p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("y"), Bytes.toBytes(missionItem.y))
        p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("z"), Bytes.toBytes(missionItem.z))
      case _ => // other message types yield an empty Put, assumed absent in this file
    }
    (new ImmutableBytesWritable, p)
  }

  // Decode a hex string into raw bytes and unpack it as a MAVLink message.
  val anaMavlink = (str: String) => {
    val bytes = ByteAndHex.hexStringToBytes(str)
    QuickParser.parse(bytes).unpack()
  }
}
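
FileAna writes through the old mapred API (saveAsHadoopDataset with a JobConf). For reference, a minimal sketch of the same write through the newer mapreduce API, assuming the rdd2 and hbaseConf defined in main above, could look like this:

// Sketch only: equivalent write via the mapreduce (new) API.
import org.apache.hadoop.hbase.mapreduce.{TableOutputFormat => NewTableOutputFormat}
import org.apache.hadoop.mapreduce.Job

val job = Job.getInstance(hbaseConf)
job.getConfiguration.set(NewTableOutputFormat.OUTPUT_TABLE, "MissionItem")
job.setOutputFormatClass(classOf[NewTableOutputFormat[ImmutableBytesWritable]])
rdd2.saveAsNewAPIHadoopDataset(job.getConfiguration)

Either way, each record is an (ImmutableBytesWritable, Put) pair; only the output format class and its configuration carrier differ.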

ReadHBase.scala

import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.Base64
import org.apache.spark.{SparkConf, SparkContext}

object ReadHBase {

  val hdfsPath = "hdfs://master:9000"
  val hdfs = FileSystem.get(new URI(hdfsPath), new Configuration())

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ReadHBase").setMaster("spark://master:7077").
      set("spark.driver.host", "192.168.1.127").
      setJars(List("/home/pang/woozoomws/spark-service.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-common-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-client-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-protocol-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/htrace-core-3.1.0-incubating.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-server-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/metrics-core-2.2.0.jar"))
    val sc = new SparkContext(conf)

    // Load the cluster settings, choose the table, and serialize the Scan
    // into the configuration; TableInputFormat reads it back from there.
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.addResource("/home/hadoop/software/hbase-1.2.2/conf/hbase-site.xml")
    hbaseConf.set(TableInputFormat.INPUT_TABLE, "MissionItem")
    val scan = new Scan()
    hbaseConf.set(TableInputFormat.SCAN, convertScanToString(scan))

    // Each HBase region becomes one RDD partition of (row key, Result) pairs.
    val readRDD = sc.newAPIHadoopRDD(hbaseConf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])

    val count = readRDD.count()
    println("Mission Item Count:" + count)

    sc.stop()
  }

  // Serialize a Scan to a Base64 string for TableInputFormat.
  def convertScanToString(scan: Scan): String = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }
}
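
ReadHBase only counts the rows. To actually use the stored coordinates, you can map each Result to plain values before collecting. A minimal sketch, assuming x, y and z were written as floats by convertToHbase above:

// Sketch only: extract the "data" family columns from each Result.
// Converting to primitives on the executors also avoids shipping
// non-serializable Result objects back to the driver.
import org.apache.hadoop.hbase.util.Bytes

val points = readRDD.map { case (_, result) =>
  val x = Bytes.toFloat(result.getValue(Bytes.toBytes("data"), Bytes.toBytes("x")))
  val y = Bytes.toFloat(result.getValue(Bytes.toBytes("data"), Bytes.toBytes("y")))
  val z = Bytes.toFloat(result.getValue(Bytes.toBytes("data"), Bytes.toBytes("z")))
  (x, y, z)
}
points.take(5).foreach(println)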

That concludes this article on how to batch read and write HBase with Spark. Hopefully the content above is of some help and teaches you something new. If you found the article useful, please share it so more people can see it.
