
Hadoop MapReduce Java Example

Published: 2020-10-07 22:44:11 · Author: zjy1002261870 · Category: Big Data

WordCount workflow:

input -> split -> map -> shuffle -> reduce -> output

Running Hadoop's stock WordCount example against an input file:
hadoop jar /usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.3.jar wordcount 10803060234.txt /output
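
To make the stages concrete, here is how a tiny (hypothetical) input would flow through them:

    input:    hello world
              hello hadoop
    map:      (hello,1) (world,1) (hello,1) (hadoop,1)
    shuffle:  (hadoop,[1]) (hello,[1,1]) (world,[1])
    reduce:   (hadoop,1) (hello,2) (world,1)

The three classes below implement exactly this pipeline: a driver (Test), a mapper (WordMapper), and a reducer (WordReducer).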

package wordcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Test {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // HDFS namenode and YARN resource manager of the target cluster
        conf.set("fs.defaultFS", "hdfs://172.26.19.40:9000");
        // Jar that gets shipped to the cluster when submitting from an IDE
        conf.set("mapreduce.job.jar", "target/wc.jar");
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.hostname", "hmaster");
        // Allow job submission from a non-Linux client (e.g. a Windows IDE)
        conf.set("mapreduce.app-submission.cross-platform", "true");

        Job job = Job.getInstance(conf);
        job.setMapperClass(WordMapper.class);
        job.setReducerClass(WordReducer.class);

        // Map output types (Text/IntWritable) differ from the final reduce
        // output types (Text/LongWritable), so both pairs must be declared.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        // Input and output paths from the command line, e.g.
        // "10803060234.txt /output" as in the example command above.
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}
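
With the input and output paths taken from args as above, the job can be submitted from the IDE (the cross-platform setting exists for exactly that case) or from the command line. A hypothetical invocation, reusing the jar path from the configuration:

    hadoop jar target/wc.jar wordcount.Test 10803060234.txt /output

Note that the output directory must not exist yet; FileOutputFormat refuses to overwrite an existing path.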

package wordcount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // The key is the line's byte offset in the file; only the text is used.
        for (String w : value.toString().split(" ")) {
            word.set(w);
            context.write(word, ONE); // emit (word, 1) for every token
        }
    }

}
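
One caveat about the mapper: value.toString().split(" ") yields empty tokens when words are separated by more than one space, and those empty strings get counted as words. A common variant (an alternative sketch, not in the original post) tokenizes on any run of whitespace instead:

    // Inside map(), replacing the split(" ") loop:
    java.util.StringTokenizer itr = new java.util.StringTokenizer(value.toString());
    while (itr.hasMoreTokens()) {
        word.set(itr.nextToken());   // reuse the Text instance
        context.write(word, ONE);    // emit (word, 1)
    }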

package wordcount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WordReducer extends Reducer<Text, IntWritable, Text, LongWritable> {

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values,
            Context context) throws IOException, InterruptedException {
        // Sum all the 1s emitted for this word across every mapper.
        long count = 0L;
        for (IntWritable value : values) {
            count += value.get();
        }
        context.write(key, new LongWritable(count));
    }

}
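
A combiner is often added to WordCount to pre-aggregate map output before the shuffle, but WordReducer cannot be reused for that role here: a combiner's output types must match the map output types (Text/IntWritable), while this reducer emits Text/LongWritable. A minimal combiner sketch under that constraint (a hypothetical class, not in the original post):

    public class WordCombiner extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable v : values) {
                sum += v.get(); // partial sum on the map side
            }
            context.write(key, new IntWritable(sum));
        }
    }

It would be wired into the driver with job.setCombinerClass(WordCombiner.class).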

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.skcc</groupId>
<artifactId>wordcount</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>wordcount</name>
<description>count the word</description>

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <hadoop.version>2.7.3</hadoop.version>
</properties>
<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
</dependencies>

</project>
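
One mismatch worth noting: with this POM, mvn package produces target/wordcount-0.0.1-SNAPSHOT.jar, while the driver hard-codes mapreduce.job.jar as target/wc.jar. Adding a finalName to the build section (a suggested addition, not in the original post) makes the two agree:

    <build>
        <finalName>wc</finalName>
    </build>

After mvn clean package, the jar lands at target/wc.jar and the driver's configuration resolves.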
