This article demonstrates how to configure and use Storm. The content is brief and clearly organized, and should help clear up common questions; let's work through "how to configure and use Storm" together.
The sample configuration and code are as follows:
# storm.yaml configuration

# zookeeper
storm.zookeeper.servers:
    - "bigdata01"
    - "bigdata02"
    - "bigdata03"
# local directory where Storm stores its data
storm.local.dir: "/apps/storm"
# nimbus master
nimbus.seeds: ["bigdata00"]
# worker ports
supervisor.slots.ports:
    - 6700
    - 6701
    - 6702
    - 6703

Startup commands (run from the Storm installation directory):

nohup bin/storm nimbus &
nohup bin/storm supervisor &
nohup bin/storm ui &

--------------------------------------------------------------------------------------

package com.hgs.storm;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

public class StormWordCountTest {

    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException,
            AuthorizationException, InterruptedException {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("wordspout", new WordCountSpout(), 3);
        // WordSpliteBolt already implements IRichBolt through BaseRichBolt, so no cast is needed
        builder.setBolt("splitword", new WordSpliteBolt(), 2).shuffleGrouping("wordspout");
        // "word" is the field emitted by the splitword bolt (declared in WordSpliteBolt.declareOutputFields)
        builder.setBolt("wordcount", new WordCountBolt(), 2).fieldsGrouping("splitword", new Fields("word"));

        Config config = new Config();
        config.setNumWorkers(2);
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("words-count", config, builder.createTopology());
    }
}

class WordCountSpout extends BaseRichSpout {

    private static final long serialVersionUID = 1L;
    // the collector obtained in open() is used by nextTuple() to emit tuples
    SpoutOutputCollector collector = null;

    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void nextTuple() {
        // emits the same sentence over and over; Storm calls nextTuple() in a loop
        collector.emit(new Values(" this is my first storm program so i hope it will success"));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("message"));
    }
}

class WordSpliteBolt extends BaseRichBolt {

    private static final long serialVersionUID = 1L;
    OutputCollector collector = null;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String line = input.getString(0);
        String[] words = line.split(" ");
        for (String wd : words) {
            collector.emit(new Values(wd, 1));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "num"));
    }
}

class WordCountBolt extends BaseRichBolt {

    private static final long serialVersionUID = 1L;
    ConcurrentHashMap<String, Integer> wordsMap = new ConcurrentHashMap<String, Integer>();
    OutputCollector collector = null;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String word = input.getString(0);
        Integer num = input.getInteger(1);
        if (wordsMap.containsKey(word)) {
            wordsMap.put(word, wordsMap.get(word) + num);
        } else {
            wordsMap.put(word, num);
        }
        System.out.println(word + "----" + wordsMap.get(word));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // this bolt is the last step of the topology and does not emit anything
    }
}
That is all the content of "how to configure and use Storm". Thanks for reading! Hopefully you now have a basic understanding and the material shared here has been helpful. To learn more, follow the 编程网 industry news channel!