Storm is paired with Redis here mainly to use Redis as a cache that temporarily holds Storm's computation results; other applications can then read the data back out of Redis.
Create a new Maven project
pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>storm07</groupId>
    <artifactId>storm07</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <packaging>jar</packaging>
    <name>storm07</name>
    <url>http://maven.apache.org</url>

    <repositories>
        <!-- Repository where we can find the Storm dependencies -->
        <repository>
            <id>clojars.org</id>
            <url>http://clojars.org/repo</url>
        </repository>
    </repositories>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.storm</groupId>
            <artifactId>storm-core</artifactId>
            <version>0.9.2-incubating</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
            <version>2.1.1</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>2.1.1</version>
        </dependency>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>2.7.2</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>2.0-beta9</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-1.2-api</artifactId>
            <version>2.0-beta9</version>
        </dependency>
        <!-- Note: log4j-over-slf4j and slf4j-log4j12 together form a delegation
             cycle that SLF4J rejects at startup; in practice keep only one of
             these two bindings on the classpath. -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>log4j-over-slf4j</artifactId>
            <version>1.7.10</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.10</version>
        </dependency>
    </dependencies>

    <build>
        <finalName>storm07</finalName>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-war-plugin</artifactId>
                <version>2.4</version>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>2.1</version>
                <configuration>
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>
            <!-- Unit tests -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <configuration>
                    <skip>true</skip>
                    <includes>
                        <include>**/*Test*.java</include>
                    </includes>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-source-plugin</artifactId>
                <version>2.1.2</version>
                <executions>
                    <!-- Bind maven-source-plugin to the package phase and run the jar-no-fork goal -->
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>jar-no-fork</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
Topology
package bhz.storm.redis.example;

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.generated.AlreadyAliveException;
import backtype.storm.generated.InvalidTopologyException;
import backtype.storm.topology.TopologyBuilder;

public class Topology {

    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException {
        TopologyBuilder builder = new TopologyBuilder();

        // Set the spout class with a parallelism hint of 2.
        builder.setSpout("spout", new SampleSpout(), 2);
        // Set the bolt class; it writes to Redis at 192.168.1.114:6379.
        builder.setBolt("bolt", new StormRedisBolt("192.168.1.114", 6379), 2).shuffleGrouping("spout");

        Config conf = new Config();
        conf.setDebug(true);

        // Create an instance of LocalCluster for executing the topology in local mode.
        LocalCluster cluster = new LocalCluster();
        // StormRedisTopology is the name of the submitted topology.
        cluster.submitTopology("StormRedisTopology", conf, builder.createTopology());
        try {
            Thread.sleep(10000);
        } catch (Exception exception) {
            System.out.println("Thread interrupted exception : " + exception);
        }
        // Kill the StormRedisTopology.
        cluster.killTopology("StormRedisTopology");
        // Shut down the local test cluster.
        cluster.shutdown();
    }
}
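LocalCluster runs the topology in-process for ten seconds and then tears it down, which is only useful for testing. Submitting the same topology to a real cluster swaps LocalCluster for StormSubmitter; below is a minimal sketch, assuming the jar is deployed with the storm jar command and that two workers suit your cluster:

package bhz.storm.redis.example;

import backtype.storm.Config;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.AlreadyAliveException;
import backtype.storm.generated.InvalidTopologyException;
import backtype.storm.topology.TopologyBuilder;

public class ClusterTopology {
    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("spout", new SampleSpout(), 2);
        builder.setBolt("bolt", new StormRedisBolt("192.168.1.114", 6379), 2).shuffleGrouping("spout");

        Config conf = new Config();
        conf.setNumWorkers(2); // illustrative worker count; tune for your cluster

        // StormSubmitter reads the Nimbus host/port from the client's storm.yaml.
        StormSubmitter.submitTopology("StormRedisTopology", conf, builder.createTopology());
    }
}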
SampleSpout
package bhz.storm.redis.example;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

public class SampleSpout extends BaseRichSpout {
    private static final long serialVersionUID = 1L;

    private SpoutOutputCollector spoutOutputCollector;

    private static final Map<Integer, String> FIRSTNAMEMAP = new HashMap<Integer, String>();
    static {
        FIRSTNAMEMAP.put(0, "john");
        FIRSTNAMEMAP.put(1, "nick");
        FIRSTNAMEMAP.put(2, "mick");
        FIRSTNAMEMAP.put(3, "tom");
        FIRSTNAMEMAP.put(4, "jerry");
    }

    private static final Map<Integer, String> LASTNAME = new HashMap<Integer, String>();
    static {
        LASTNAME.put(0, "anderson");
        LASTNAME.put(1, "watson");
        LASTNAME.put(2, "ponting");
        LASTNAME.put(3, "dravid");
        LASTNAME.put(4, "lara");
    }

    private static final Map<Integer, String> COMPANYNAME = new HashMap<Integer, String>();
    static {
        COMPANYNAME.put(0, "abc");
        COMPANYNAME.put(1, "dfg");
        COMPANYNAME.put(2, "pqr");
        COMPANYNAME.put(3, "ecd");
        COMPANYNAME.put(4, "awe");
    }

    public void open(Map conf, TopologyContext context, SpoutOutputCollector spoutOutputCollector) {
        // Open the spout and keep the collector for later emits.
        this.spoutOutputCollector = spoutOutputCollector;
    }

    public void nextTuple() {
        // Storm repeatedly calls this method to emit a continuous stream of tuples.
        final Random rand = new Random();
        // Generate a random number from 0 to 4.
        int randomNumber = rand.nextInt(5);
        spoutOutputCollector.emit(new Values(FIRSTNAMEMAP.get(randomNumber),
                LASTNAME.get(randomNumber), COMPANYNAME.get(randomNumber)));
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Declare the fields this spout emits.
        declarer.declare(new Fields("firstName", "lastName", "companyName"));
    }
}
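SampleSpout emits unanchored tuples, so Storm never replays a tuple that fails in the bolt. If delivery guarantees matter, emit with a message ID and override ack/fail. The following variant is a minimal sketch only; the pending map, the sample data arrays, and the replay-on-fail policy are illustrative choices, not part of the original example:

package bhz.storm.redis.example;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

// Illustrative reliable variant of SampleSpout.
public class ReliableSampleSpout extends BaseRichSpout {
    private static final long serialVersionUID = 1L;

    private SpoutOutputCollector collector;
    // Tuples emitted but not yet acked, keyed by message ID.
    private final Map<Long, Values> pending = new HashMap<Long, Values>();
    private long nextId = 0;
    private final Random rand = new Random();

    private static final String[] FIRST = { "john", "nick", "mick", "tom", "jerry" };
    private static final String[] LAST = { "anderson", "watson", "ponting", "dravid", "lara" };
    private static final String[] COMPANY = { "abc", "dfg", "pqr", "ecd", "awe" };

    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    public void nextTuple() {
        int i = rand.nextInt(FIRST.length);
        Values tuple = new Values(FIRST[i], LAST[i], COMPANY[i]);
        long msgId = nextId++;
        pending.put(msgId, tuple);
        // Passing a message ID makes Storm track the tuple tree and call ack/fail.
        collector.emit(tuple, msgId);
    }

    @Override
    public void ack(Object msgId) {
        pending.remove(msgId); // fully processed downstream, forget it
    }

    @Override
    public void fail(Object msgId) {
        Values tuple = pending.get(msgId);
        if (tuple != null) {
            collector.emit(tuple, msgId); // replay the failed tuple
        }
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("firstName", "lastName", "companyName"));
    }
}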
StormRedisBolt
package bhz.storm.redis.example;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.IBasicBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;

public class StormRedisBolt implements IBasicBolt {
    private static final long serialVersionUID = 2L;

    private RedisOperations redisOperations = null;
    private String redisIP = null;
    private int port;

    public StormRedisBolt(String redisIP, int port) {
        this.redisIP = redisIP;
        this.port = port;
    }

    public void execute(Tuple input, BasicOutputCollector collector) {
        // Copy the "firstName", "lastName" and "companyName" fields into a record
        // and persist it to Redis under a random UUID key.
        Map<String, Object> record = new HashMap<String, Object>();
        record.put("firstName", input.getValueByField("firstName"));
        record.put("lastName", input.getValueByField("lastName"));
        record.put("companyName", input.getValueByField("companyName"));
        redisOperations.insert(record, UUID.randomUUID().toString());
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Terminal bolt: it emits no tuples.
    }

    public Map<String, Object> getComponentConfiguration() {
        return null;
    }

    public void prepare(Map stormConf, TopologyContext context) {
        // The Jedis connection is not serializable, so it is created here on
        // the worker rather than in the constructor.
        redisOperations = new RedisOperations(this.redisIP, this.port);
    }

    public void cleanup() {
    }
}
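Because StormRedisBolt emits nothing and leaves several IBasicBolt methods empty, the same logic can extend BaseBasicBolt, which supplies those empty defaults. A functionally equivalent sketch (the class name StormRedisBasicBolt is mine):

package bhz.storm.redis.example;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

// Alternative to StormRedisBolt that inherits empty prepare/cleanup and
// getComponentConfiguration from BaseBasicBolt.
public class StormRedisBasicBolt extends BaseBasicBolt {
    private static final long serialVersionUID = 1L;

    private transient RedisOperations redisOperations;
    private final String redisIP;
    private final int port;

    public StormRedisBasicBolt(String redisIP, int port) {
        this.redisIP = redisIP;
        this.port = port;
    }

    @Override
    public void prepare(Map stormConf, TopologyContext context) {
        // Create the non-serializable Jedis connection on the worker.
        redisOperations = new RedisOperations(redisIP, port);
    }

    @Override
    public void execute(Tuple input, BasicOutputCollector collector) {
        Map<String, Object> record = new HashMap<String, Object>();
        record.put("firstName", input.getValueByField("firstName"));
        record.put("lastName", input.getValueByField("lastName"));
        record.put("companyName", input.getValueByField("companyName"));
        redisOperations.insert(record, UUID.randomUUID().toString());
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Terminal bolt: nothing to declare.
    }
}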
RedisOperations

package bhz.storm.redis.example;

import java.io.Serializable;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

import redis.clients.jedis.Jedis;

public class RedisOperations implements Serializable {
    private static final long serialVersionUID = 1L;

    Jedis jedis = null;

    public RedisOperations(String redisIP, int port) {
        // Connect to the Redis server.
        jedis = new Jedis(redisIP, port);
    }

    public void insert(Map<String, Object> record, String id) {
        try {
            // Serialize the record to JSON and store it under the given key.
            jedis.set(id, new ObjectMapper().writeValueAsString(record));
        } catch (Exception e) {
            System.out.println("Record not persisted into datastore: " + e);
        }
    }
}
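This closes the loop described at the top: once the bolt has written records, any other application can pull them back out of Redis. Below is a minimal reader sketch, assuming the same host and port and that every key in the database was written by this topology; a real consumer would use a key prefix plus SCAN rather than KEYS *:

package bhz.storm.redis.example;

import java.util.Map;
import java.util.Set;

import com.fasterxml.jackson.databind.ObjectMapper;

import redis.clients.jedis.Jedis;

// Illustrative consumer that reads back the JSON records stored by StormRedisBolt.
public class RedisReader {
    public static void main(String[] args) throws Exception {
        Jedis jedis = new Jedis("192.168.1.114", 6379);
        ObjectMapper mapper = new ObjectMapper();
        // KEYS * is fine for a demo but scans the whole keyspace.
        Set<String> keys = jedis.keys("*");
        for (String key : keys) {
            // Deserialize the JSON value back into a Map.
            Map record = mapper.readValue(jedis.get(key), Map.class);
            System.out.println(key + " -> " + record);
        }
        jedis.close();
    }
}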