Possible to have more than one column aggregated and mapped?

Posted 2019-08-30 01:35

Question:

I have several target columns to sum under the same filtering condition, with HBase as the database, so I tried to build this with Storm Trident. In SQL, given the same WHERE/GROUP BY, you can always sum many target columns at once (e.g. SELECT word, SUM(one), SUM(one2) FROM words GROUP BY word), but I failed to do the same in Trident. The topology fails with the error "Got unexpected delegates to Combolist".

Here is my code:

// Imports assume the Storm 1.x package layout (org.apache.storm.*) plus storm-hbase.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;
import org.apache.storm.hbase.bolt.mapper.HBaseValueMapper;
import org.apache.storm.hbase.trident.mapper.SimpleTridentHBaseMapper;
import org.apache.storm.hbase.trident.mapper.TridentHBaseMapper;
import org.apache.storm.hbase.trident.state.HBaseQuery;
import org.apache.storm.hbase.trident.state.HBaseState;
import org.apache.storm.hbase.trident.state.HBaseStateFactory;
import org.apache.storm.hbase.trident.state.HBaseUpdater;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.trident.Stream;
import org.apache.storm.trident.TridentState;
import org.apache.storm.trident.TridentTopology;
import org.apache.storm.trident.operation.BaseFunction;
import org.apache.storm.trident.operation.TridentCollector;
import org.apache.storm.trident.operation.builtin.Sum;
import org.apache.storm.trident.state.StateFactory;
import org.apache.storm.trident.testing.FixedBatchSpout;
import org.apache.storm.trident.tuple.TridentTuple;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.ITuple;
import org.apache.storm.tuple.Values;
// Split and PrintFunction are small helper classes of this project (not shown).

public class HbaseWordCountTridentTopolopgyAggedSumManyLong {
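    // Maps each cell of an HBase Result back to a (columnName, columnValue)
    // tuple; used by the stateQuery at the end of buildTopology().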
    public static class WordCountValueMapper implements HBaseValueMapper {
        @Override
        public List<Values> toValues(ITuple tuple, Result result) throws Exception {
            List<Values> values = new ArrayList<Values>();
            Cell[] cells = result.rawCells();
            for(Cell cell : cells) {

                String colName = Bytes.toString(CellUtil.cloneQualifier(cell));
                Values value = new Values(colName, Bytes.toLong(CellUtil.cloneValue(cell)));
                values.add(value);
            }
            return values;
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("columnName","columnValue"));
        }

    }

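    // Emits the constant pair (1L, 2L) for every word; these two fields are
    // what the topology later tries to sum into "count" and "count2".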
    public static class One extends BaseFunction {
        @Override
        public void execute(TridentTuple tuple, TridentCollector collector) {
            collector.emit(new Values(1L, 2L));
        }
    }

    public static StormTopology buildTopology() {
        Fields fields = new Fields("word", "count", "count2");

        FixedBatchSpout spout2Split = new FixedBatchSpout(new Fields("sentence"), 3, new Values("the cow jumped over the moon"),
                new Values("the man went to the store and bought some candy"), new Values("four score and seven years ago"),
                new Values("how many apples can you eat"), new Values("to be or not to be the person"));
        spout2Split.setCycle(true);

        TridentHBaseMapper tridentHBaseMapper = new SimpleTridentHBaseMapper()
                .withColumnFamily("INFO")
                .withColumnFields(new Fields("word"))
                .withCounterFields(new Fields("count", "count2"))
                .withRowKeyField("word");

        HBaseValueMapper rowToStormValueMapper = new WordCountValueMapper();

        HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
        projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("INFO", "count"));
        projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("INFO", "count2"));

        HBaseState.Options options = new HBaseState.Options()
                .withConfigKey("hbase.conf") // key under which the HBase client config sits in the topology config
                .withDurability(Durability.SYNC_WAL)
                .withMapper(tridentHBaseMapper)
                .withProjectionCriteria(projectionCriteria)
                .withRowToStormValueMapper(rowToStormValueMapper)
                .withTableName("test_HbaseWordCountTridentTopolopgy");

        StateFactory factory = new HBaseStateFactory(options);

        TridentTopology topology = new TridentTopology();

        Stream stream =
                topology.newStream("spout2Split", spout2Split)
                        .each(new Fields("sentence"), new Split(), new Fields("word"))
                        .each(new Fields("word"), new One(), new Fields("one", "one2"))
                        .groupBy(new Fields("word"))
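                        // The aggregate below is where the reported error appears to come from.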
                        .aggregate(new Fields("one", "one2"), new Sum(), new Fields("count", "count2"))
                ;
        stream.partitionPersist(factory, fields, new HBaseUpdater(), new Fields());
        TridentState state = topology.newStaticState(factory);

        stream = stream.stateQuery(state, new Fields("word"), new HBaseQuery(), new Fields("columnName","columnValue"));
        stream.each(new Fields("word","columnValue"), new PrintFunction(), new Fields());
        return topology.build();
    }

    public static void main(String[] args) throws Exception{
        Map<String, Object> hbConf = new HashMap<String, Object>();
        Config conf = new Config();
        conf.put("hbase.conf", hbConf); // hand the (here empty) HBase client config to storm-hbase
        conf.setMaxSpoutPending(5);
        boolean checkLocal = Arrays.stream(args).anyMatch("local"::equals);
        if (checkLocal) {
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("HbaseWordCountTridentTopolopgy", conf, buildTopology());
            Thread.sleep(60 * 1000);
            cluster.shutdown();
        } else {
            conf.setNumWorkers(3);
            StormSubmitter.submitTopology("hbase-word-count-trident", conf, buildTopology());
        }
    }

}
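
For what it's worth, Sum is a CombinerAggregator, and a CombinerAggregator emits exactly one value per group, while the aggregate call above declares two output fields ("count", "count2"); that mismatch looks like the likely trigger for the error. One way around it, assuming Trident's chained aggregation behaves as documented, is to chain one Sum per column inside the same groupBy so that each aggregator declares a single output field. A minimal sketch, not verified against this topology:

        Stream stream =
                topology.newStream("spout2Split", spout2Split)
                        .each(new Fields("sentence"), new Split(), new Fields("word"))
                        .each(new Fields("word"), new One(), new Fields("one", "one2"))
                        .groupBy(new Fields("word"))
                        .chainedAgg()
                        .aggregate(new Fields("one"), new Sum(), new Fields("count"))
                        .aggregate(new Fields("one2"), new Sum(), new Fields("count2"))
                        .chainEnd();

Alternatively, a full Aggregator (unlike a CombinerAggregator) may emit any number of fields, so a custom two-column sum could stand in for the built-in Sum. A sketch with a made-up name, SumTwoLongs; it assumes org.apache.storm.trident.operation.BaseAggregator:

    public static class SumTwoLongs extends BaseAggregator<long[]> {
        @Override
        public long[] init(Object batchId, TridentCollector collector) {
            return new long[2]; // running totals for the two input columns
        }

        @Override
        public void aggregate(long[] acc, TridentTuple tuple, TridentCollector collector) {
            acc[0] += tuple.getLong(0);
            acc[1] += tuple.getLong(1);
        }

        @Override
        public void complete(long[] acc, TridentCollector collector) {
            collector.emit(new Values(acc[0], acc[1])); // -> ("count", "count2")
        }
    }

With that, the original single aggregate call could keep its two output fields:

        .groupBy(new Fields("word"))
        .aggregate(new Fields("one", "one2"), new SumTwoLongs(), new Fields("count", "count2"))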