/**
 * Declares the default stream ("url", "har") plus three named side streams:
 * discovered URLs, rendered page content, and rendered page images.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer out) {
  // Default stream: fetched URL together with its HAR capture.
  out.declare(new Fields("url", "har"));
  // Named streams for consumers that only need a specific artifact.
  out.declareStream("urls", new Fields("url"));
  out.declareStream("renderedContent", new Fields("url", "content"));
  out.declareStream("renderedImage", new Fields("url", "image"));
}
/**
 * Declares the default tuple schema (url, content, metadata) and the crawler
 * status stream carrying (url, metadata, status).
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields defaultSchema = new Fields("url", "content", "metadata");
  declarer.declare(defaultSchema);

  Fields statusSchema = new Fields("url", "metadata", "status");
  declarer.declareStream(
      com.digitalpebble.storm.crawler.Constants.StatusStreamName, statusSchema);
}
@Override public void declareOutputFields(OutputFieldsDeclarer declarer) { // output of this module is the list of fields to index // with at least the URL, text content declarer.declare(new Fields("url", "content", "metadata", "text")); declarer.declareStream(StatusStreamName, new Fields("url", "metadata", "status")); }
@Override public void declareOutputFields(OutputFieldsDeclarer declarer) { // in partitioned example, in case an emitter task receives a later transaction than it's // emitted so far, // when it sees the earlier txid it should know to emit nothing declarer.declareStream( TRANSACTION_BATCH_STREAM_ID, new Fields("tx", "tx-meta", "committed-txid")); declarer.declareStream(TRANSACTION_COMMIT_STREAM_ID, new Fields("tx")); }
/**
 * Declares two streams: one for detection results on a frame, and one for
 * propagating logo-template updates to other tasks.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields detectionFields =
      new Fields(FIELD_FRAME_ID, FIELD_PATCH_IDENTIFIER, FIELD_FOUND_RECT, FIELD_PATCH_COUNT);
  declarer.declareStream(DETECTED_LOGO_STREAM, detectionFields);

  Fields templateUpdateFields =
      new Fields(
          FIELD_HOST_PATCH_IDENTIFIER,
          FIELD_DETECTED_LOGO_RECT,
          FIELD_PARENT_PATCH_IDENTIFIER,
          FIELD_LOGO_INDEX);
  declarer.declareStream(LOGO_TEMPLATE_UPDATE_STREAM, templateUpdateFields);
}
/**
 * Declares the default stream schema: a leading "msg" field followed by one
 * positional field ("V1".."Vn") per result variable of the configured query.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  // Hoist the loop-invariant size() lookup instead of re-evaluating it every
  // iteration (and a third time for the array allocation).
  int varCount = query.getResultVars().size();
  String[] fields = new String[varCount + 1];
  fields[0] = "msg";
  for (int i = 1; i <= varCount; i++) {
    fields[i] = "V" + i;
  }
  declarer.declare(new Fields(fields));
}
/**
 * Declares this bolt's output stream, deriving the field list from the
 * upstream emitted fields combined with the statement's columns.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  // Delegate field-list construction to the shared helper.
  Fields outputFields =
      new GenericBoltUtils().getDeclareOutputFields(previousEmitFileds, statement.getColumns());
  declarer.declareStream(streamId, outputFields);
}
@Override public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) { // tell storm the schema of the output tuple for this spout // tuple consists of a two columns called 'word' and 'count' // declare the first column 'word', second colmun 'count' // **************************************************** // BEGIN YOUR CODE - part 1b // uncomment line below to declare output outputFieldsDeclarer.declare(new Fields("word", "count")); // END YOUR CODE // **************************************************** }
/**
 * Declares the flat prediction-event schema emitted on the default stream:
 * the model prediction plus driver/truck identity, route, position, time,
 * HR attributes, logged totals, and weather flags.
 *
 * <p>NOTE(review): the previous Javadoc here described HBase configuration
 * pickup (hbase-site.xml) and carried an empty {@code @return} tag on a void
 * method — it was evidently copied from an unrelated method and has been
 * replaced.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  declarer.declare(
      new Fields(
          "prediction",
          "driverName",
          "routeName",
          "driverId",
          "truckId",
          "timeStamp",
          "longitude",
          "latitude",
          "certified",
          "wagePlan",
          "hours_logged",
          "miles_logged",
          "isFoggy",
          "isRainy",
          "isWindy"));
}
@Override public void declareOutputFields(OutputFieldsDeclarer declarer) { if (siddhiManager == null) { init(); } // Declaring output fileds for each exported stream ID for (String streamId : exportedStreamIds) { StreamDefinition streamDefinition = siddhiManager.getStreamDefinition(streamId); if (streamDefinition == null) { throw new RuntimeException("Cannot find exported stream - " + streamId); } List<String> list = new ArrayList<String>(); for (Attribute attribute : streamDefinition.getAttributeList()) { list.add(attribute.getName()); } Fields fields = new Fields(list); declarer.declareStream(streamId, fields); log.info("Declaring output field for stream -" + streamId); } }
@Override public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) { // tell storm the schema of the output tuple for this spout // tuple consists of a single column called 'word' outputFieldsDeclarer.declare(new Fields("word")); }
/** Declares the default stream schema: candidate, num1, num2, num_tests. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields("candidate", "num1", "num2", "num_tests");
  declarer.declare(schema);
}
/** Declares a single-field output schema named 'sentence'. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  declarer.declare(new Fields("sentence"));
}
/** Declares a single-field output schema named 'results'. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields("results");
  declarer.declare(schema);
}
/**
 * Declares one output stream per outgoing partition operator, named after the
 * operator and carrying the single tuple field.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  for (PartitionOperator downstream : outgoingOperators) {
    declarer.declareStream(downstream.getName(), new Fields(TUPLE_FIELD));
  }
}
/** Declares the default stream schema: language code followed by word. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields("lang", "word");
  declarer.declare(schema);
}
/**
 * Declares the ad-event schema on the default stream: geo, publisher,
 * advertiser, website, bid, cookie, and two date granularities.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema =
      new Fields("geo", "pub", "adv", "website", "bid", "cookie", "date", "dateUpToMinute");
  declarer.declare(schema);
}
/** Declares the default stream schema: the message batch and its message id. */
@Override
public void declareOutputFields(final OutputFieldsDeclarer declarer) {
  declarer.declare(new Fields("MessageExtList", "MessageId"));
}
/**
 * Declares this spout's output fields according to the configured
 * {@link backtype.storm.spout.Scheme}.
 *
 * <p>Also declares a separate error stream (see {@link #ERROR_STREAM_NAME})
 * so malformed or empty messages can be routed aside instead of being
 * retried forever.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  // Default stream schema comes from the pluggable serialisation scheme.
  declarer.declare(serialisationScheme.getOutputFields());
  // Error stream carries the raw delivery so it can be inspected/requeued.
  declarer.declareStream(ERROR_STREAM_NAME, new Fields("deliveryTag", "bytes", "otherbytes"));
}
/**
 * Declares the default stream schema: STATUS, FOLLOWERS, FRIENDS.
 *
 * <p>NOTE(review): the original declared the third field as {@code "FRIENDS "}
 * with a trailing space — almost certainly a typo, since field lookups by name
 * would silently miss it. Fixed to {@code "FRIENDS"}; confirm no downstream
 * component was matching the space-suffixed name.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  declarer.declare(new Fields("STATUS", "FOLLOWERS", "FRIENDS"));
}
/** Declares the dump-results stream carrying a single results field. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields(SystemParameters.DUMP_RESULTS);
  declarer.declareStream(SystemParameters.DUMP_RESULTS_STREAM, schema);
}
/** Declares the default stream schema: eventoutput, desc, action. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  declarer.declare(new Fields("eventoutput", "desc", "action"));
}
/**
 * Declares a single-field output schema named by the configured
 * {@code OutputFieldName} field.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  // Parameter renamed from the original typo "declearer".
  declarer.declare(new Fields(this.OutputFieldName));
}
/** Declares the default stream schema: an id and its partial count. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields("id", "partial-count");
  declarer.declare(schema);
}
/** Declares the default stream schema: a word and its count. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields("word", "count");
  declarer.declare(schema);
}
/** Declares the default stream schema: a tweet id and the tweeter. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields("id", "tweeter");
  declarer.declare(schema);
}
/** Declares the output fields as defined by the configured parser's scheme. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schemeFields = this._configParser.scheme.getOutputFields();
  declarer.declare(schemeFields);
}
/** Declares the output schema configured at construction time. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  // Fields were supplied to this component up front; just pass them through.
  declarer.declare(_outFields);
}
/** Declares a single-field output schema named 'constString'. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  declarer.declare(new Fields("constString"));
}
/** Declares the default stream schema: USERNAME, LIST, VECTOR. */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  Fields schema = new Fields("USERNAME", "LIST", "VECTOR");
  declarer.declare(schema);
}