This article walks through how Flink reads from different data sources. The material is short and clearly organized, with a small runnable example for each approach: reading from a collection, from a text file, from Kafka, and from a custom source.
Reading from a collection:

private static void readFromCollection(String[] args) throws Exception {
    // Convert the program arguments into a parameter object
    MultipleParameterTool params = MultipleParameterTool.fromArgs(args);
    // Create a batch execution environment
    // ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    // Create a streaming execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // Set the parallelism of each operator; by default it is the number of CPU cores (in a local test environment)
    env.setParallelism(2);
    // Set the maximum parallelism
    env.setMaxParallelism(6);
    // Read from a collection
    List<String> collectionData = Arrays.asList("a", "b", "c", "d");
    DataStreamSource<String> dataStreamSource = env.fromCollection(collectionData);
    // Read directly from individual elements
    // env.fromElements("a", "b", "c", "d");
    dataStreamSource.print();
    // dataStreamSource.addSink(new PrintSinkFunction<>());
    env.execute();
}
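fromCollection is not limited to strings; it also works with POJOs, as long as Flink can extract their type from the collection. Below is a minimal sketch, assuming a hypothetical Event POJO that is not part of the original example:

import java.util.Arrays;
import java.util.List;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class CollectionPojoSource {

    // Hypothetical POJO used only for illustration: a public no-arg constructor and
    // public fields make it a valid Flink POJO type.
    public static class Event {
        public String name;
        public long count;

        public Event() {}

        public Event(String name, long count) {
            this.name = name;
            this.count = count;
        }

        @Override
        public String toString() {
            return name + "=" + count;
        }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Build the source from an in-memory collection of POJOs
        List<Event> events = Arrays.asList(new Event("a", 1L), new Event("b", 2L));
        DataStreamSource<Event> source = env.fromCollection(events);

        source.print();
        env.execute("collection-pojo-source");
    }
}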
Reading from a text file:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> dataStreamSource = env.readTextFile("E:\\GIT\\flink-learn\\flink1\\word.txt", "utf-8");
dataStreamSource.print();
env.execute();
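readTextFile reads the file once and then finishes the source. If the job should keep watching a path for new data, the lower-level readFile variant with FileProcessingMode.PROCESS_CONTINUOUSLY can be used instead. A minimal sketch (the directory path and the 10-second scan interval are placeholders):

import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.FileProcessingMode;

public class ContinuousFileSource {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Placeholder path; point this at a real directory or file
        String path = "E:\\GIT\\flink-learn\\flink1\\input";

        // Re-scan the path every 10 seconds and emit the contents of new or changed files
        DataStreamSource<String> lines = env.readFile(
                new TextInputFormat(new Path(path)),
                path,
                FileProcessingMode.PROCESS_CONTINUOUSLY,
                10_000L);

        lines.print();
        env.execute("continuous-file-source");
    }
}

Note that in this mode a modified file is re-read in its entirety, so records from it can be emitted more than once.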
Reading from Kafka (here with the flink-connector-kafka-0.10 consumer):

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
Properties properties = new Properties();
properties.put("bootstrap.servers", "10.1.5.130:9092");
properties.put("zookeeper.connect", "10.2.5.135:2181");
properties.put("group.id", "my-flink");
properties.put("auto.offset.reset", "latest");
properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
FlinkKafkaConsumer010<String> kafkaConsumer010 = new FlinkKafkaConsumer010<>(
        "flink",                  // topic
        new SimpleStringSchema(), // how to deserialize each record
        properties
);
DataStreamSource<String> dataStreamSource = env.addSource(kafkaConsumer010);
dataStreamSource.print();
env.execute();
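Before adding the consumer as a source, its start position can also be set explicitly on the consumer object instead of relying only on auto.offset.reset. A small sketch continuing from the snippet above (only one of these calls would be used):

// Start from the committed group offsets (the default), falling back to auto.offset.reset
kafkaConsumer010.setStartFromGroupOffsets();
// Or ignore committed offsets and read the topic from the beginning
// kafkaConsumer010.setStartFromEarliest();
// Or ignore committed offsets and only read records that arrive after the job starts
// kafkaConsumer010.setStartFromLatest();

Also note that FlinkKafkaConsumer010 belongs to the old per-version Kafka connectors; newer Flink releases ship a universal FlinkKafkaConsumer (and later a KafkaSource), so the class name may differ depending on your Flink version.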
Custom source: implement org.apache.flink.streaming.api.functions.source.SourceFunction, for example:
public static final class MyDataSource implements SourceFunction<String> {
    // volatile so that cancel(), which is called from a different thread, is visible to run()
    private volatile boolean running = true;

    @Override
    public void run(SourceContext<String> sourceContext) throws Exception {
        Random random = new Random();
        while (running) {
            double data = random.nextDouble() * 100;
            // Emit the value together with an event-time timestamp
            sourceContext.collectWithTimestamp(String.valueOf(data), System.currentTimeMillis());
            TimeUnit.SECONDS.sleep(1);
        }
    }

    @Override
    public void cancel() {
        this.running = false;
    }
}
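collectWithTimestamp only attaches an event-time timestamp to each record; it does not advance event time by itself. If downstream operators rely on event time, the source also has to emit watermarks (or timestamps and watermarks have to be assigned downstream). A small sketch of how the loop in run() could do that, assuming org.apache.flink.streaming.api.watermark.Watermark is imported:

while (running) {
    long timestamp = System.currentTimeMillis();
    double data = random.nextDouble() * 100;
    sourceContext.collectWithTimestamp(String.valueOf(data), timestamp);
    // Tell Flink that event time has progressed up to this timestamp
    sourceContext.emitWatermark(new Watermark(timestamp));
    TimeUnit.SECONDS.sleep(1);
}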
Then register the custom source with addSource:

StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> dataStreamSource = env.addSource(new MyDataSource());
dataStreamSource.print();
env.execute();
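One thing worth knowing: a source that only implements SourceFunction always runs with parallelism 1, regardless of env.setParallelism. To run the source itself in parallel, implement org.apache.flink.streaming.api.functions.source.ParallelSourceFunction (or extend RichParallelSourceFunction). A minimal sketch along the same lines as MyDataSource above, with the same imports assumed:

public static final class MyParallelDataSource implements ParallelSourceFunction<String> {
    // volatile so the cancel() call from another thread is visible to run()
    private volatile boolean running = true;

    @Override
    public void run(SourceContext<String> sourceContext) throws Exception {
        // Each parallel subtask executes its own copy of this loop
        Random random = new Random();
        while (running) {
            sourceContext.collect(String.valueOf(random.nextDouble() * 100));
            TimeUnit.SECONDS.sleep(1);
        }
    }

    @Override
    public void cancel() {
        running = false;
    }
}

It is registered the same way, and can then be scaled, e.g. env.addSource(new MyParallelDataSource()).setParallelism(2);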
That covers the different ways Flink can read data sources. Thanks for reading, and I hope the examples help.