Flink运行出现Assigned key must not be null
生活随笔
收集整理的這篇文章主要介紹了
Flink运行出现Assigned key must not be null
小編覺得挺不錯的,現在分享給大家,幫大家做個參考.
完整報錯如下:
Exception in thread "main" java.util.concurrent.ExecutionException: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895)at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1719)at org.apache.flink.streaming.api.environment.LocalStreamEnvironment.execute(LocalStreamEnvironment.java:74)at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1699)at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1681)at KeyBy.main(KeyBy.java:63) Caused by: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)at org.apache.flink.client.program.PerJobMiniClusterFactory$PerJobMiniClusterJobClient.lambda$getJobExecutionResult$2(PerJobMiniClusterFactory.java:186)at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602)at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:577)at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)at org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$0(AkkaInvocationHandler.java:229)at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)at org.apache.flink.runtime.concurrent.FutureUtils$1.onComplete(FutureUtils.java:892)at 
akka.dispatch.OnComplete.internal(Future.scala:264)at akka.dispatch.OnComplete.internal(Future.scala:261)at akka.dispatch.japi$CallbackBridge.apply(Future.scala:191)at akka.dispatch.japi$CallbackBridge.apply(Future.scala:188)at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:36)at org.apache.flink.runtime.concurrent.Executors$DirectExecutionContext.execute(Executors.java:74)at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:44)at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:252)at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:572)at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:22)at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:21)at scala.concurrent.Future$$anonfun$andThen$1.apply(Future.scala:436)at scala.concurrent.Future$$anonfun$andThen$1.apply(Future.scala:435)at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:36)at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:55)at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply$mcV$sp(BatchingExecutor.scala:91)at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply(BatchingExecutor.scala:91)at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply(BatchingExecutor.scala:91)at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:72)at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:90)at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40)at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:44)at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)at 
akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategyat org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:116)at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:78)at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:192)at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:185)at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:179)at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:506)at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:386)at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)at java.lang.reflect.Method.invoke(Method.java:498)at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:284)at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:199)at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:152)at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)at scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123)at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170)at 
scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)at akka.actor.Actor$class.aroundReceive(Actor.scala:517)at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)at akka.actor.ActorCell.invoke(ActorCell.scala:561)at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)at akka.dispatch.Mailbox.run(Mailbox.scala:225)at akka.dispatch.Mailbox.exec(Mailbox.scala:235)... 4 more Caused by: java.lang.RuntimeException: Assigned key must not be null!at org.apache.flink.streaming.runtime.io.RecordWriterOutput.pushToRecordWriter(RecordWriterOutput.java:110)at org.apache.flink.streaming.runtime.io.RecordWriterOutput.collect(RecordWriterOutput.java:89)at org.apache.flink.streaming.runtime.io.RecordWriterOutput.collect(RecordWriterOutput.java:45)at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52)at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30)at org.apache.flink.streaming.api.operators.StreamSourceContexts$NonTimestampContext.collect(StreamSourceContexts.java:104)at org.apache.flink.streaming.api.functions.source.FromElementsFunction.run(FromElementsFunction.java:164)at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:100)at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:63)at org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:201) Caused by: java.lang.NullPointerException: Assigned key must not be null!at org.apache.flink.util.Preconditions.checkNotNull(Preconditions.java:75)at org.apache.flink.runtime.state.KeyGroupRangeAssignment.assignKeyToParallelOperator(KeyGroupRangeAssignment.java:49)at org.apache.flink.streaming.runtime.partitioner.KeyGroupStreamPartitioner.selectChannel(KeyGroupStreamPartitioner.java:58)at 
org.apache.flink.streaming.runtime.partitioner.KeyGroupStreamPartitioner.selectChannel(KeyGroupStreamPartitioner.java:32)at org.apache.flink.runtime.io.network.api.writer.ChannelSelectorRecordWriter.emit(ChannelSelectorRecordWriter.java:60)at org.apache.flink.streaming.runtime.io.RecordWriterOutput.pushToRecordWriter(RecordWriterOutput.java:107)... 9 more原始代碼如下:
import org.apache.flink.api.common.functions.ReduceFunction; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.streaming.api.datastream.DataStreamSource; import org.apache.flink.streaming.api.datastream.KeyedStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;import java.util.Arrays;/*** Summary:* Reduce: 基于ReduceFunction進行滾動聚合,并向下游算子輸出每次滾動聚合后的結果。*/ public class KeyBy {public static void main(String[] args) throws Exception{StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();// 輸入: 用戶行為。某個用戶在某個時刻點擊或瀏覽了某個商品,以及商品的價格。// 下面的數據中,分別是ID,時間戳,用戶動作,商品ID,商品價格.DataStreamSource<UserAction> source = env.fromCollection(Arrays.asList(new UserAction("userID1", 1293984000, "click", "productID1", 10),new UserAction("userID2", 1293984001, "browse", "productID2", 8),new UserAction("userID2", 1293984002, "browse", "productID2", 8),new UserAction("userID2", 1293984003, "browse", "productID2", 8),new UserAction("userID1", 1293984002, "click", "productID1", 10),new UserAction("userID1", 1293984003, "click", "productID3", 10),new UserAction("userID1", 1293984004, "click", "productID1", 10)));// 轉換: KeyBy對數據重分區KeyedStream<UserAction, String> keyedStream = source.keyBy(new KeySelector<UserAction, String>() {@Overridepublic String getKey(UserAction value) throws Exception{return value.getUserID();}});// 轉換: Reduce滾動聚合。這里,滾動聚合每個用戶對應的商品總價格。SingleOutputStreamOperator<UserAction> result = keyedStream.reduce(new ReduceFunction<UserAction>(){@Overridepublic UserAction reduce(UserAction value1, UserAction value2) throws Exception{int newProductPrice = value1.getProductPrice() + value2.getProductPrice();return new UserAction(value1.getUserID(), -1, "", "", newProductPrice);}});// 輸出: 將每次滾動聚合后的結果輸出到控制臺。//3> UserAction(userID=userID2, eventTime=1293984001, eventType=browse, productID=productID2, productPrice=8)//3> 
UserAction(userID=userID2, eventTime=-1, eventType=, productID=, productPrice=16)//3> UserAction(userID=userID2, eventTime=-1, eventType=, productID=, productPrice=24)//4> UserAction(userID=userID1, eventTime=1293984000, eventType=click, productID=productID1, productPrice=10)//4> UserAction(userID=userID1, eventTime=-1, eventType=, productID=, productPrice=20)//4> UserAction(userID=userID1, eventTime=-1, eventType=, productID=, productPrice=30)//4> UserAction(userID=userID1, eventTime=-1, eventType=, productID=, productPrice=40)result.print();env.execute();} }UserAction.java
/**
 * A user-behavior event: a user clicked or browsed a product at some time.
 */
public class UserAction {
    private String userId;   // user ID — the keyBy key; must not be null
    private long timestamp;  // event timestamp
    private String behavior; // user action (e.g. click, browse)
    private String itemId;   // product ID
    private int price;       // product price

    /**
     * BUG FIX: the parameter was previously named {@code user}, so the body's
     * {@code this.userId = userId;} was a self-assignment that left the field
     * null — which is what caused Flink's "Assigned key must not be null"
     * exception in keyBy. Naming the parameter {@code userId} makes the
     * assignment store the caller's value.
     */
    public UserAction(String userId, long timestamp, String behavior, String itemId, int price) {
        this.userId = userId;
        this.timestamp = timestamp;
        this.itemId = itemId;
        this.behavior = behavior;
        this.price = price;
    }

    public String getUserID() {
        return userId;
    }

    public int getProductPrice() {
        return price;
    }

    public String getBehavior() {
        return behavior;
    }

    public long getTimestamp() {
        return timestamp;
    }

    /**
     * Renders the record in the form the expected-output comments in KeyBy
     * assume (without it, print() would show Object's default hash form).
     */
    @Override
    public String toString() {
        return "UserAction(userID=" + userId + ", eventTime=" + timestamp
                + ", eventType=" + behavior + ", productID=" + itemId
                + ", productPrice=" + price + ")";
    }
}
解決方案:
問題出在 UserAction 的構造方法:參數名是 user,但方法體裡寫的是 this.userId = userId;,這是一個自我賦值(把字段賦給字段自己),導致 userId 字段始終為 null。keyBy 取出的 key 因此是 null,Flink 在分區時就拋出 "Assigned key must not be null"。將
public UserAction(String user, long timestamp, String behavior, String itemId, int price)
改成
public UserAction(String userId, long timestamp, String behavior, String itemId, int price)
即可——此時 this.userId = userId; 才會把構造參數正確賦給字段。總結
以上是生活随笔為你收集整理的Flink运行出现Assigned key must not be null的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: 计算机中用于存放下一条要执行指令的地址的
- 下一篇: flink的print()函数输出的都是