digraph G {
  0 [labelType="html" label="<br><b>WriteToDataSourceV2</b><br><br>"];
  subgraph cluster1 {
    isCluster="true";
    label="WholeStageCodegen (1)";
    2 [labelType="html" label="<br><b>Project</b><br><br>"];
  }
  3 [labelType="html" label="<br><b>Project</b><br><br>"];
  4 [labelType="html" label="<br><b>MicroBatchScan</b><br><br>"];
  2->0;
  3->2;
  4->3;
}
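The block above is the DOT source that the Spark UI's SQL page renders in the browser (node 1 is the WholeStageCodegen cluster, which is why the node ids skip from 0 to 2). A minimal sketch for viewing it offline, assuming the text has been saved as plan.dot and the Graphviz dot binary is on PATH; labelType and isCluster are attributes for Spark's in-browser renderer, so plain Graphviz ignores them and shows the <br>/<b> tags as literal label text:

import java.nio.file.{Files, Paths}
import scala.sys.process._

object RenderPlan {
  def main(args: Array[String]): Unit = {
    // Expects the digraph above saved verbatim as plan.dot.
    require(Files.exists(Paths.get("plan.dot")), "save the digraph above as plan.dot first")
    // Shell out to Graphviz; -Tsvg selects the SVG output format.
    val exit = "dot -Tsvg plan.dot -o plan.svg".!
    println(s"dot exited with $exit; wrote plan.svg")
  }
}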
WriteToDataSourceV2 MicroBatchWrite[epoch: 95, writer: org.apache.spark.sql.execution.streaming.sources.ForeachWrite$$anon$2@505a1774], org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$$Lambda$2505/0x00000008411d5440@41f94dab
Project [data#26.uniqueId AS uniqueId#28, data#26.eventType AS eventType#29, data#26.eventTime AS eventTime#30, data#26.userId AS userId#31, data#26.projectId AS projectId#32, data#26.projectTitle AS projectTitle#33, data#26.videoTime AS videoTime#34, data#26.productId AS productId#35, data#26.productName AS productName#36, data#26.brandId AS brandId#37, data#26.brandName AS brandName#38, data#26.actorId AS actorId#39, data#26.actorName AS actorName#40, data#26.operatorId AS operatorId#41, data#26.operatorName AS operatorName#42, data#26.screeningFileId AS screeningFileId#43, data#26.deviceType AS deviceType#44, data#26.productSKU AS productSKU#45, data#26.productQuantity AS productQuantity#46, data#26.productTotal AS productTotal#47, data#26.shippingAddress AS shippingAddress#48, data#26.billingAddress AS billingAddress#49, data#26.vovId AS vovId#50, data#26.vovName AS vovName#51, data#26.duration AS duration#52L]
WholeStageCodegen (1)
Project [from_json(StructField(uniqueId,StringType,true), StructField(eventType,StringType,true), StructField(eventTime,StringType,true), StructField(userId,StringType,true), StructField(projectId,StringType,true), StructField(projectTitle,StringType,true), StructField(videoTime,StringType,true), StructField(productId,StringType,true), StructField(productName,StringType,true), StructField(brandId,StringType,true), StructField(brandName,StringType,true), StructField(actorId,StringType,true), StructField(actorName,StringType,true), StructField(operatorId,StringType,true), StructField(operatorName,StringType,true), StructField(screeningFileId,StringType,true), StructField(deviceType,StringType,true), StructField(productSKU,StringType,true), StructField(productQuantity,IntegerType,true), StructField(productTotal,DoubleType,true), StructField(shippingAddress,StringType,true), StructField(billingAddress,StringType,true), StructField(vovId,StringType,true), StructField(vovName,StringType,true), ... 3 more fields) AS data#26]
MicroBatchScan[key#7, value#8, topic#9, partition#10, offset#11L, timestamp#12, timestampType#13] class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan
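Read together, the node descriptions spell out the whole pipeline: a Kafka micro-batch scan, a Project that parses the Kafka value into a single struct column data#26 with from_json, a code-generated Project that flattens the struct into 25 top-level columns, and a foreach sink wrapped in WriteToDataSourceV2. A minimal Scala sketch of the kind of query that would produce this plan follows; the topic, bootstrap servers, and ForeachWriter body are illustrative stand-ins, not recovered from the output:

import org.apache.spark.sql.{ForeachWriter, Row, SparkSession}
import org.apache.spark.sql.functions.{col, from_json}
import org.apache.spark.sql.types._

object EventPipeline {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("event-pipeline").getOrCreate()

    // Schema reconstructed from the from_json struct above; the two
    // fields hidden behind "... 3 more fields" (besides duration) are
    // elided in the plan and therefore omitted here too.
    val schema = StructType(Seq(
      StructField("uniqueId", StringType),
      StructField("eventType", StringType),
      StructField("eventTime", StringType),
      StructField("userId", StringType),
      StructField("projectId", StringType),
      StructField("projectTitle", StringType),
      StructField("videoTime", StringType),
      StructField("productId", StringType),
      StructField("productName", StringType),
      StructField("brandId", StringType),
      StructField("brandName", StringType),
      StructField("actorId", StringType),
      StructField("actorName", StringType),
      StructField("operatorId", StringType),
      StructField("operatorName", StringType),
      StructField("screeningFileId", StringType),
      StructField("deviceType", StringType),
      StructField("productSKU", StringType),
      StructField("productQuantity", IntegerType),
      StructField("productTotal", DoubleType),
      StructField("shippingAddress", StringType),
      StructField("billingAddress", StringType),
      StructField("vovId", StringType),
      StructField("vovName", StringType),
      StructField("duration", LongType) // duration#52L: the L marks a bigint
    ))

    spark.readStream
      .format("kafka")                                     // (1) MicroBatchScan: KafkaSourceProvider$KafkaScan
      .option("kafka.bootstrap.servers", "localhost:9092") // illustrative
      .option("subscribe", "events")                       // illustrative
      .load()
      .select(from_json(col("value").cast("string"), schema).as("data")) // (2) Project: ... AS data#26
      .select("data.*")                                    // (3) Project: data#26.uniqueId AS uniqueId#28, ...
      .writeStream
      .foreach(new ForeachWriter[Row] {                    // (4) WriteToDataSourceV2: ForeachWrite
        // The actual writer body is not recoverable from the plan.
        def open(partitionId: Long, epochId: Long): Boolean = true
        def process(row: Row): Unit = ()
        def close(errorOrNull: Throwable): Unit = ()
      })
      .start()
      .awaitTermination()
  }
}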
== Physical Plan ==
WriteToDataSourceV2 (4)
+- * Project (3)
   +- Project (2)
      +- MicroBatchScan (1)

(1) MicroBatchScan
Output [7]: [key#7, value#8, topic#9, partition#10, offset#11L, timestamp#12, timestampType#13]
class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan

(2) Project
Output [1]: [from_json(StructField(uniqueId,StringType,true), StructField(eventType,StringType,true), StructField(eventTime,StringType,true), StructField(userId,StringType,true), StructField(projectId,StringType,true), StructField(projectTitle,StringType,true), StructField(videoTime,StringType,true), StructField(productId,StringType,true), StructField(productName,StringType,true), StructField(brandId,StringType,true), StructField(brandName,StringType,true), StructField(actorId,StringType,true), StructField(actorName,StringType,true), StructField(operatorId,StringType,true), StructField(operatorName,StringType,true), StructField(screeningFileId,StringType,true), StructField(deviceType,StringType,true), StructField(productSKU,StringType,true), StructField(productQuantity,IntegerType,true), StructField(productTotal,DoubleType,true), StructField(shippingAddress,StringType,true), StructField(billingAddress,StringType,true), StructField(vovId,StringType,true), StructField(vovName,StringType,true), ... 3 more fields) AS data#26]
Input [7]: [key#7, value#8, topic#9, partition#10, offset#11L, timestamp#12, timestampType#13]

(3) Project [codegen id : 1]
Output [25]: [data#26.uniqueId AS uniqueId#28, data#26.eventType AS eventType#29, data#26.eventTime AS eventTime#30, data#26.userId AS userId#31, data#26.projectId AS projectId#32, data#26.projectTitle AS projectTitle#33, data#26.videoTime AS videoTime#34, data#26.productId AS productId#35, data#26.productName AS productName#36, data#26.brandId AS brandId#37, data#26.brandName AS brandName#38, data#26.actorId AS actorId#39, data#26.actorName AS actorName#40, data#26.operatorId AS operatorId#41, data#26.operatorName AS operatorName#42, data#26.screeningFileId AS screeningFileId#43, data#26.deviceType AS deviceType#44, data#26.productSKU AS productSKU#45, data#26.productQuantity AS productQuantity#46, data#26.productTotal AS productTotal#47, data#26.shippingAddress AS shippingAddress#48, data#26.billingAddress AS billingAddress#49, data#26.vovId AS vovId#50, data#26.vovName AS vovName#51, data#26.duration AS duration#52L]
Input [1]: [data#26]

(4) WriteToDataSourceV2
Input [25]: [uniqueId#28, eventType#29, eventTime#30, userId#31, projectId#32, projectTitle#33, videoTime#34, productId#35, productName#36, brandId#37, brandName#38, actorId#39, actorName#40, operatorId#41, operatorName#42, screeningFileId#43, deviceType#44, productSKU#45, productQuantity#46, productTotal#47, shippingAddress#48, billingAddress#49, vovId#50, vovName#51, duration#52L]
Arguments: MicroBatchWrite[epoch: 95, writer: org.apache.spark.sql.execution.streaming.sources.ForeachWrite$$anon$2@2e362049], org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$$Lambda$2505/0x00000008411d5440@70776650
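The numbered tree plus per-node details is the "formatted" explain rendering that the Spark UI's SQL page shows for a single micro-batch (epoch 95 here, per the MicroBatchWrite arguments). A minimal, self-contained sketch of printing the physical plan of a running streaming query from code, using a rate source as a stand-in for the Kafka source above:

import org.apache.spark.sql.SparkSession

object ExplainDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("explain-demo")
      .getOrCreate()

    // Stand-in source; the plan above reads from Kafka instead.
    val query = spark.readStream
      .format("rate")
      .load()
      .writeStream
      .format("console")
      .start()

    // Give at least one micro-batch a chance to run, then print the
    // physical plan of the most recent execution. Before the first
    // batch completes, explain() reports that no physical plan exists yet.
    Thread.sleep(3000)
    query.explain()                 // compact physical plan
    query.explain(extended = true)  // includes the logical plans as well

    query.stop()
    spark.stop()
  }
}

For a batch Dataset the numbered rendering above is available directly via df.explain("formatted") (Spark 3.0+); for streaming queries it appears per micro-batch on the UI's SQL page, which is where this dump was taken from.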