digraph G {
  subgraph cluster0 {
    isCluster="true";
    label="WholeStageCodegen (1)\n \nduration: total (min, med, max (stageId: taskId))\n0 ms (0 ms, 0 ms, 0 ms (stage 535.0: task 3648))";
    1 [labelType="html" label="<br><b>Project</b><br><br>"];
    2 [labelType="html" label="<br><b>SerializeFromObject</b><br><br>"];
    3 [labelType="html" label="<br><b>MapElements</b><br><br>"];
    4 [labelType="html" label="<br><b>DeserializeToObject</b><br><br>"];
    5 [labelType="html" label="<br><b>Project</b><br><br>"];
    6 [labelType="html" label="<b>Filter</b><br><br>number of output rows: 1"];
    7 [labelType="html" label="<b>Scan ExistingRDD Delta Table State #28 - hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/constraint-profile/_delta_log</b><br><br>number of output rows: 4"];
  }
  2->1;
  3->2;
  4->3;
  5->4;
  6->5;
  7->6;
}
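The DOT source above is what the Spark SQL UI renders as the plan graph for this WholeStageCodegen stage; the "number of output rows" figures in the node labels are SQL metrics attached to the executed physical operators. As a minimal sketch (for example pasted into spark-shell), the same metric can be read off an executed DataFrame programmatically. The helper name `inspectOutputRows` and its `df` argument are illustrative placeholders, not code from this job, and the simple traversal assumes adaptive query execution is disabled.

```scala
import org.apache.spark.sql.DataFrame

// Hedged sketch: print the "number of output rows" SQL metric for each physical
// operator of an already-executed DataFrame. Assumes AQE is disabled, since this
// plain tree traversal does not look inside an adaptive plan wrapper.
def inspectOutputRows(df: DataFrame): Unit = {
  df.collect() // force execution so the SQLMetrics carry values
  df.queryExecution.executedPlan.foreach { node =>
    node.metrics.get("numOutputRows").foreach { metric =>
      println(s"${node.nodeName}: number of output rows = ${metric.value}")
    }
  }
}
```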
Project [path#50845, partitionValues#50846, size#50847L, modificationTime#50848L, dataChange#50849, null AS stats#50868, tags#50851, deletionVector#50852, baseRowId#50853L, defaultRowCommitVersion#50854L, clusteringProvider#50855]
SerializeFromObject [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).path, true, false, true) AS path#50845, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).partitionValues) AS partitionValues#50846, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).size AS size#50847L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).modificationTime AS modificationTime#50848L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).dataChange AS dataChange#50849, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).tags) AS tags#50851, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).maxRowIndex)) 
AS deletionVector#50852, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).baseRowId) AS baseRowId#50853L, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).defaultRowCommitVersion) AS defaultRowCommitVersion#50854L, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).clusteringProvider), true, false, true) AS clusteringProvider#50855]
MapElements org.apache.spark.sql.Dataset$$Lambda$6016/0x00007f71e9930000@5418fa9a, obj#50844: org.apache.spark.sql.delta.actions.AddFile
DeserializeToObject newInstance(class scala.Tuple1), obj#50843: scala.Tuple1
Project [add#50614]
Filter isnotnull(add#50614)
Scan ExistingRDD Delta Table State #28 - hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/constraint-profile/_delta_log[txn#50613,add#50614,remove#50615,metaData#50616,protocol#50617,cdc#50618,checkpointMetadata#50619,sidecar#50620,domainMetadata#50621,commitInfo#50622]
WholeStageCodegen (1)

== Physical Plan ==
* Project (7)
+- * SerializeFromObject (6)
   +- * MapElements (5)
      +- * DeserializeToObject (4)
         +- * Project (3)
            +- * Filter (2)
               +- * Scan ExistingRDD Delta Table State #28 - hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/constraint-profile/_delta_log (1)

(1) Scan ExistingRDD Delta Table State #28 - hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/constraint-profile/_delta_log [codegen id : 1]
Output [10]: [txn#50613, add#50614, remove#50615, metaData#50616, protocol#50617, cdc#50618, checkpointMetadata#50619, sidecar#50620, domainMetadata#50621, commitInfo#50622]
Arguments: [txn#50613, add#50614, remove#50615, metaData#50616, protocol#50617, cdc#50618, checkpointMetadata#50619, sidecar#50620, domainMetadata#50621, commitInfo#50622], Delta Table State #28 - hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/constraint-profile/_delta_log MapPartitionsRDD[1418] at $anonfun$recordDeltaOperationInternal$1 at DatabricksLogging.scala:128, ExistingRDD, UnknownPartitioning(0)

(2) Filter [codegen id : 1]
Input [10]: [txn#50613, add#50614, remove#50615, metaData#50616, protocol#50617, cdc#50618, checkpointMetadata#50619, sidecar#50620, domainMetadata#50621, commitInfo#50622]
Condition : isnotnull(add#50614)

(3) Project [codegen id : 1]
Output [1]: [add#50614]
Input [10]: [txn#50613, add#50614, remove#50615, metaData#50616, protocol#50617, cdc#50618, checkpointMetadata#50619, sidecar#50620, domainMetadata#50621, commitInfo#50622]

(4) DeserializeToObject [codegen id : 1]
Input [1]: [add#50614]
Arguments: newInstance(class scala.Tuple1), obj#50843: scala.Tuple1

(5) MapElements [codegen id : 1]
Input [1]: [obj#50843]
Arguments: org.apache.spark.sql.Dataset$$Lambda$6016/0x00007f71e9930000@5418fa9a, obj#50844: org.apache.spark.sql.delta.actions.AddFile

(6) SerializeFromObject [codegen id : 1]
Input [1]: [obj#50844]
Arguments: [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).path, true, false, true) AS path#50845, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).partitionValues) AS partitionValues#50846, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).size AS size#50847L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).modificationTime AS modificationTime#50848L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).dataChange AS dataChange#50849, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).tags) AS tags#50851, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).maxRowIndex)) AS 
deletionVector#50852, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).baseRowId) AS baseRowId#50853L, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).defaultRowCommitVersion) AS defaultRowCommitVersion#50854L, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).clusteringProvider), true, false, true) AS clusteringProvider#50855]

(7) Project [codegen id : 1]
Output [11]: [path#50845, partitionValues#50846, size#50847L, modificationTime#50848L, dataChange#50849, null AS stats#50868, tags#50851, deletionVector#50852, baseRowId#50853L, defaultRowCommitVersion#50854L, clusteringProvider#50855]
Input [10]: [path#50845, partitionValues#50846, size#50847L, modificationTime#50848L, dataChange#50849, tags#50851, deletionVector#50852, baseRowId#50853L, defaultRowCommitVersion#50854L, clusteringProvider#50855]
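
For reference, a numbered "== Physical Plan ==" dump in the layout above is what Spark's formatted explain mode produces. The sketch below shows how such output is typically obtained; the SparkSession setup and the demo DataFrame are illustrative placeholders and stand in for the actual Delta state Dataset of this job.

```scala
import org.apache.spark.sql.SparkSession

// Hedged sketch: reproduce a "formatted" physical-plan dump for an arbitrary query.
object ExplainFormattedDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("explain-demo").getOrCreate()
    import spark.implicits._

    // Placeholder query, not the Delta log state scan shown above.
    val df = Seq((1, "a"), (2, "b")).toDF("id", "value").filter($"value".isNotNull)

    // Prints "== Physical Plan ==" followed by the numbered per-node detail blocks,
    // in the same layout as the dump above.
    df.explain("formatted")

    spark.stop()
  }
}
```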