digraph G {
0 [labelType="html" label="<br><b>AdaptiveSparkPlan</b><br><br>"];
subgraph cluster1 {
isCluster="true";
label="WholeStageCodegen (4)";
2 [labelType="html" label="<br><b>SerializeFromObject</b><br><br>"];
}
3 [labelType="html" label="<br><b>MapPartitions</b><br><br>"];
4 [labelType="html" label="<br><b>DeserializeToObject</b><br><br>"];
subgraph cluster5 {
isCluster="true";
label="WholeStageCodegen (3)";
6 [labelType="html" label="<br><b>Project</b><br><br>"];
7 [labelType="html" label="<br><b>Sort</b><br><br>"];
}
8 [labelType="html" label="<b>Exchange</b><br><br>shuffle records written: 28<br>shuffle write time total (min, med, max (stageId: taskId))<br>5 ms (0 ms, 0 ms, 1 ms (stage 316.0: task 2282))<br>data size total (min, med, max (stageId: taskId))<br>37.5 KiB (5.1 KiB, 5.1 KiB, 11.8 KiB (stage 316.0: task 2282))<br>number of partitions: 50<br>shuffle bytes written total (min, med, max (stageId: taskId))<br>18.9 KiB (2.8 KiB, 2.8 KiB, 4.9 KiB (stage 316.0: task 2282))"];
9 [labelType="html" label="<br><b>Union</b><br><br>"];
subgraph cluster10 {
isCluster="true";
label="WholeStageCodegen (1)\n \nduration: 108 ms";
11 [labelType="html" label="<br><b>Project</b><br><br>"];
12 [labelType="html" label="<b>ColumnarToRow</b><br><br>number of output rows: 13<br>number of input batches: 1"];
}
13 [labelType="html" label="<b>Scan parquet </b><br><br>number of files read: 1<br>scan time: 102 ms<br>dynamic partition pruning time: 0 ms<br>metadata time: 0 ms<br>size of files read: 18.1 KiB<br>number of output rows: 13<br>number of partitions read: 1"];
subgraph cluster14 {
isCluster="true";
label="WholeStageCodegen (2)\n \nduration: total (min, med, max (stageId: taskId))\n246 ms (27 ms, 54 ms, 68 ms (stage 316.0: task 2286))";
15 [labelType="html" label="<br><b>Project</b><br><br>"];
}
16 [labelType="html" label="<b>Scan json </b><br><br>number of files read: 5<br>dynamic partition pruning time: 0 ms<br>metadata time: 0 ms<br>size of files read: 21.2 KiB<br>number of output rows: 15<br>number of partitions read: 5"];
2->0;
3->2;
4->3;
6->4;
7->6;
8->7;
9->8;
11->9;
12->11;
13->12;
15->9;
16->15;
}
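
The block above is the Graphviz DOT source for the plan DAG (the clusters are WholeStageCodegen stages; edges run from child operator to parent). The labelType="html" attribute is consumed by the Spark UI's dagre-d3 renderer, so plain Graphviz shows the HTML tags as literal text inside the labels, though the graph shape still lays out correctly. A minimal viewing sketch using the Python graphviz package, assuming the DOT text has been saved to a local file named plan.dot (the file name and the graphviz dependency are assumptions, not part of this dump):

# Render the plan DAG from the DOT source above.
# Assumes `pip install graphviz` plus the Graphviz binaries on PATH,
# and that the digraph text was saved verbatim to plan.dot.
from graphviz import Source

with open("plan.dot") as f:
    dot_text = f.read()

# Source wraps an existing DOT string; render() writes plan.svg.
Source(dot_text, filename="plan", format="svg").render(cleanup=True)
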
AdaptiveSparkPlan isFinalPlan=true
SerializeFromObject [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#30137, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#30138, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#30139, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#30140, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#30141, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#30142, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#30143, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#30144, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, 
true), configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#30145, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, 
fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#30146]
WholeStageCodegen (4)
MapPartitions org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@7b63787e, obj#30136: org.apache.spark.sql.delta.actions.SingleAction
DeserializeToObject newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#30135: org.apache.spark.sql.delta.actions.SingleAction
Project [txn#29953, CASE WHEN isnotnull(_extract_path#30158) THEN struct(path, add_path_canonical#30022, partitionValues, _extract_partitionValues#30159, size, _extract_size#30160L, modificationTime, _extract_modificationTime#30161L, dataChange, _extract_dataChange#30162, stats, add_stats_to_use#30008, tags, _extract_tags#30163, deletionVector, _extract_deletionVector#30164, baseRowId, _extract_baseRowId#30165L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#30166L, clusteringProvider, _extract_clusteringProvider#30167) END AS add#30063, CASE WHEN isnotnull(remove#29955.path) THEN if (isnull(remove#29955)) null else named_struct(path, remove_path_canonical#30041, deletionTimestamp, remove#29955.deletionTimestamp, dataChange, remove#29955.dataChange, extendedFileMetadata, remove#29955.extendedFileMetadata, partitionValues, remove#29955.partitionValues, size, remove#29955.size, tags, remove#29955.tags, deletionVector, remove#29955.deletionVector, baseRowId, remove#29955.baseRowId, defaultRowCommitVersion, remove#29955.defaultRowCommitVersion, stats, remove#29955.stats) END AS remove#30088, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962]
Sort [version#29963L ASC NULLS FIRST], false, 0
WholeStageCodegen (3)
Exchange hashpartitioning(coalesce(add_path_canonical#30022, remove_path_canonical#30041), 50), REPARTITION_BY_NUM, [plan_id=9072]
Union
Project [txn#29953, add#29954.path AS _extract_path#30158, add#29954.partitionValues AS _extract_partitionValues#30159, add#29954.size AS _extract_size#30160L, add#29954.modificationTime AS _extract_modificationTime#30161L, add#29954.dataChange AS _extract_dataChange#30162, add#29954.tags AS _extract_tags#30163, add#29954.deletionVector AS _extract_deletionVector#30164, add#29954.baseRowId AS _extract_baseRowId#30165L, add#29954.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#30166L, add#29954.clusteringProvider AS _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add#29954.stats AS add_stats_to_use#30008, CASE WHEN isnotnull(add#29954.path) THEN UDF(add#29954.path) END AS add_path_canonical#30022, CASE WHEN isnotnull(remove#29955.path) THEN UDF(remove#29955.path) END AS remove_path_canonical#30041]
ColumnarToRow
WholeStageCodegen (1)
FileScan parquet [txn#29953,add#29954,remove#29955,metaData#29956,protocol#29957,cdc#29958,checkpointMetadata#29959,sidecar#29960,domainMetadata#29961,commitInfo#29962,version#29963L] Batched: true, DataFilters: [], Format: Parquet, Location: DeltaLogFileIndex(1 paths)[hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanac..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitio...
Project [txn#29975, add#29976.path AS _extract_path#30168, add#29976.partitionValues AS _extract_partitionValues#30169, add#29976.size AS _extract_size#30170L, add#29976.modificationTime AS _extract_modificationTime#30171L, add#29976.dataChange AS _extract_dataChange#30172, add#29976.tags AS _extract_tags#30173, add#29976.deletionVector AS _extract_deletionVector#30174, add#29976.baseRowId AS _extract_baseRowId#30175L, add#29976.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#30176L, add#29976.clusteringProvider AS _extract_clusteringProvider#30177, remove#29977, metaData#29978, protocol#29979, cdc#29980, checkpointMetadata#29981, sidecar#29982, domainMetadata#29983, commitInfo#29984, version#29985L, add#29976.stats AS add_stats_to_use#30147, CASE WHEN isnotnull(add#29976.path) THEN UDF(add#29976.path) END AS add_path_canonical#30178, CASE WHEN isnotnull(remove#29977.path) THEN UDF(remove#29977.path) END AS remove_path_canonical#30179]
WholeStageCodegen (2)
FileScan json [txn#29975,add#29976,remove#29977,metaData#29978,protocol#29979,cdc#29980,checkpointMetadata#29981,sidecar#29982,domainMetadata#29983,commitInfo#29984,version#29985L] Batched: false, DataFilters: [], Format: JSON, Location: DeltaLogFileIndex(5 paths)[hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanac..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitio...
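
The remainder of the dump is Spark's formatted explain output for the same query: the AQE final and initial plans as a numbered tree, followed by a detail section per operator. For reference, this layout is what PySpark's explain(mode="formatted") prints; a minimal, self-contained sketch follows (the toy DataFrame below is only an illustration and is unrelated to the Delta snapshot query in this dump):

# Print a plan in the same formatted style as the section below.
from pyspark.sql import SparkSession, functions as F

spark = SparkSession.builder.appName("explain-demo").getOrCreate()

# Any small query works; this one just forces an exchange and a sort.
df = (spark.range(10)
      .withColumn("version", F.col("id") % 3)
      .repartition(4, "version")
      .sortWithinPartitions("version"))

# mode="formatted" yields the "== Physical Plan ==" layout with numbered
# operators and per-operator details, as in the dump that follows.
df.explain(mode="formatted")
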
== Physical Plan ==
AdaptiveSparkPlan (23)
+- == Final Plan ==
   * SerializeFromObject (13)
   +- MapPartitions (12)
      +- DeserializeToObject (11)
         +- * Project (10)
            +- * Sort (9)
               +- ShuffleQueryStage (8), Statistics(sizeInBytes=37.5 KiB, rowCount=28)
                  +- Exchange (7)
                     +- Union (6)
                        :- * Project (3)
                        :  +- * ColumnarToRow (2)
                        :     +- Scan parquet (1)
                        +- * Project (5)
                           +- Scan json (4)
+- == Initial Plan ==
   SerializeFromObject (22)
   +- MapPartitions (21)
      +- DeserializeToObject (20)
         +- Project (19)
            +- Sort (18)
               +- Exchange (17)
                  +- Union (16)
                     :- Project (14)
                     :  +- Scan parquet (1)
                     +- Project (15)
                        +- Scan json (4)
(1) Scan parquet
Output [11]: [txn#29953, add#29954, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L]
Batched: true
Location: DeltaLogFileIndex [hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/cornerstone/sap-cic-product-productplant/_delta_log/00000000000000000060.checkpoint.parquet]
ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitionValues:map<string,string>,size:bigint,modificationTime:bigint,dataChange:boolean,stats:string,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,clusteringProvider:string>,remove:struct<path:string,deletionTimestamp:bigint,dataChange:boolean,extendedFileMetadata:boolean,partitionValues:map<string,string>,size:bigint,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,stats:string>,metaData:struct<id:string,name:string,description:string,format:struct<provider:string,options:map<string,string>>,schemaString:string,partitionColumns:array<string>,configuration:map<string,string>,createdTime:bigint>,protocol:struct<minReaderVersion:int,minWriterVersion:int,readerFeatures:array<string>,writerFeatures:array<string>>,cdc:struct<path:string,partitionValues:map<string,string>,size:bigint,tags:map<string,string>>,checkpointMetadata:struct<version:bigint,tags:map<string,string>>,sidecar:struct<path:string,sizeInBytes:bigint,modificationTime:bigint,tags:map<string,string>>,domainMetadata:struct<domain:string,configuration:string,removed:boolean>,commitInfo:struct<version:bigint,inCommitTimestamp:bigint,timestamp:timestamp,userId:string,userName:string,operation:string,operationParameters:map<string,string>,job:struct<jobId:string,jobName:string,jobRunId:string,runId:string,jobOwnerId:string,triggerType:string>,notebook:struct<notebookId:string>,clusterId:string,readVersion:bigint,isolationLevel:string,isBlindAppend:boolean,operationMetrics:map<string,string>,userMetadata:string,tags:map<string,string>,engineInfo:string,txnId:string>>
(2) ColumnarToRow [codegen id : 1]
Input [11]: [txn#29953, add#29954, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L]
(3) Project [codegen id : 1]
Output [23]: [txn#29953, add#29954.path AS _extract_path#30158, add#29954.partitionValues AS _extract_partitionValues#30159, add#29954.size AS _extract_size#30160L, add#29954.modificationTime AS _extract_modificationTime#30161L, add#29954.dataChange AS _extract_dataChange#30162, add#29954.tags AS _extract_tags#30163, add#29954.deletionVector AS _extract_deletionVector#30164, add#29954.baseRowId AS _extract_baseRowId#30165L, add#29954.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#30166L, add#29954.clusteringProvider AS _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add#29954.stats AS add_stats_to_use#30008, CASE WHEN isnotnull(add#29954.path) THEN UDF(add#29954.path) END AS add_path_canonical#30022, CASE WHEN isnotnull(remove#29955.path) THEN UDF(remove#29955.path) END AS remove_path_canonical#30041]
Input [11]: [txn#29953, add#29954, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L]
(4) Scan json
Output [11]: [txn#29975, add#29976, remove#29977, metaData#29978, protocol#29979, cdc#29980, checkpointMetadata#29981, sidecar#29982, domainMetadata#29983, commitInfo#29984, version#29985L]
Batched: false
Location: DeltaLogFileIndex [hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/cornerstone/sap-cic-product-productplant/_delta_log/00000000000000000061.json, ... 4 entries]
ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitionValues:map<string,string>,size:bigint,modificationTime:bigint,dataChange:boolean,stats:string,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,clusteringProvider:string>,remove:struct<path:string,deletionTimestamp:bigint,dataChange:boolean,extendedFileMetadata:boolean,partitionValues:map<string,string>,size:bigint,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,stats:string>,metaData:struct<id:string,name:string,description:string,format:struct<provider:string,options:map<string,string>>,schemaString:string,partitionColumns:array<string>,configuration:map<string,string>,createdTime:bigint>,protocol:struct<minReaderVersion:int,minWriterVersion:int,readerFeatures:array<string>,writerFeatures:array<string>>,cdc:struct<path:string,partitionValues:map<string,string>,size:bigint,tags:map<string,string>>,checkpointMetadata:struct<version:bigint,tags:map<string,string>>,sidecar:struct<path:string,sizeInBytes:bigint,modificationTime:bigint,tags:map<string,string>>,domainMetadata:struct<domain:string,configuration:string,removed:boolean>,commitInfo:struct<version:bigint,inCommitTimestamp:bigint,timestamp:timestamp,userId:string,userName:string,operation:string,operationParameters:map<string,string>,job:struct<jobId:string,jobName:string,jobRunId:string,runId:string,jobOwnerId:string,triggerType:string>,notebook:struct<notebookId:string>,clusterId:string,readVersion:bigint,isolationLevel:string,isBlindAppend:boolean,operationMetrics:map<string,string>,userMetadata:string,tags:map<string,string>,engineInfo:string,txnId:string>>
(5) Project [codegen id : 2]
Output [23]: [txn#29975, add#29976.path AS _extract_path#30168, add#29976.partitionValues AS _extract_partitionValues#30169, add#29976.size AS _extract_size#30170L, add#29976.modificationTime AS _extract_modificationTime#30171L, add#29976.dataChange AS _extract_dataChange#30172, add#29976.tags AS _extract_tags#30173, add#29976.deletionVector AS _extract_deletionVector#30174, add#29976.baseRowId AS _extract_baseRowId#30175L, add#29976.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#30176L, add#29976.clusteringProvider AS _extract_clusteringProvider#30177, remove#29977, metaData#29978, protocol#29979, cdc#29980, checkpointMetadata#29981, sidecar#29982, domainMetadata#29983, commitInfo#29984, version#29985L, add#29976.stats AS add_stats_to_use#30147, CASE WHEN isnotnull(add#29976.path) THEN UDF(add#29976.path) END AS add_path_canonical#30178, CASE WHEN isnotnull(remove#29977.path) THEN UDF(remove#29977.path) END AS remove_path_canonical#30179]
Input [11]: [txn#29975, add#29976, remove#29977, metaData#29978, protocol#29979, cdc#29980, checkpointMetadata#29981, sidecar#29982, domainMetadata#29983, commitInfo#29984, version#29985L]
(6) Union
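
Operators (1) through (6) read the Delta transaction log: the checkpoint Parquet file and the five newer JSON commit files are scanned, projected to a common column list, and unioned. A rough, read-only approximation of that union in PySpark, with a placeholder log directory and inferred schemas instead of Delta's fixed SingleAction schema (both are assumptions for illustration):

# Approximate steps (1)-(6): scan the checkpoint parquet and the newer
# JSON commits of a _delta_log directory and union them.
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

log_dir = "/tmp/example_table/_delta_log"  # placeholder path
checkpoint = spark.read.parquet(f"{log_dir}/00000000000000000060.checkpoint.parquet")
commits = spark.read.json(f"{log_dir}/0000000000000000006[1-5].json")

# unionByName tolerates differing column order between the two sources;
# the real plan instead aligns the columns with explicit Project nodes.
actions = checkpoint.unionByName(commits, allowMissingColumns=True)
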
(7) Exchange
Input [23]: [txn#29953, _extract_path#30158, _extract_partitionValues#30159, _extract_size#30160L, _extract_modificationTime#30161L, _extract_dataChange#30162, _extract_tags#30163, _extract_deletionVector#30164, _extract_baseRowId#30165L, _extract_defaultRowCommitVersion#30166L, _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add_stats_to_use#30008, add_path_canonical#30022, remove_path_canonical#30041]
Arguments: hashpartitioning(coalesce(add_path_canonical#30022, remove_path_canonical#30041), 50), REPARTITION_BY_NUM, [plan_id=9072]
(8) ShuffleQueryStage
Output [23]: [txn#29953, _extract_path#30158, _extract_partitionValues#30159, _extract_size#30160L, _extract_modificationTime#30161L, _extract_dataChange#30162, _extract_tags#30163, _extract_deletionVector#30164, _extract_baseRowId#30165L, _extract_defaultRowCommitVersion#30166L, _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add_stats_to_use#30008, add_path_canonical#30022, remove_path_canonical#30041]
Arguments: 0
(9) Sort [codegen id : 3]
Input [23]: [txn#29953, _extract_path#30158, _extract_partitionValues#30159, _extract_size#30160L, _extract_modificationTime#30161L, _extract_dataChange#30162, _extract_tags#30163, _extract_deletionVector#30164, _extract_baseRowId#30165L, _extract_defaultRowCommitVersion#30166L, _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add_stats_to_use#30008, add_path_canonical#30022, remove_path_canonical#30041]
Arguments: [version#29963L ASC NULLS FIRST], false, 0
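
Operators (7) through (9) hash-repartition the unioned actions into 50 partitions by the canonicalized file path (coalescing the add and remove paths) and then sort each partition by version; the `false` in the Sort arguments marks it as a per-partition rather than global sort. A DataFrame-level sketch of the same shape, reusing the column names from the plan on a tiny hypothetical stand-in table:

# Approximate DataFrame equivalent of Exchange (7) + Sort (9).
from pyspark.sql import SparkSession, functions as F

spark = SparkSession.builder.getOrCreate()

# Tiny stand-in with only the columns used below; the real plan carries 23.
actions = spark.createDataFrame(
    [("a.parquet", None, 0), (None, "b.parquet", 1)],
    "add_path_canonical string, remove_path_canonical string, version long",
)

reconciled = (
    actions
    .repartition(50, F.coalesce(F.col("add_path_canonical"),
                                F.col("remove_path_canonical")))
    .sortWithinPartitions(F.col("version").asc_nulls_first())
)
reconciled.explain(mode="formatted")
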
(10) Project [codegen id : 3]
Output [10]: [txn#29953, CASE WHEN isnotnull(_extract_path#30158) THEN struct(path, add_path_canonical#30022, partitionValues, _extract_partitionValues#30159, size, _extract_size#30160L, modificationTime, _extract_modificationTime#30161L, dataChange, _extract_dataChange#30162, stats, add_stats_to_use#30008, tags, _extract_tags#30163, deletionVector, _extract_deletionVector#30164, baseRowId, _extract_baseRowId#30165L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#30166L, clusteringProvider, _extract_clusteringProvider#30167) END AS add#30063, CASE WHEN isnotnull(remove#29955.path) THEN if (isnull(remove#29955)) null else named_struct(path, remove_path_canonical#30041, deletionTimestamp, remove#29955.deletionTimestamp, dataChange, remove#29955.dataChange, extendedFileMetadata, remove#29955.extendedFileMetadata, partitionValues, remove#29955.partitionValues, size, remove#29955.size, tags, remove#29955.tags, deletionVector, remove#29955.deletionVector, baseRowId, remove#29955.baseRowId, defaultRowCommitVersion, remove#29955.defaultRowCommitVersion, stats, remove#29955.stats) END AS remove#30088, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962]
Input [23]: [txn#29953, _extract_path#30158, _extract_partitionValues#30159, _extract_size#30160L, _extract_modificationTime#30161L, _extract_dataChange#30162, _extract_tags#30163, _extract_deletionVector#30164, _extract_baseRowId#30165L, _extract_defaultRowCommitVersion#30166L, _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add_stats_to_use#30008, add_path_canonical#30022, remove_path_canonical#30041]
(11) DeserializeToObject
Input [10]: [txn#29953, add#30063, remove#30088, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962]
Arguments: newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#30135: org.apache.spark.sql.delta.actions.SingleAction
(12) MapPartitions
Input [1]: [obj#30135]
Arguments: org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@7b63787e, obj#30136: org.apache.spark.sql.delta.actions.SingleAction
(13) SerializeFromObject [codegen id : 4]
Input [1]: [obj#30136]
Arguments: [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#30137, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#30138, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#30139, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#30140, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#30141, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#30142, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#30143, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#30144, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, 
true), configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#30145, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, 
fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#30146]
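
The serializer expression above (and the identical one under node (22) below) is what Spark's ExpressionEncoder generates for the Delta SingleAction class: staticinvoke(... UTF8String.fromString ...) converts JVM Strings to Catalyst UTF8String, unwrapoption(...) unwraps scala Option fields, externalmaptocatalyst/mapobjects convert Scala maps and sequences, and named_struct reassembles the nested action structs. A minimal sketch of where expressions of this shape come from, using a hypothetical case class in place of the Delta-internal type:

  import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder

  // Hypothetical stand-in for one Delta action field; not the real SingleAction schema.
  case class Txn(appId: String, version: Long, lastUpdated: Option[Long])

  val enc = ExpressionEncoder[Txn]()
  // Prints serializer expressions of the same shape as the plan above:
  // staticinvoke(UTF8String.fromString, ...), unwrapoption(LongType, ...), assertnotnull(...)
  enc.serializer.foreach(e => println(e.treeString))
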
(14) Project
Output [23]: [txn#29953, add#29954.path AS _extract_path#30158, add#29954.partitionValues AS _extract_partitionValues#30159, add#29954.size AS _extract_size#30160L, add#29954.modificationTime AS _extract_modificationTime#30161L, add#29954.dataChange AS _extract_dataChange#30162, add#29954.tags AS _extract_tags#30163, add#29954.deletionVector AS _extract_deletionVector#30164, add#29954.baseRowId AS _extract_baseRowId#30165L, add#29954.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#30166L, add#29954.clusteringProvider AS _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add#29954.stats AS add_stats_to_use#30008, CASE WHEN isnotnull(add#29954.path) THEN UDF(add#29954.path) END AS add_path_canonical#30022, CASE WHEN isnotnull(remove#29955.path) THEN UDF(remove#29955.path) END AS remove_path_canonical#30041]
Input [11]: [txn#29953, add#29954, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L]
(15) Project
Output [23]: [txn#29975, add#29976.path AS _extract_path#30168, add#29976.partitionValues AS _extract_partitionValues#30169, add#29976.size AS _extract_size#30170L, add#29976.modificationTime AS _extract_modificationTime#30171L, add#29976.dataChange AS _extract_dataChange#30172, add#29976.tags AS _extract_tags#30173, add#29976.deletionVector AS _extract_deletionVector#30174, add#29976.baseRowId AS _extract_baseRowId#30175L, add#29976.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#30176L, add#29976.clusteringProvider AS _extract_clusteringProvider#30177, remove#29977, metaData#29978, protocol#29979, cdc#29980, checkpointMetadata#29981, sidecar#29982, domainMetadata#29983, commitInfo#29984, version#29985L, add#29976.stats AS add_stats_to_use#30147, CASE WHEN isnotnull(add#29976.path) THEN UDF(add#29976.path) END AS add_path_canonical#30178, CASE WHEN isnotnull(remove#29977.path) THEN UDF(remove#29977.path) END AS remove_path_canonical#30179]
Input [11]: [txn#29975, add#29976, remove#29977, metaData#29978, protocol#29979, cdc#29980, checkpointMetadata#29981, sidecar#29982, domainMetadata#29983, commitInfo#29984, version#29985L]
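
Nodes (14) and (15) are the same projection applied to each Union branch: nested fields of the add struct are flattened into top-level _extract_* columns, add.stats is carried along as add_stats_to_use, and add.path / remove.path are canonicalized through a UDF. A rough DataFrame-level equivalent, assuming actionsDf holds the action rows and canonicalizePath stands in for the unnamed UDF in the plan:

  import org.apache.spark.sql.functions._

  // Hypothetical stand-in for the UDF(...) calls in the plan; the real logic is Delta-internal.
  val canonicalizePath = udf((p: String) => p)

  val flattened = actionsDf.select(
    col("txn"),
    col("add.path").as("_extract_path"),
    col("add.partitionValues").as("_extract_partitionValues"),
    col("add.size").as("_extract_size"),
    // ... the remaining add.* fields are extracted the same way ...
    col("remove"), col("metaData"), col("protocol"), col("cdc"),
    col("checkpointMetadata"), col("sidecar"), col("domainMetadata"),
    col("commitInfo"), col("version"),
    col("add.stats").as("add_stats_to_use"),
    when(col("add.path").isNotNull, canonicalizePath(col("add.path"))).as("add_path_canonical"),
    when(col("remove.path").isNotNull, canonicalizePath(col("remove.path"))).as("remove_path_canonical"))
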
(16) Union
(17) Exchange
Input [23]: [txn#29953, _extract_path#30158, _extract_partitionValues#30159, _extract_size#30160L, _extract_modificationTime#30161L, _extract_dataChange#30162, _extract_tags#30163, _extract_deletionVector#30164, _extract_baseRowId#30165L, _extract_defaultRowCommitVersion#30166L, _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add_stats_to_use#30008, add_path_canonical#30022, remove_path_canonical#30041]
Arguments: hashpartitioning(coalesce(add_path_canonical#30022, remove_path_canonical#30041), 50), REPARTITION_BY_NUM, [plan_id=9044]
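
Node (17) is a user-requested repartition of the Union output: REPARTITION_BY_NUM with hashpartitioning over coalesce(add_path_canonical, remove_path_canonical) into 50 partitions, so that add and remove actions referring to the same canonical path land in the same partition. A DataFrame call of the following shape produces exactly this exchange (df is assumed to be the unioned 23-column DataFrame):

  import org.apache.spark.sql.functions.{coalesce, col}

  // Explicit partition count plus partitioning expression -> Exchange hashpartitioning(..., 50), REPARTITION_BY_NUM
  val repartitioned = df.repartition(
    50, coalesce(col("add_path_canonical"), col("remove_path_canonical")))
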
(18) Sort
Input [23]: [txn#29953, _extract_path#30158, _extract_partitionValues#30159, _extract_size#30160L, _extract_modificationTime#30161L, _extract_dataChange#30162, _extract_tags#30163, _extract_deletionVector#30164, _extract_baseRowId#30165L, _extract_defaultRowCommitVersion#30166L, _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add_stats_to_use#30008, add_path_canonical#30022, remove_path_canonical#30041]
Arguments: [version#29963L ASC NULLS FIRST], false, 0
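
Node (18) orders each partition by version ascending; the second argument false means the sort is per-partition rather than global, so no extra shuffle is needed. Continuing the sketch above, the equivalent DataFrame call is:

  import org.apache.spark.sql.functions.col

  // sortWithinPartitions produces a Sort node with global = false, matching the plan.
  val ordered = repartitioned.sortWithinPartitions(col("version"))
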
(19) Project
Output [10]: [txn#29953, CASE WHEN isnotnull(_extract_path#30158) THEN struct(path, add_path_canonical#30022, partitionValues, _extract_partitionValues#30159, size, _extract_size#30160L, modificationTime, _extract_modificationTime#30161L, dataChange, _extract_dataChange#30162, stats, add_stats_to_use#30008, tags, _extract_tags#30163, deletionVector, _extract_deletionVector#30164, baseRowId, _extract_baseRowId#30165L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#30166L, clusteringProvider, _extract_clusteringProvider#30167) END AS add#30063, CASE WHEN isnotnull(remove#29955.path) THEN if (isnull(remove#29955)) null else named_struct(path, remove_path_canonical#30041, deletionTimestamp, remove#29955.deletionTimestamp, dataChange, remove#29955.dataChange, extendedFileMetadata, remove#29955.extendedFileMetadata, partitionValues, remove#29955.partitionValues, size, remove#29955.size, tags, remove#29955.tags, deletionVector, remove#29955.deletionVector, baseRowId, remove#29955.baseRowId, defaultRowCommitVersion, remove#29955.defaultRowCommitVersion, stats, remove#29955.stats) END AS remove#30088, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962]
Input [23]: [txn#29953, _extract_path#30158, _extract_partitionValues#30159, _extract_size#30160L, _extract_modificationTime#30161L, _extract_dataChange#30162, _extract_tags#30163, _extract_deletionVector#30164, _extract_baseRowId#30165L, _extract_defaultRowCommitVersion#30166L, _extract_clusteringProvider#30167, remove#29955, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962, version#29963L, add_stats_to_use#30008, add_path_canonical#30022, remove_path_canonical#30041]
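
Node (19) folds the flattened columns back into nested add and remove structs, substituting the canonical path and add_stats_to_use for the original values, and drops the helper columns. A partial sketch of the add side, continuing from the previous sketch (column and field names taken from the plan; the remove side is analogous):

  import org.apache.spark.sql.functions.{col, struct, when}

  val rebuilt = ordered.withColumn("add",
    when(col("_extract_path").isNotNull,
      struct(
        col("add_path_canonical").as("path"),
        col("_extract_partitionValues").as("partitionValues"),
        col("_extract_size").as("size"),
        col("_extract_modificationTime").as("modificationTime"),
        col("_extract_dataChange").as("dataChange"),
        col("add_stats_to_use").as("stats")
        // ... tags, deletionVector, baseRowId, defaultRowCommitVersion, clusteringProvider ...
      )))
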
(20) DeserializeToObject
Input [10]: [txn#29953, add#30063, remove#30088, metaData#29956, protocol#29957, cdc#29958, checkpointMetadata#29959, sidecar#29960, domainMetadata#29961, commitInfo#29962]
Arguments: newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#30135: org.apache.spark.sql.delta.actions.SingleAction
(21) MapPartitions
Input [1]: [obj#30135]
Arguments: org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@7b63787e, obj#30136: org.apache.spark.sql.delta.actions.SingleAction
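
Nodes (20) through (22) are a typed-Dataset round trip: DeserializeToObject turns each row back into a SingleAction JVM object, MapPartitions runs a closure defined in org.apache.spark.sql.delta.Snapshot over the iterator of objects (presumably Delta's log-replay step), and the SerializeFromObject that follows re-encodes the results as Catalyst rows. A mapPartitions call on a typed Dataset typically compiles to exactly this trio; a self-contained sketch with a stand-in case class (assumes an active SparkSession named spark):

  // Minimal illustration only, not Delta's actual replay logic.
  case class Action(path: String, version: Long)

  import spark.implicits._

  val ds = Seq(Action("a", 0L), Action("a", 1L), Action("b", 0L)).toDS()
  val latest = ds.mapPartitions { it =>
    // keep the highest version seen per path within the partition (toy stand-in logic)
    it.toSeq.groupBy(_.path).values.map(_.maxBy(_.version)).iterator
  }
  latest.explain("formatted")  // shows DeserializeToObject -> MapPartitions -> SerializeFromObject
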
(22) SerializeFromObject
Input [1]: [obj#30136]
Arguments: [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#30137, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#30138, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#30139, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#30140, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#30141, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#30142, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#30143, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#30144, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, 
true), configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#30145, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, 
fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#30146]
(23) AdaptiveSparkPlan
Output [10]: [txn#30137, add#30138, remove#30139, metaData#30140, protocol#30141, cdc#30142, checkpointMetadata#30143, sidecar#30144, domainMetadata#30145, commitInfo#30146]
Arguments: isFinalPlan=true
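
The whole tree sits under an AdaptiveSparkPlan with isFinalPlan=true: with adaptive query execution enabled, Spark re-optimizes between shuffle stages, and the plan shown in the UI's SQL tab after the query finishes is the final, executed plan (the same node reports isFinalPlan=false if printed before execution). A plan in this numbered Output/Input/Arguments layout can be printed for any DataFrame as follows (df is a placeholder):

  spark.conf.set("spark.sql.adaptive.enabled", "true")  // AQE wraps the plan in AdaptiveSparkPlan
  df.explain("formatted")                               // numbered operators with Output/Input/Arguments sections
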