digraph G {
0 [labelType="html" label="<br><b>AdaptiveSparkPlan</b><br><br>"];
subgraph cluster1 {
isCluster="true";
label="WholeStageCodegen (4)";
2 [labelType="html" label="<br><b>SerializeFromObject</b><br><br>"];
}
3 [labelType="html" label="<br><b>MapPartitions</b><br><br>"];
4 [labelType="html" label="<br><b>DeserializeToObject</b><br><br>"];
subgraph cluster5 {
isCluster="true";
label="WholeStageCodegen (3)";
6 [labelType="html" label="<br><b>Project</b><br><br>"];
7 [labelType="html" label="<br><b>Sort</b><br><br>"];
}
8 [labelType="html" label="<b>Exchange</b><br><br>shuffle records written: 32<br>shuffle write time total (min, med, max (stageId: taskId))<br>7 ms (0 ms, 0 ms, 1 ms (stage 351.0: task 2620))<br>data size total (min, med, max (stageId: taskId))<br>32.2 KiB (3.3 KiB, 3.3 KiB, 5.8 KiB (stage 351.0: task 2616))<br>number of partitions: 50<br>shuffle bytes written total (min, med, max (stageId: taskId))<br>16.2 KiB (1779.0 B, 1791.0 B, 2.3 KiB (stage 351.0: task 2616))"];
9 [labelType="html" label="<br><b>Union</b><br><br>"];
subgraph cluster10 {
isCluster="true";
label="WholeStageCodegen (1)\n \nduration: 107 ms";
11 [labelType="html" label="<br><b>Project</b><br><br>"];
12 [labelType="html" label="<b>ColumnarToRow</b><br><br>number of output rows: 8<br>number of input batches: 1"];
}
13 [labelType="html" label="<b>Scan parquet </b><br><br>number of files read: 1<br>scan time: 103 ms<br>dynamic partition pruning time: 0 ms<br>metadata time: 0 ms<br>size of files read: 26.3 KiB<br>number of output rows: 8<br>number of partitions read: 1"];
subgraph cluster14 {
isCluster="true";
label="WholeStageCodegen (2)\n \nduration: total (min, med, max (stageId: taskId))\n264 ms (29 ms, 34 ms, 38 ms (stage 351.0: task 2618))";
15 [labelType="html" label="<br><b>Project</b><br><br>"];
}
16 [labelType="html" label="<b>Scan json </b><br><br>number of files read: 8<br>dynamic partition pruning time: 0 ms<br>metadata time: 0 ms<br>size of files read: 18.5 KiB<br>number of output rows: 24<br>number of partitions read: 8"];
2->0;
3->2;
4->3;
6->4;
7->6;
8->7;
9->8;
11->9;
12->11;
13->12;
15->9;
16->15;
}
AdaptiveSparkPlan isFinalPlan=true
SerializeFromObject [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#33844, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#33845, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#33846, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#33847, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#33848, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#33849, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#33850, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#33851, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, 
true), configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#33852, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, 
fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#33853]
WholeStageCodegen (4)
MapPartitions org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@766591c4, obj#33843: org.apache.spark.sql.delta.actions.SingleAction
DeserializeToObject newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#33842: org.apache.spark.sql.delta.actions.SingleAction
Project [txn#33660, CASE WHEN isnotnull(_extract_path#33865) THEN struct(path, add_path_canonical#33729, partitionValues, _extract_partitionValues#33866, size, _extract_size#33867L, modificationTime, _extract_modificationTime#33868L, dataChange, _extract_dataChange#33869, stats, add_stats_to_use#33715, tags, _extract_tags#33870, deletionVector, _extract_deletionVector#33871, baseRowId, _extract_baseRowId#33872L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#33873L, clusteringProvider, _extract_clusteringProvider#33874) END AS add#33770, CASE WHEN isnotnull(remove#33662.path) THEN if (isnull(remove#33662)) null else named_struct(path, remove_path_canonical#33748, deletionTimestamp, remove#33662.deletionTimestamp, dataChange, remove#33662.dataChange, extendedFileMetadata, remove#33662.extendedFileMetadata, partitionValues, remove#33662.partitionValues, size, remove#33662.size, tags, remove#33662.tags, deletionVector, remove#33662.deletionVector, baseRowId, remove#33662.baseRowId, defaultRowCommitVersion, remove#33662.defaultRowCommitVersion, stats, remove#33662.stats) END AS remove#33795, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669]
Sort [version#33670L ASC NULLS FIRST], false, 0
WholeStageCodegen (3)
Exchange hashpartitioning(coalesce(add_path_canonical#33729, remove_path_canonical#33748), 50), REPARTITION_BY_NUM, [plan_id=10403]
Union
Project [txn#33660, add#33661.path AS _extract_path#33865, add#33661.partitionValues AS _extract_partitionValues#33866, add#33661.size AS _extract_size#33867L, add#33661.modificationTime AS _extract_modificationTime#33868L, add#33661.dataChange AS _extract_dataChange#33869, add#33661.tags AS _extract_tags#33870, add#33661.deletionVector AS _extract_deletionVector#33871, add#33661.baseRowId AS _extract_baseRowId#33872L, add#33661.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#33873L, add#33661.clusteringProvider AS _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add#33661.stats AS add_stats_to_use#33715, CASE WHEN isnotnull(add#33661.path) THEN UDF(add#33661.path) END AS add_path_canonical#33729, CASE WHEN isnotnull(remove#33662.path) THEN UDF(remove#33662.path) END AS remove_path_canonical#33748]
ColumnarToRow
WholeStageCodegen (1)
FileScan parquet [txn#33660,add#33661,remove#33662,metaData#33663,protocol#33664,cdc#33665,checkpointMetadata#33666,sidecar#33667,domainMetadata#33668,commitInfo#33669,version#33670L] Batched: true, DataFilters: [], Format: Parquet, Location: DeltaLogFileIndex(1 paths)[hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanac..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitio...
Project [txn#33682, add#33683.path AS _extract_path#33875, add#33683.partitionValues AS _extract_partitionValues#33876, add#33683.size AS _extract_size#33877L, add#33683.modificationTime AS _extract_modificationTime#33878L, add#33683.dataChange AS _extract_dataChange#33879, add#33683.tags AS _extract_tags#33880, add#33683.deletionVector AS _extract_deletionVector#33881, add#33683.baseRowId AS _extract_baseRowId#33882L, add#33683.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#33883L, add#33683.clusteringProvider AS _extract_clusteringProvider#33884, remove#33684, metaData#33685, protocol#33686, cdc#33687, checkpointMetadata#33688, sidecar#33689, domainMetadata#33690, commitInfo#33691, version#33692L, add#33683.stats AS add_stats_to_use#33854, CASE WHEN isnotnull(add#33683.path) THEN UDF(add#33683.path) END AS add_path_canonical#33885, CASE WHEN isnotnull(remove#33684.path) THEN UDF(remove#33684.path) END AS remove_path_canonical#33886]
WholeStageCodegen (2)
FileScan json [txn#33682,add#33683,remove#33684,metaData#33685,protocol#33686,cdc#33687,checkpointMetadata#33688,sidecar#33689,domainMetadata#33690,commitInfo#33691,version#33692L] Batched: false, DataFilters: [], Format: JSON, Location: DeltaLogFileIndex(8 paths)[hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanac..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitio...
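The "== Physical Plan ==" section that follows is Spark's formatted explain output for the same query: a numbered operator tree plus per-operator details. As a point of reference only, that layout can be reproduced for any DataFrame with Dataset.explain("formatted"); a minimal sketch assuming a local SparkSession and a toy DataFrame (not Delta's internal log-replay query shown above):

import org.apache.spark.sql.SparkSession

object FormattedExplainDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("formatted-explain-demo").getOrCreate()
    import spark.implicits._

    // Toy DataFrame; any query works. explain("formatted") prints a numbered operator
    // tree followed by per-operator details, the same layout as the section below.
    val df = Seq((20L, "a.parquet"), (21L, "b.json")).toDF("version", "path")
    df.repartition(2, $"path").sortWithinPartitions($"version").explain("formatted")

    spark.stop()
  }
}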
== Physical Plan ==
AdaptiveSparkPlan (23)
+- == Final Plan ==
   * SerializeFromObject (13)
   +- MapPartitions (12)
      +- DeserializeToObject (11)
         +- * Project (10)
            +- * Sort (9)
               +- ShuffleQueryStage (8), Statistics(sizeInBytes=32.2 KiB, rowCount=32)
                  +- Exchange (7)
                     +- Union (6)
                        :- * Project (3)
                        :  +- * ColumnarToRow (2)
                        :     +- Scan parquet (1)
                        +- * Project (5)
                           +- Scan json (4)
+- == Initial Plan ==
   SerializeFromObject (22)
   +- MapPartitions (21)
      +- DeserializeToObject (20)
         +- Project (19)
            +- Sort (18)
               +- Exchange (17)
                  +- Union (16)
                     :- Project (14)
                     :  +- Scan parquet (1)
                     +- Project (15)
                        +- Scan json (4)
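Structurally, both plans replay a Delta transaction log: the latest checkpoint (one parquet file) and the subsequent JSON commits (eight files) are unioned, shuffled so that actions touching the same data file land in the same partition, and sorted by commit version within each partition before being processed as typed SingleAction objects. A rough sketch of that shape over raw _delta_log files follows; the directory path, the path_key column and the withVersion helper are hypothetical, the 50 partitions and coalesce(add.path, remove.path) hash key come from the plan above, and the path-canonicalizing UDF used by the real query is omitted.

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions.{coalesce, col, input_file_name, regexp_extract}

object DeltaLogReplayShape {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("delta-log-replay-shape").getOrCreate()

    // Hypothetical _delta_log directory; the real HDLFS paths are truncated in the plan above.
    val logDir = "/path/to/table/_delta_log"

    // Approximate the scan-supplied `version` column from the log file name
    // (in the plan above the DeltaLogFileIndex-backed scans provide it directly).
    def withVersion(df: DataFrame): DataFrame =
      df.withColumn("version",
        regexp_extract(input_file_name(), "(\\d+)\\.(?:checkpoint\\.parquet|json)$", 1).cast("long"))

    val checkpointRaw = spark.read.parquet(s"$logDir/00000000000000000020.checkpoint.parquet")
    // Delta reads the JSON commits with its Action schema; reusing the checkpoint's schema is an approximation.
    val commitsRaw = spark.read.schema(checkpointRaw.schema).json(s"$logDir/*.json")

    // Union -> Exchange(hashpartitioning(coalesce(add path, remove path), 50)) -> Sort(version),
    // i.e. the shape of operators (6), (7) and (9) above. The real query canonicalizes
    // both paths through a UDF before hashing.
    val replayed = withVersion(checkpointRaw).unionByName(withVersion(commitsRaw))
      .withColumn("path_key", coalesce(col("add.path"), col("remove.path")))
      .repartition(50, col("path_key"))
      .sortWithinPartitions(col("version"))

    replayed.explain("formatted")
    spark.stop()
  }
}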
(1) Scan parquet
Output [11]: [txn#33660, add#33661, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L]
Batched: true
Location: DeltaLogFileIndex [hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/order-grouping-profile/_delta_log/00000000000000000020.checkpoint.parquet]
ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitionValues:map<string,string>,size:bigint,modificationTime:bigint,dataChange:boolean,stats:string,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,clusteringProvider:string>,remove:struct<path:string,deletionTimestamp:bigint,dataChange:boolean,extendedFileMetadata:boolean,partitionValues:map<string,string>,size:bigint,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,stats:string>,metaData:struct<id:string,name:string,description:string,format:struct<provider:string,options:map<string,string>>,schemaString:string,partitionColumns:array<string>,configuration:map<string,string>,createdTime:bigint>,protocol:struct<minReaderVersion:int,minWriterVersion:int,readerFeatures:array<string>,writerFeatures:array<string>>,cdc:struct<path:string,partitionValues:map<string,string>,size:bigint,tags:map<string,string>>,checkpointMetadata:struct<version:bigint,tags:map<string,string>>,sidecar:struct<path:string,sizeInBytes:bigint,modificationTime:bigint,tags:map<string,string>>,domainMetadata:struct<domain:string,configuration:string,removed:boolean>,commitInfo:struct<version:bigint,inCommitTimestamp:bigint,timestamp:timestamp,userId:string,userName:string,operation:string,operationParameters:map<string,string>,job:struct<jobId:string,jobName:string,jobRunId:string,runId:string,jobOwnerId:string,triggerType:string>,notebook:struct<notebookId:string>,clusterId:string,readVersion:bigint,isolationLevel:string,isBlindAppend:boolean,operationMetrics:map<string,string>,userMetadata:string,tags:map<string,string>,engineInfo:string,txnId:string>>
(2) ColumnarToRow [codegen id : 1]
Input [11]: [txn#33660, add#33661, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L]
(3) Project [codegen id : 1]
Output [23]: [txn#33660, add#33661.path AS _extract_path#33865, add#33661.partitionValues AS _extract_partitionValues#33866, add#33661.size AS _extract_size#33867L, add#33661.modificationTime AS _extract_modificationTime#33868L, add#33661.dataChange AS _extract_dataChange#33869, add#33661.tags AS _extract_tags#33870, add#33661.deletionVector AS _extract_deletionVector#33871, add#33661.baseRowId AS _extract_baseRowId#33872L, add#33661.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#33873L, add#33661.clusteringProvider AS _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add#33661.stats AS add_stats_to_use#33715, CASE WHEN isnotnull(add#33661.path) THEN UDF(add#33661.path) END AS add_path_canonical#33729, CASE WHEN isnotnull(remove#33662.path) THEN UDF(remove#33662.path) END AS remove_path_canonical#33748]
Input [11]: [txn#33660, add#33661, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L]
(4) Scan json
Output [11]: [txn#33682, add#33683, remove#33684, metaData#33685, protocol#33686, cdc#33687, checkpointMetadata#33688, sidecar#33689, domainMetadata#33690, commitInfo#33691, version#33692L]
Batched: false
Location: DeltaLogFileIndex [hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/order-grouping-profile/_delta_log/00000000000000000021.json, ... 7 entries]
ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitionValues:map<string,string>,size:bigint,modificationTime:bigint,dataChange:boolean,stats:string,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,clusteringProvider:string>,remove:struct<path:string,deletionTimestamp:bigint,dataChange:boolean,extendedFileMetadata:boolean,partitionValues:map<string,string>,size:bigint,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,stats:string>,metaData:struct<id:string,name:string,description:string,format:struct<provider:string,options:map<string,string>>,schemaString:string,partitionColumns:array<string>,configuration:map<string,string>,createdTime:bigint>,protocol:struct<minReaderVersion:int,minWriterVersion:int,readerFeatures:array<string>,writerFeatures:array<string>>,cdc:struct<path:string,partitionValues:map<string,string>,size:bigint,tags:map<string,string>>,checkpointMetadata:struct<version:bigint,tags:map<string,string>>,sidecar:struct<path:string,sizeInBytes:bigint,modificationTime:bigint,tags:map<string,string>>,domainMetadata:struct<domain:string,configuration:string,removed:boolean>,commitInfo:struct<version:bigint,inCommitTimestamp:bigint,timestamp:timestamp,userId:string,userName:string,operation:string,operationParameters:map<string,string>,job:struct<jobId:string,jobName:string,jobRunId:string,runId:string,jobOwnerId:string,triggerType:string>,notebook:struct<notebookId:string>,clusterId:string,readVersion:bigint,isolationLevel:string,isBlindAppend:boolean,operationMetrics:map<string,string>,userMetadata:string,tags:map<string,string>,engineInfo:string,txnId:string>>
(5) Project [codegen id : 2]
Output [23]: [txn#33682, add#33683.path AS _extract_path#33875, add#33683.partitionValues AS _extract_partitionValues#33876, add#33683.size AS _extract_size#33877L, add#33683.modificationTime AS _extract_modificationTime#33878L, add#33683.dataChange AS _extract_dataChange#33879, add#33683.tags AS _extract_tags#33880, add#33683.deletionVector AS _extract_deletionVector#33881, add#33683.baseRowId AS _extract_baseRowId#33882L, add#33683.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#33883L, add#33683.clusteringProvider AS _extract_clusteringProvider#33884, remove#33684, metaData#33685, protocol#33686, cdc#33687, checkpointMetadata#33688, sidecar#33689, domainMetadata#33690, commitInfo#33691, version#33692L, add#33683.stats AS add_stats_to_use#33854, CASE WHEN isnotnull(add#33683.path) THEN UDF(add#33683.path) END AS add_path_canonical#33885, CASE WHEN isnotnull(remove#33684.path) THEN UDF(remove#33684.path) END AS remove_path_canonical#33886]
Input [11]: [txn#33682, add#33683, remove#33684, metaData#33685, protocol#33686, cdc#33687, checkpointMetadata#33688, sidecar#33689, domainMetadata#33690, commitInfo#33691, version#33692L]
(6) Union
(7) Exchange
Input [23]: [txn#33660, _extract_path#33865, _extract_partitionValues#33866, _extract_size#33867L, _extract_modificationTime#33868L, _extract_dataChange#33869, _extract_tags#33870, _extract_deletionVector#33871, _extract_baseRowId#33872L, _extract_defaultRowCommitVersion#33873L, _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add_stats_to_use#33715, add_path_canonical#33729, remove_path_canonical#33748]
Arguments: hashpartitioning(coalesce(add_path_canonical#33729, remove_path_canonical#33748), 50), REPARTITION_BY_NUM, [plan_id=10403]
(8) ShuffleQueryStage
Output [23]: [txn#33660, _extract_path#33865, _extract_partitionValues#33866, _extract_size#33867L, _extract_modificationTime#33868L, _extract_dataChange#33869, _extract_tags#33870, _extract_deletionVector#33871, _extract_baseRowId#33872L, _extract_defaultRowCommitVersion#33873L, _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add_stats_to_use#33715, add_path_canonical#33729, remove_path_canonical#33748]
Arguments: 0
(9) Sort [codegen id : 3]
Input [23]: [txn#33660, _extract_path#33865, _extract_partitionValues#33866, _extract_size#33867L, _extract_modificationTime#33868L, _extract_dataChange#33869, _extract_tags#33870, _extract_deletionVector#33871, _extract_baseRowId#33872L, _extract_defaultRowCommitVersion#33873L, _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add_stats_to_use#33715, add_path_canonical#33729, remove_path_canonical#33748]
Arguments: [version#33670L ASC NULLS FIRST], false, 0
(10) Project [codegen id : 3]
Output [10]: [txn#33660, CASE WHEN isnotnull(_extract_path#33865) THEN struct(path, add_path_canonical#33729, partitionValues, _extract_partitionValues#33866, size, _extract_size#33867L, modificationTime, _extract_modificationTime#33868L, dataChange, _extract_dataChange#33869, stats, add_stats_to_use#33715, tags, _extract_tags#33870, deletionVector, _extract_deletionVector#33871, baseRowId, _extract_baseRowId#33872L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#33873L, clusteringProvider, _extract_clusteringProvider#33874) END AS add#33770, CASE WHEN isnotnull(remove#33662.path) THEN if (isnull(remove#33662)) null else named_struct(path, remove_path_canonical#33748, deletionTimestamp, remove#33662.deletionTimestamp, dataChange, remove#33662.dataChange, extendedFileMetadata, remove#33662.extendedFileMetadata, partitionValues, remove#33662.partitionValues, size, remove#33662.size, tags, remove#33662.tags, deletionVector, remove#33662.deletionVector, baseRowId, remove#33662.baseRowId, defaultRowCommitVersion, remove#33662.defaultRowCommitVersion, stats, remove#33662.stats) END AS remove#33795, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669]
Input [23]: [txn#33660, _extract_path#33865, _extract_partitionValues#33866, _extract_size#33867L, _extract_modificationTime#33868L, _extract_dataChange#33869, _extract_tags#33870, _extract_deletionVector#33871, _extract_baseRowId#33872L, _extract_defaultRowCommitVersion#33873L, _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add_stats_to_use#33715, add_path_canonical#33729, remove_path_canonical#33748]
(11) DeserializeToObject
Input [10]: [txn#33660, add#33770, remove#33795, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669]
Arguments: newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#33842: org.apache.spark.sql.delta.actions.SingleAction
(12) MapPartitions
Input [1]: [obj#33842]
Arguments: org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@766591c4, obj#33843: org.apache.spark.sql.delta.actions.SingleAction
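The DeserializeToObject / MapPartitions / SerializeFromObject trio in operators (11)-(13) is the footprint of a typed Dataset.mapPartitions call (here Delta's Snapshot lambda over SingleAction). A minimal sketch of the same pattern, using a hypothetical Action case class rather than Delta's actual types:

import org.apache.spark.sql.SparkSession

// Hypothetical action type standing in for org.apache.spark.sql.delta.actions.SingleAction.
case class Action(path: String, version: Long)

object TypedMapPartitions {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("typed-map-partitions").getOrCreate()
    import spark.implicits._

    val ds = Seq(Action("a.parquet", 20L), Action("b.parquet", 21L)).toDS()

    // Routing rows through a typed lambda makes Spark deserialize each row into an object,
    // run the function, and serialize the results back into rows -- the same
    // DeserializeToObject -> MapPartitions -> SerializeFromObject operators as above.
    val mapped = ds.mapPartitions(_.map(a => a.copy(version = a.version + 1)))

    mapped.explain("formatted")
    spark.stop()
  }
}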
(13) SerializeFromObject [codegen id : 4]
Input [1]: [obj#33843]
Arguments: [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#33844, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#33845, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#33846, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#33847, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#33848, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#33849, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#33850, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#33851, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, 
true), configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#33852, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, 
fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#33853]
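
The SerializeFromObject expression above is the encoder-generated serializer for Delta's SingleAction case class: each Option field becomes an unwrapoption(...), each String goes through UTF8String.fromString, and each Map goes through externalmaptocatalyst. A minimal sketch of how that pattern arises, assuming only a local SparkSession and hypothetical case classes (these are stand-ins, not the Delta classes themselves):

import org.apache.spark.sql.SparkSession

object EncoderPlanSketch {
  // Hypothetical stand-ins for the Delta action classes; the Option and Map fields are
  // what produce the unwrapoption / externalmaptocatalyst calls in the serializer.
  case class Txn(appId: String, version: Long, lastUpdated: Option[Long])
  case class Action(txn: Txn, tags: Map[String, String])

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("encoder-plan").getOrCreate()
    import spark.implicits._
    val ds = Seq(Action(Txn("app-1", 3L, Some(42L)), Map("k" -> "v"))).toDS()
    // A typed identity map forces deserialize/serialize; "formatted" explain prints a
    // SerializeFromObject node with the same shape of expressions as above.
    ds.map(identity).explain("formatted")
    spark.stop()
  }
}
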
(14) Project
Output [23]: [txn#33660, add#33661.path AS _extract_path#33865, add#33661.partitionValues AS _extract_partitionValues#33866, add#33661.size AS _extract_size#33867L, add#33661.modificationTime AS _extract_modificationTime#33868L, add#33661.dataChange AS _extract_dataChange#33869, add#33661.tags AS _extract_tags#33870, add#33661.deletionVector AS _extract_deletionVector#33871, add#33661.baseRowId AS _extract_baseRowId#33872L, add#33661.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#33873L, add#33661.clusteringProvider AS _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add#33661.stats AS add_stats_to_use#33715, CASE WHEN isnotnull(add#33661.path) THEN UDF(add#33661.path) END AS add_path_canonical#33729, CASE WHEN isnotnull(remove#33662.path) THEN UDF(remove#33662.path) END AS remove_path_canonical#33748]
Input [11]: [txn#33660, add#33661, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L]
(15) Project
Output [23]: [txn#33682, add#33683.path AS _extract_path#33875, add#33683.partitionValues AS _extract_partitionValues#33876, add#33683.size AS _extract_size#33877L, add#33683.modificationTime AS _extract_modificationTime#33878L, add#33683.dataChange AS _extract_dataChange#33879, add#33683.tags AS _extract_tags#33880, add#33683.deletionVector AS _extract_deletionVector#33881, add#33683.baseRowId AS _extract_baseRowId#33882L, add#33683.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#33883L, add#33683.clusteringProvider AS _extract_clusteringProvider#33884, remove#33684, metaData#33685, protocol#33686, cdc#33687, checkpointMetadata#33688, sidecar#33689, domainMetadata#33690, commitInfo#33691, version#33692L, add#33683.stats AS add_stats_to_use#33854, CASE WHEN isnotnull(add#33683.path) THEN UDF(add#33683.path) END AS add_path_canonical#33885, CASE WHEN isnotnull(remove#33684.path) THEN UDF(remove#33684.path) END AS remove_path_canonical#33886]
Input [11]: [txn#33682, add#33683, remove#33684, metaData#33685, protocol#33686, cdc#33687, checkpointMetadata#33688, sidecar#33689, domainMetadata#33690, commitInfo#33691, version#33692L]
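
Nodes (14) and (15) flatten the nested add struct into the _extract_* columns and compute canonical paths through an unnamed UDF. A hedged, spark-shell style sketch of an equivalent untyped projection follows; canonicalizePath, FileRef, State and stateDF are hypothetical names, and the UDF body is only a placeholder for whatever canonicalization the real UDF performs:

// spark-shell style: assumes an active `spark` session with spark.implicits._ in scope.
import org.apache.spark.sql.functions.{col, udf, when}
import spark.implicits._

case class FileRef(path: String, stats: String)
case class State(add: FileRef, remove: FileRef, version: Long)
val stateDF = Seq(State(FileRef("tbl/./part-0.parquet", "{}"), null, 0L)).toDF()

// Hypothetical stand-in for the unnamed UDF(...) the plan applies to file paths.
val canonicalizePath = udf((p: String) => new java.net.URI(p).normalize().toString)

val flattened = stateDF
  .withColumn("_extract_path", col("add.path"))
  .withColumn("add_stats_to_use", col("add.stats"))
  .withColumn("add_path_canonical",
    when(col("add.path").isNotNull, canonicalizePath(col("add.path"))))
  .withColumn("remove_path_canonical",
    when(col("remove.path").isNotNull, canonicalizePath(col("remove.path"))))
flattened.explain("formatted")
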
(16) Union
(17) Exchange
Input [23]: [txn#33660, _extract_path#33865, _extract_partitionValues#33866, _extract_size#33867L, _extract_modificationTime#33868L, _extract_dataChange#33869, _extract_tags#33870, _extract_deletionVector#33871, _extract_baseRowId#33872L, _extract_defaultRowCommitVersion#33873L, _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add_stats_to_use#33715, add_path_canonical#33729, remove_path_canonical#33748]
Arguments: hashpartitioning(coalesce(add_path_canonical#33729, remove_path_canonical#33748), 50), REPARTITION_BY_NUM, [plan_id=10375]
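
The REPARTITION_BY_NUM exchange in node (17) is the shape an explicit repartition with both a fixed partition count and partitioning expressions typically produces. A spark-shell style sketch, reusing the hypothetical flattened DataFrame from the previous sketch (the column names come from the plan, everything else is assumed):

import org.apache.spark.sql.functions.{coalesce, col}

// repartition(n, expr) compiles to Exchange hashpartitioning(expr, n); with an explicit
// count it is usually tagged REPARTITION_BY_NUM, as in node (17).
val shuffled = flattened.repartition(50,
  coalesce(col("add_path_canonical"), col("remove_path_canonical")))
shuffled.explain("formatted")
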
(18) Sort
Input [23]: [txn#33660, _extract_path#33865, _extract_partitionValues#33866, _extract_size#33867L, _extract_modificationTime#33868L, _extract_dataChange#33869, _extract_tags#33870, _extract_deletionVector#33871, _extract_baseRowId#33872L, _extract_defaultRowCommitVersion#33873L, _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add_stats_to_use#33715, add_path_canonical#33729, remove_path_canonical#33748]
Arguments: [version#33670L ASC NULLS FIRST], false, 0
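
In node (18) the second Arguments flag is false, meaning the sort is per shuffle partition rather than global, ordering rows by version ascending, presumably so that later log entries for the same path are seen last during replay. A spark-shell style sketch of the call that yields this node shape, again on the hypothetical shuffled DataFrame:

import org.apache.spark.sql.functions.col

// sortWithinPartitions produces a Sort node with global = false, as in node (18).
val ordered = shuffled.sortWithinPartitions(col("version").asc_nulls_first)
ordered.explain("formatted")
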
(19) Project
Output [10]: [txn#33660, CASE WHEN isnotnull(_extract_path#33865) THEN struct(path, add_path_canonical#33729, partitionValues, _extract_partitionValues#33866, size, _extract_size#33867L, modificationTime, _extract_modificationTime#33868L, dataChange, _extract_dataChange#33869, stats, add_stats_to_use#33715, tags, _extract_tags#33870, deletionVector, _extract_deletionVector#33871, baseRowId, _extract_baseRowId#33872L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#33873L, clusteringProvider, _extract_clusteringProvider#33874) END AS add#33770, CASE WHEN isnotnull(remove#33662.path) THEN if (isnull(remove#33662)) null else named_struct(path, remove_path_canonical#33748, deletionTimestamp, remove#33662.deletionTimestamp, dataChange, remove#33662.dataChange, extendedFileMetadata, remove#33662.extendedFileMetadata, partitionValues, remove#33662.partitionValues, size, remove#33662.size, tags, remove#33662.tags, deletionVector, remove#33662.deletionVector, baseRowId, remove#33662.baseRowId, defaultRowCommitVersion, remove#33662.defaultRowCommitVersion, stats, remove#33662.stats) END AS remove#33795, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669]
Input [23]: [txn#33660, _extract_path#33865, _extract_partitionValues#33866, _extract_size#33867L, _extract_modificationTime#33868L, _extract_dataChange#33869, _extract_tags#33870, _extract_deletionVector#33871, _extract_baseRowId#33872L, _extract_defaultRowCommitVersion#33873L, _extract_clusteringProvider#33874, remove#33662, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669, version#33670L, add_stats_to_use#33715, add_path_canonical#33729, remove_path_canonical#33748]
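
Node (19) rebuilds the add and remove structs from the flattened _extract_* columns, substituting the canonical path and the chosen stats string. A hedged sketch of the same idea with struct() and when(), on the hypothetical ordered DataFrame; only a few of the 23 input columns are shown:

import org.apache.spark.sql.functions.{col, struct, when}

// Reassemble a reduced `add` struct; field names come from the aliases inside struct().
val rebuilt = ordered.select(
  when(col("_extract_path").isNotNull,
    struct(
      col("add_path_canonical").as("path"),
      col("add_stats_to_use").as("stats"))).as("add"),
  col("version"))
rebuilt.explain("formatted")
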
(20) DeserializeToObject
Input [10]: [txn#33660, add#33770, remove#33795, metaData#33663, protocol#33664, cdc#33665, checkpointMetadata#33666, sidecar#33667, domainMetadata#33668, commitInfo#33669]
Arguments: newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#33842: org.apache.spark.sql.delta.actions.SingleAction
(21) MapPartitions
Input [1]: [obj#33842]
Arguments: org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@766591c4, obj#33843: org.apache.spark.sql.delta.actions.SingleAction
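
Nodes (20) and (21), together with the SerializeFromObject that follows, are the standard trio a typed Dataset operation leaves in the physical plan: rows are deserialized back into SingleAction objects, the closure named in Arguments (here a lambda inside org.apache.spark.sql.delta.Snapshot) runs once per partition, and the results are serialized again. A minimal spark-shell style sketch with a hypothetical case class (assumes spark.implicits._ in scope):

case class Action(path: String, size: Long)   // hypothetical, not Delta's SingleAction
val actions = Seq(Action("part-0.parquet", 10L)).toDS()
// A typed mapPartitions yields DeserializeToObject -> MapPartitions -> SerializeFromObject.
actions.mapPartitions(iter => iter.map(identity)).explain("formatted")
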
(22) SerializeFromObject
Input [1]: [obj#33843]
Arguments: [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#33844, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#33845, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#33846, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#33847, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#33848, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#33849, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#33850, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#33851, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, 
true), configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#33852, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, 
fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#33853]
(23) AdaptiveSparkPlan
Output [10]: [txn#33844, add#33845, remove#33846, metaData#33847, protocol#33848, cdc#33849, checkpointMetadata#33850, sidecar#33851, domainMetadata#33852, commitInfo#33853]
Arguments: isFinalPlan=true
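
isFinalPlan=true means adaptive query execution has already re-optimized this plan using runtime shuffle statistics, i.e. the query has executed; before the first action the same node prints isFinalPlan=false. A spark-shell style sketch of that behaviour (any small query with a shuffle will do):

spark.conf.set("spark.sql.adaptive.enabled", "true")   // AQE is on by default in Spark 3.2+
val df = spark.range(0, 1000).repartition(50)
df.explain()   // AdaptiveSparkPlan isFinalPlan=false (not yet executed)
df.count()     // running an action lets AQE finalize the plan
df.explain()   // AdaptiveSparkPlan isFinalPlan=true
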