digraph G {
0 [labelType="html" label="<br><b>AdaptiveSparkPlan</b><br><br>"];
subgraph cluster1 {
isCluster="true";
label="WholeStageCodegen (4)";
2 [labelType="html" label="<br><b>SerializeFromObject</b><br><br>"];
}
3 [labelType="html" label="<br><b>MapPartitions</b><br><br>"];
4 [labelType="html" label="<br><b>DeserializeToObject</b><br><br>"];
subgraph cluster5 {
isCluster="true";
label="WholeStageCodegen (3)";
6 [labelType="html" label="<br><b>Project</b><br><br>"];
7 [labelType="html" label="<br><b>Sort</b><br><br>"];
}
8 [labelType="html" label="<b>Exchange</b><br><br>shuffle records written: 32<br>shuffle write time total (min, med, max (stageId: taskId))<br>11 ms (0 ms, 1 ms, 1 ms (stage 40.0: task 354))<br>data size total (min, med, max (stageId: taskId))<br>32.2 KiB (3.3 KiB, 3.3 KiB, 5.8 KiB (stage 40.0: task 346))<br>number of partitions: 50<br>shuffle bytes written total (min, med, max (stageId: taskId))<br>16.2 KiB (1779.0 B, 1791.0 B, 2.3 KiB (stage 40.0: task 346))"];
9 [labelType="html" label="<br><b>Union</b><br><br>"];
subgraph cluster10 {
isCluster="true";
label="WholeStageCodegen (1)\n \nduration: 105 ms";
11 [labelType="html" label="<br><b>Project</b><br><br>"];
12 [labelType="html" label="<b>ColumnarToRow</b><br><br>number of output rows: 8<br>number of input batches: 1"];
}
13 [labelType="html" label="<b>Scan parquet </b><br><br>number of files read: 1<br>scan time: 100 ms<br>dynamic partition pruning time: 0 ms<br>metadata time: 0 ms<br>size of files read: 26.3 KiB<br>number of output rows: 8<br>number of partitions read: 1"];
subgraph cluster14 {
isCluster="true";
label="WholeStageCodegen (2)\n \nduration: total (min, med, max (stageId: taskId))\n304 ms (30 ms, 43 ms, 45 ms (stage 40.0: task 347))";
15 [labelType="html" label="<br><b>Project</b><br><br>"];
}
16 [labelType="html" label="<b>Scan json </b><br><br>number of files read: 8<br>dynamic partition pruning time: 0 ms<br>metadata time: 0 ms<br>size of files read: 18.5 KiB<br>number of output rows: 24<br>number of partitions read: 8"];
2->0;
3->2;
4->3;
6->4;
7->6;
8->7;
9->8;
11->9;
12->11;
13->12;
15->9;
16->15;
}
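The block above appears to be the Graphviz DOT source for the SparkPlanGraph that the Spark UI's SQL tab renders in the browser (the `labelType="html"` node attributes and `WholeStageCodegen` clusters match that output). Saved to a file, it can be rendered offline. A minimal sketch, assuming the graph text is stored as plan.dot and the Graphviz `dot` binary is on PATH; the Scala wrapper around the CLI call is purely illustrative:

```scala
import scala.sys.process._

object RenderPlanDot {
  def main(args: Array[String]): Unit = {
    // Render the DOT graph above (saved as plan.dot) to SVG with Graphviz.
    // Assumes the `dot` executable from Graphviz is installed and on PATH.
    val exitCode = Seq("dot", "-Tsvg", "plan.dot", "-o", "plan.svg").!
    require(exitCode == 0, s"graphviz exited with code $exitCode")
  }
}
```

The lines that follow list the full description of each node in the same graph, in node order.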
AdaptiveSparkPlan isFinalPlan=true
SerializeFromObject [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#4376, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#4377, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#4378, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#4379, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#4380, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#4381, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#4382, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#4383, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, true), 
configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#4384, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, 
knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#4385]
WholeStageCodegen (4)
MapPartitions org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@107cd5b3, obj#4375: org.apache.spark.sql.delta.actions.SingleAction
DeserializeToObject newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#4374: org.apache.spark.sql.delta.actions.SingleAction
Project [txn#4192, CASE WHEN isnotnull(_extract_path#4397) THEN struct(path, add_path_canonical#4261, partitionValues, _extract_partitionValues#4398, size, _extract_size#4399L, modificationTime, _extract_modificationTime#4400L, dataChange, _extract_dataChange#4401, stats, add_stats_to_use#4247, tags, _extract_tags#4402, deletionVector, _extract_deletionVector#4403, baseRowId, _extract_baseRowId#4404L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#4405L, clusteringProvider, _extract_clusteringProvider#4406) END AS add#4302, CASE WHEN isnotnull(remove#4194.path) THEN if (isnull(remove#4194)) null else named_struct(path, remove_path_canonical#4280, deletionTimestamp, remove#4194.deletionTimestamp, dataChange, remove#4194.dataChange, extendedFileMetadata, remove#4194.extendedFileMetadata, partitionValues, remove#4194.partitionValues, size, remove#4194.size, tags, remove#4194.tags, deletionVector, remove#4194.deletionVector, baseRowId, remove#4194.baseRowId, defaultRowCommitVersion, remove#4194.defaultRowCommitVersion, stats, remove#4194.stats) END AS remove#4327, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201]
Sort [version#4202L ASC NULLS FIRST], false, 0
WholeStageCodegen (3)
Exchange hashpartitioning(coalesce(add_path_canonical#4261, remove_path_canonical#4280), 50), REPARTITION_BY_NUM, [plan_id=1506]
Union
Project [txn#4192, add#4193.path AS _extract_path#4397, add#4193.partitionValues AS _extract_partitionValues#4398, add#4193.size AS _extract_size#4399L, add#4193.modificationTime AS _extract_modificationTime#4400L, add#4193.dataChange AS _extract_dataChange#4401, add#4193.tags AS _extract_tags#4402, add#4193.deletionVector AS _extract_deletionVector#4403, add#4193.baseRowId AS _extract_baseRowId#4404L, add#4193.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#4405L, add#4193.clusteringProvider AS _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add#4193.stats AS add_stats_to_use#4247, CASE WHEN isnotnull(add#4193.path) THEN UDF(add#4193.path) END AS add_path_canonical#4261, CASE WHEN isnotnull(remove#4194.path) THEN UDF(remove#4194.path) END AS remove_path_canonical#4280]
ColumnarToRow
WholeStageCodegen (1)
FileScan parquet [txn#4192,add#4193,remove#4194,metaData#4195,protocol#4196,cdc#4197,checkpointMetadata#4198,sidecar#4199,domainMetadata#4200,commitInfo#4201,version#4202L] Batched: true, DataFilters: [], Format: Parquet, Location: DeltaLogFileIndex(1 paths)[hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanac..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitio...
Project [txn#4214, add#4215.path AS _extract_path#4407, add#4215.partitionValues AS _extract_partitionValues#4408, add#4215.size AS _extract_size#4409L, add#4215.modificationTime AS _extract_modificationTime#4410L, add#4215.dataChange AS _extract_dataChange#4411, add#4215.tags AS _extract_tags#4412, add#4215.deletionVector AS _extract_deletionVector#4413, add#4215.baseRowId AS _extract_baseRowId#4414L, add#4215.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#4415L, add#4215.clusteringProvider AS _extract_clusteringProvider#4416, remove#4216, metaData#4217, protocol#4218, cdc#4219, checkpointMetadata#4220, sidecar#4221, domainMetadata#4222, commitInfo#4223, version#4224L, add#4215.stats AS add_stats_to_use#4386, CASE WHEN isnotnull(add#4215.path) THEN UDF(add#4215.path) END AS add_path_canonical#4417, CASE WHEN isnotnull(remove#4216.path) THEN UDF(remove#4216.path) END AS remove_path_canonical#4418]
WholeStageCodegen (2)
FileScan json [txn#4214,add#4215,remove#4216,metaData#4217,protocol#4218,cdc#4219,checkpointMetadata#4220,sidecar#4221,domainMetadata#4222,commitInfo#4223,version#4224L] Batched: false, DataFilters: [], Format: JSON, Location: DeltaLogFileIndex(8 paths)[hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanac..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitio...
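This plan is produced internally by Delta Lake's snapshot state reconstruction (the MapPartitions lambda comes from org.apache.spark.sql.delta.Snapshot): the latest checkpoint Parquet file is unioned with the newer JSON commit files, the result is repartitioned into 50 partitions by a canonicalized file path, and each partition is sorted by commit version. The following is only a rough sketch of DataFrame operations that yield the same plan shape; the paths, the identity "canonicalization" UDF, and the assumption that both inputs already expose add/remove/version columns are hypothetical stand-ins, not Delta's actual implementation:

```scala
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

object StateReconstructionShape {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("delta-log-plan-shape")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    // Hypothetical stand-in for Delta's internal path canonicalization UDF.
    val canonicalizePath = udf((path: String) => path)

    // Hypothetical _delta_log locations; the real plan reads one checkpoint
    // Parquet file and eight commit JSON files through DeltaLogFileIndex,
    // and we assume the inputs also carry the `version` column used below.
    val checkpoint = spark.read.parquet("/tmp/table/_delta_log/00000000000000000020.checkpoint.parquet")
    val commits    = spark.read.schema(checkpoint.schema).json("/tmp/table/_delta_log/*.json")

    def withCanonicalPaths(df: DataFrame): DataFrame = df
      .withColumn("add_path_canonical",
        when($"add.path".isNotNull, canonicalizePath($"add.path")))
      .withColumn("remove_path_canonical",
        when($"remove.path".isNotNull, canonicalizePath($"remove.path")))

    // Union -> Exchange(hashpartitioning(coalesce(...), 50), REPARTITION_BY_NUM)
    //       -> Sort([version ASC NULLS FIRST], global = false), as in the plan above.
    val reconstructed = withCanonicalPaths(checkpoint)
      .unionByName(withCanonicalPaths(commits))
      .repartition(50, coalesce($"add_path_canonical", $"remove_path_canonical"))
      .sortWithinPartitions($"version")

    reconstructed.explain("formatted")
    spark.stop()
  }
}
```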
== Physical Plan ==
AdaptiveSparkPlan (23)
+- == Final Plan ==
   * SerializeFromObject (13)
   +- MapPartitions (12)
      +- DeserializeToObject (11)
         +- * Project (10)
            +- * Sort (9)
               +- ShuffleQueryStage (8), Statistics(sizeInBytes=32.2 KiB, rowCount=32)
                  +- Exchange (7)
                     +- Union (6)
                        :- * Project (3)
                        :  +- * ColumnarToRow (2)
                        :     +- Scan parquet (1)
                        +- * Project (5)
                           +- Scan json (4)
+- == Initial Plan ==
   SerializeFromObject (22)
   +- MapPartitions (21)
      +- DeserializeToObject (20)
         +- Project (19)
            +- Sort (18)
               +- Exchange (17)
                  +- Union (16)
                     :- Project (14)
                     :  +- Scan parquet (1)
                     +- Project (15)
                        +- Scan json (4)
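For reference on reading this section: with adaptive query execution enabled, `explain("formatted")` shows a Current Plan and an Initial Plan before the query runs; once the query has executed, the same call shows the finalized plan (including the materialized ShuffleQueryStage node seen above), and simple explain reports `AdaptiveSparkPlan isFinalPlan=true`. A small sketch, assuming an active SparkSession named `spark` (e.g. pasted into spark-shell); the example query is illustrative, not the one behind this plan:

```scala
import org.apache.spark.sql.functions.col

// AQE is enabled by default since Spark 3.2; set it explicitly for clarity.
spark.conf.set("spark.sql.adaptive.enabled", "true")

// Any shuffling query will do; this one mirrors the repartition + sort shape above.
val df = spark.range(0, 1000)
  .withColumn("bucket", col("id") % 10)
  .repartition(50, col("bucket"))
  .sortWithinPartitions("id")

df.explain("formatted") // before running: tree shows Current Plan / Initial Plan
df.count()              // executing the query lets AQE finalize the shuffle stages
df.explain("formatted") // afterwards: Final Plan / Initial Plan, as in the dump above
df.explain()            // simple mode now prints "AdaptiveSparkPlan isFinalPlan=true"
```

The per-operator details of the formatted plan follow.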
(1) Scan parquet
Output [11]: [txn#4192, add#4193, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L]
Batched: true
Location: DeltaLogFileIndex [hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/order-grouping-profile/_delta_log/00000000000000000020.checkpoint.parquet]
ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitionValues:map<string,string>,size:bigint,modificationTime:bigint,dataChange:boolean,stats:string,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,clusteringProvider:string>,remove:struct<path:string,deletionTimestamp:bigint,dataChange:boolean,extendedFileMetadata:boolean,partitionValues:map<string,string>,size:bigint,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,stats:string>,metaData:struct<id:string,name:string,description:string,format:struct<provider:string,options:map<string,string>>,schemaString:string,partitionColumns:array<string>,configuration:map<string,string>,createdTime:bigint>,protocol:struct<minReaderVersion:int,minWriterVersion:int,readerFeatures:array<string>,writerFeatures:array<string>>,cdc:struct<path:string,partitionValues:map<string,string>,size:bigint,tags:map<string,string>>,checkpointMetadata:struct<version:bigint,tags:map<string,string>>,sidecar:struct<path:string,sizeInBytes:bigint,modificationTime:bigint,tags:map<string,string>>,domainMetadata:struct<domain:string,configuration:string,removed:boolean>,commitInfo:struct<version:bigint,inCommitTimestamp:bigint,timestamp:timestamp,userId:string,userName:string,operation:string,operationParameters:map<string,string>,job:struct<jobId:string,jobName:string,jobRunId:string,runId:string,jobOwnerId:string,triggerType:string>,notebook:struct<notebookId:string>,clusterId:string,readVersion:bigint,isolationLevel:string,isBlindAppend:boolean,operationMetrics:map<string,string>,userMetadata:string,tags:map<string,string>,engineInfo:string,txnId:string>>
(2) ColumnarToRow [codegen id : 1]
Input [11]: [txn#4192, add#4193, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L]
(3) Project [codegen id : 1]
Output [23]: [txn#4192, add#4193.path AS _extract_path#4397, add#4193.partitionValues AS _extract_partitionValues#4398, add#4193.size AS _extract_size#4399L, add#4193.modificationTime AS _extract_modificationTime#4400L, add#4193.dataChange AS _extract_dataChange#4401, add#4193.tags AS _extract_tags#4402, add#4193.deletionVector AS _extract_deletionVector#4403, add#4193.baseRowId AS _extract_baseRowId#4404L, add#4193.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#4405L, add#4193.clusteringProvider AS _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add#4193.stats AS add_stats_to_use#4247, CASE WHEN isnotnull(add#4193.path) THEN UDF(add#4193.path) END AS add_path_canonical#4261, CASE WHEN isnotnull(remove#4194.path) THEN UDF(remove#4194.path) END AS remove_path_canonical#4280]
Input [11]: [txn#4192, add#4193, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L]
(4) Scan json
Output [11]: [txn#4214, add#4215, remove#4216, metaData#4217, protocol#4218, cdc#4219, checkpointMetadata#4220, sidecar#4221, domainMetadata#4222, commitInfo#4223, version#4224L]
Batched: false
Location: DeltaLogFileIndex [hdlfs://2e93940d-4be8-4f12-830d-f0b8d392c03a.files.hdl.prod-eu20.hanacloud.ondemand.com:443/crp-dl-stream-service/prp/order-grouping-profile/_delta_log/00000000000000000021.json, ... 7 entries]
ReadSchema: struct<txn:struct<appId:string,version:bigint,lastUpdated:bigint>,add:struct<path:string,partitionValues:map<string,string>,size:bigint,modificationTime:bigint,dataChange:boolean,stats:string,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,clusteringProvider:string>,remove:struct<path:string,deletionTimestamp:bigint,dataChange:boolean,extendedFileMetadata:boolean,partitionValues:map<string,string>,size:bigint,tags:map<string,string>,deletionVector:struct<storageType:string,pathOrInlineDv:string,offset:int,sizeInBytes:int,cardinality:bigint,maxRowIndex:bigint>,baseRowId:bigint,defaultRowCommitVersion:bigint,stats:string>,metaData:struct<id:string,name:string,description:string,format:struct<provider:string,options:map<string,string>>,schemaString:string,partitionColumns:array<string>,configuration:map<string,string>,createdTime:bigint>,protocol:struct<minReaderVersion:int,minWriterVersion:int,readerFeatures:array<string>,writerFeatures:array<string>>,cdc:struct<path:string,partitionValues:map<string,string>,size:bigint,tags:map<string,string>>,checkpointMetadata:struct<version:bigint,tags:map<string,string>>,sidecar:struct<path:string,sizeInBytes:bigint,modificationTime:bigint,tags:map<string,string>>,domainMetadata:struct<domain:string,configuration:string,removed:boolean>,commitInfo:struct<version:bigint,inCommitTimestamp:bigint,timestamp:timestamp,userId:string,userName:string,operation:string,operationParameters:map<string,string>,job:struct<jobId:string,jobName:string,jobRunId:string,runId:string,jobOwnerId:string,triggerType:string>,notebook:struct<notebookId:string>,clusterId:string,readVersion:bigint,isolationLevel:string,isBlindAppend:boolean,operationMetrics:map<string,string>,userMetadata:string,tags:map<string,string>,engineInfo:string,txnId:string>>
(5) Project [codegen id : 2]
Output [23]: [txn#4214, add#4215.path AS _extract_path#4407, add#4215.partitionValues AS _extract_partitionValues#4408, add#4215.size AS _extract_size#4409L, add#4215.modificationTime AS _extract_modificationTime#4410L, add#4215.dataChange AS _extract_dataChange#4411, add#4215.tags AS _extract_tags#4412, add#4215.deletionVector AS _extract_deletionVector#4413, add#4215.baseRowId AS _extract_baseRowId#4414L, add#4215.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#4415L, add#4215.clusteringProvider AS _extract_clusteringProvider#4416, remove#4216, metaData#4217, protocol#4218, cdc#4219, checkpointMetadata#4220, sidecar#4221, domainMetadata#4222, commitInfo#4223, version#4224L, add#4215.stats AS add_stats_to_use#4386, CASE WHEN isnotnull(add#4215.path) THEN UDF(add#4215.path) END AS add_path_canonical#4417, CASE WHEN isnotnull(remove#4216.path) THEN UDF(remove#4216.path) END AS remove_path_canonical#4418]
Input [11]: [txn#4214, add#4215, remove#4216, metaData#4217, protocol#4218, cdc#4219, checkpointMetadata#4220, sidecar#4221, domainMetadata#4222, commitInfo#4223, version#4224L]
(6) Union
(7) Exchange
Input [23]: [txn#4192, _extract_path#4397, _extract_partitionValues#4398, _extract_size#4399L, _extract_modificationTime#4400L, _extract_dataChange#4401, _extract_tags#4402, _extract_deletionVector#4403, _extract_baseRowId#4404L, _extract_defaultRowCommitVersion#4405L, _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add_stats_to_use#4247, add_path_canonical#4261, remove_path_canonical#4280]
Arguments: hashpartitioning(coalesce(add_path_canonical#4261, remove_path_canonical#4280), 50), REPARTITION_BY_NUM, [plan_id=1506]
(8) ShuffleQueryStage
Output [23]: [txn#4192, _extract_path#4397, _extract_partitionValues#4398, _extract_size#4399L, _extract_modificationTime#4400L, _extract_dataChange#4401, _extract_tags#4402, _extract_deletionVector#4403, _extract_baseRowId#4404L, _extract_defaultRowCommitVersion#4405L, _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add_stats_to_use#4247, add_path_canonical#4261, remove_path_canonical#4280]
Arguments: 0
(9) Sort [codegen id : 3]
Input [23]: [txn#4192, _extract_path#4397, _extract_partitionValues#4398, _extract_size#4399L, _extract_modificationTime#4400L, _extract_dataChange#4401, _extract_tags#4402, _extract_deletionVector#4403, _extract_baseRowId#4404L, _extract_defaultRowCommitVersion#4405L, _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add_stats_to_use#4247, add_path_canonical#4261, remove_path_canonical#4280]
Arguments: [version#4202L ASC NULLS FIRST], false, 0
(10) Project [codegen id : 3]
Output [10]: [txn#4192, CASE WHEN isnotnull(_extract_path#4397) THEN struct(path, add_path_canonical#4261, partitionValues, _extract_partitionValues#4398, size, _extract_size#4399L, modificationTime, _extract_modificationTime#4400L, dataChange, _extract_dataChange#4401, stats, add_stats_to_use#4247, tags, _extract_tags#4402, deletionVector, _extract_deletionVector#4403, baseRowId, _extract_baseRowId#4404L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#4405L, clusteringProvider, _extract_clusteringProvider#4406) END AS add#4302, CASE WHEN isnotnull(remove#4194.path) THEN if (isnull(remove#4194)) null else named_struct(path, remove_path_canonical#4280, deletionTimestamp, remove#4194.deletionTimestamp, dataChange, remove#4194.dataChange, extendedFileMetadata, remove#4194.extendedFileMetadata, partitionValues, remove#4194.partitionValues, size, remove#4194.size, tags, remove#4194.tags, deletionVector, remove#4194.deletionVector, baseRowId, remove#4194.baseRowId, defaultRowCommitVersion, remove#4194.defaultRowCommitVersion, stats, remove#4194.stats) END AS remove#4327, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201]
Input [23]: [txn#4192, _extract_path#4397, _extract_partitionValues#4398, _extract_size#4399L, _extract_modificationTime#4400L, _extract_dataChange#4401, _extract_tags#4402, _extract_deletionVector#4403, _extract_baseRowId#4404L, _extract_defaultRowCommitVersion#4405L, _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add_stats_to_use#4247, add_path_canonical#4261, remove_path_canonical#4280]
(11) DeserializeToObject
Input [10]: [txn#4192, add#4302, remove#4327, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201]
Arguments: newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#4374: org.apache.spark.sql.delta.actions.SingleAction
(12) MapPartitions
Input [1]: [obj#4374]
Arguments: org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@107cd5b3, obj#4375: org.apache.spark.sql.delta.actions.SingleAction
(13) SerializeFromObject [codegen id : 4]
Input [1]: [obj#4375]
Arguments: [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#4376, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#4377, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#4378, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#4379, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#4380, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#4381, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#4382, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#4383, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, true), 
configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#4384, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, 
knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#4385]
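The expression ending here (AS commitInfo#4385]) is the serializer that the Dataset encoder for org.apache.spark.sql.delta.actions.SingleAction generates; the staticinvoke/unwrapoption/named_struct calls are the standard case-class-to-Catalyst conversions. A minimal sketch of inspecting such serializer expressions for an illustrative case class, using Spark's internal ExpressionEncoder API (Spark 3.x; the class and field names below are illustrative stand-ins, not the real SingleAction):

import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder

// Illustrative stand-in for one nested piece of SingleAction
// (the real class lives in org.apache.spark.sql.delta.actions).
case class TxnAction(appId: String, version: Long, lastUpdated: Option[Long])

// The encoder's serializer expressions are the source of patterns like
// staticinvoke(... fromString ...) and unwrapoption(LongType, ...) printed above.
val enc = ExpressionEncoder[TxnAction]()
enc.serializer.foreach(e => println(e.sql))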
(14) Project
Output [23]: [txn#4192, add#4193.path AS _extract_path#4397, add#4193.partitionValues AS _extract_partitionValues#4398, add#4193.size AS _extract_size#4399L, add#4193.modificationTime AS _extract_modificationTime#4400L, add#4193.dataChange AS _extract_dataChange#4401, add#4193.tags AS _extract_tags#4402, add#4193.deletionVector AS _extract_deletionVector#4403, add#4193.baseRowId AS _extract_baseRowId#4404L, add#4193.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#4405L, add#4193.clusteringProvider AS _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add#4193.stats AS add_stats_to_use#4247, CASE WHEN isnotnull(add#4193.path) THEN UDF(add#4193.path) END AS add_path_canonical#4261, CASE WHEN isnotnull(remove#4194.path) THEN UDF(remove#4194.path) END AS remove_path_canonical#4280]
Input [11]: [txn#4192, add#4193, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L]
(15) Project
Output [23]: [txn#4214, add#4215.path AS _extract_path#4407, add#4215.partitionValues AS _extract_partitionValues#4408, add#4215.size AS _extract_size#4409L, add#4215.modificationTime AS _extract_modificationTime#4410L, add#4215.dataChange AS _extract_dataChange#4411, add#4215.tags AS _extract_tags#4412, add#4215.deletionVector AS _extract_deletionVector#4413, add#4215.baseRowId AS _extract_baseRowId#4414L, add#4215.defaultRowCommitVersion AS _extract_defaultRowCommitVersion#4415L, add#4215.clusteringProvider AS _extract_clusteringProvider#4416, remove#4216, metaData#4217, protocol#4218, cdc#4219, checkpointMetadata#4220, sidecar#4221, domainMetadata#4222, commitInfo#4223, version#4224L, add#4215.stats AS add_stats_to_use#4386, CASE WHEN isnotnull(add#4215.path) THEN UDF(add#4215.path) END AS add_path_canonical#4417, CASE WHEN isnotnull(remove#4216.path) THEN UDF(remove#4216.path) END AS remove_path_canonical#4418]
Input [11]: [txn#4214, add#4215, remove#4216, metaData#4217, protocol#4218, cdc#4219, checkpointMetadata#4220, sidecar#4221, domainMetadata#4222, commitInfo#4223, version#4224L]
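Nodes (14) and (15) apply the same projection to the two branches of the Delta log read (the parquet checkpoint and the json commit files in the graph at the top): nested add.* fields are pulled out into flat _extract_* columns, and a UDF canonicalizes the add and remove paths when they are present. A minimal sketch of such a projection, assuming input DataFrames checkpointDF and deltaDF with the SingleAction row schema and a hypothetical canonicalizePath UDF standing in for the UDF(...) calls in the plan:

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._

// Hypothetical stand-in for the path-canonicalizing UDF referenced in nodes (14)/(15).
val canonicalizePath = udf((p: String) => new java.net.URI(p).normalize().toString)

def projectBranch(df: DataFrame): DataFrame = df.select(
  col("txn"),
  col("add.path").as("_extract_path"),
  col("add.partitionValues").as("_extract_partitionValues"),
  col("add.size").as("_extract_size"),
  col("add.modificationTime").as("_extract_modificationTime"),
  col("add.dataChange").as("_extract_dataChange"),
  col("add.tags").as("_extract_tags"),
  col("add.deletionVector").as("_extract_deletionVector"),
  col("add.baseRowId").as("_extract_baseRowId"),
  col("add.defaultRowCommitVersion").as("_extract_defaultRowCommitVersion"),
  col("add.clusteringProvider").as("_extract_clusteringProvider"),
  col("remove"), col("metaData"), col("protocol"), col("cdc"),
  col("checkpointMetadata"), col("sidecar"), col("domainMetadata"),
  col("commitInfo"), col("version"),
  col("add.stats").as("add_stats_to_use"),
  when(col("add.path").isNotNull, canonicalizePath(col("add.path"))).as("add_path_canonical"),
  when(col("remove.path").isNotNull, canonicalizePath(col("remove.path"))).as("remove_path_canonical"))

val projectedCheckpoint = projectBranch(checkpointDF) // node (14)
val projectedDeltas     = projectBranch(deltaDF)      // node (15)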
(16) Union
(17) Exchange
Input [23]: [txn#4192, _extract_path#4397, _extract_partitionValues#4398, _extract_size#4399L, _extract_modificationTime#4400L, _extract_dataChange#4401, _extract_tags#4402, _extract_deletionVector#4403, _extract_baseRowId#4404L, _extract_defaultRowCommitVersion#4405L, _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add_stats_to_use#4247, add_path_canonical#4261, remove_path_canonical#4280]
Arguments: hashpartitioning(coalesce(add_path_canonical#4261, remove_path_canonical#4280), 50), REPARTITION_BY_NUM, [plan_id=1478]
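Nodes (16) and (17): the two projected branches are unioned positionally, then shuffled into a fixed number of partitions keyed on whichever canonical path is non-null. Continuing the sketch above (50 matches the hashpartitioning(..., 50) argument; REPARTITION_BY_NUM is the shuffle origin Spark records for an explicit repartition with a partition count):

val combined = projectedCheckpoint.union(projectedDeltas)             // node (16) Union
val shuffled = combined.repartition(50,
  coalesce(col("add_path_canonical"), col("remove_path_canonical")))  // node (17) Exchange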
(18) Sort
Input [23]: [txn#4192, _extract_path#4397, _extract_partitionValues#4398, _extract_size#4399L, _extract_modificationTime#4400L, _extract_dataChange#4401, _extract_tags#4402, _extract_deletionVector#4403, _extract_baseRowId#4404L, _extract_defaultRowCommitVersion#4405L, _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add_stats_to_use#4247, add_path_canonical#4261, remove_path_canonical#4280]
Arguments: [version#4202L ASC NULLS FIRST], false, 0
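Node (18) is a per-partition sort: the second argument, false, means the sort is not global, and an ascending sort defaults to NULLS FIRST. Continuing the sketch, this is the shape sortWithinPartitions produces:

val ordered = shuffled.sortWithinPartitions(col("version").asc)  // node (18) Sort, global = false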
(19) Project
Output [10]: [txn#4192, CASE WHEN isnotnull(_extract_path#4397) THEN struct(path, add_path_canonical#4261, partitionValues, _extract_partitionValues#4398, size, _extract_size#4399L, modificationTime, _extract_modificationTime#4400L, dataChange, _extract_dataChange#4401, stats, add_stats_to_use#4247, tags, _extract_tags#4402, deletionVector, _extract_deletionVector#4403, baseRowId, _extract_baseRowId#4404L, defaultRowCommitVersion, _extract_defaultRowCommitVersion#4405L, clusteringProvider, _extract_clusteringProvider#4406) END AS add#4302, CASE WHEN isnotnull(remove#4194.path) THEN if (isnull(remove#4194)) null else named_struct(path, remove_path_canonical#4280, deletionTimestamp, remove#4194.deletionTimestamp, dataChange, remove#4194.dataChange, extendedFileMetadata, remove#4194.extendedFileMetadata, partitionValues, remove#4194.partitionValues, size, remove#4194.size, tags, remove#4194.tags, deletionVector, remove#4194.deletionVector, baseRowId, remove#4194.baseRowId, defaultRowCommitVersion, remove#4194.defaultRowCommitVersion, stats, remove#4194.stats) END AS remove#4327, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201]
Input [23]: [txn#4192, _extract_path#4397, _extract_partitionValues#4398, _extract_size#4399L, _extract_modificationTime#4400L, _extract_dataChange#4401, _extract_tags#4402, _extract_deletionVector#4403, _extract_baseRowId#4404L, _extract_defaultRowCommitVersion#4405L, _extract_clusteringProvider#4406, remove#4194, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201, version#4202L, add_stats_to_use#4247, add_path_canonical#4261, remove_path_canonical#4280]
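Node (19) folds the flat _extract_* columns back into add and remove structs, substituting the canonicalized paths and the selected stats column. A sketch of the add side, continuing from above (the remove struct is rebuilt analogously in the real plan; it is passed through here for brevity):

val reassembled = ordered.select(
  col("txn"),
  when(col("_extract_path").isNotNull,
    struct(
      col("add_path_canonical").as("path"),
      col("_extract_partitionValues").as("partitionValues"),
      col("_extract_size").as("size"),
      col("_extract_modificationTime").as("modificationTime"),
      col("_extract_dataChange").as("dataChange"),
      col("add_stats_to_use").as("stats"),
      col("_extract_tags").as("tags"),
      col("_extract_deletionVector").as("deletionVector"),
      col("_extract_baseRowId").as("baseRowId"),
      col("_extract_defaultRowCommitVersion").as("defaultRowCommitVersion"),
      col("_extract_clusteringProvider").as("clusteringProvider"))).as("add"),
  col("remove"),  // rebuilt with remove_path_canonical in node (19); passed through here
  col("metaData"), col("protocol"), col("cdc"), col("checkpointMetadata"),
  col("sidecar"), col("domainMetadata"), col("commitInfo"))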
(20) DeserializeToObject
Input [10]: [txn#4192, add#4302, remove#4327, metaData#4195, protocol#4196, cdc#4197, checkpointMetadata#4198, sidecar#4199, domainMetadata#4200, commitInfo#4201]
Arguments: newInstance(class org.apache.spark.sql.delta.actions.SingleAction), obj#4374: org.apache.spark.sql.delta.actions.SingleAction
(21) MapPartitions
Input [1]: [obj#4374]
Arguments: org.apache.spark.sql.delta.Snapshot$$Lambda$5789/0x00007f71e98be290@107cd5b3, obj#4375: org.apache.spark.sql.delta.actions.SingleAction
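Nodes (20) through (22) are the typed section of the plan: rows are decoded into SingleAction objects (DeserializeToObject), a mapPartitions lambda runs over them (the org.apache.spark.sql.delta.Snapshot$$Lambda reference above), and the results are re-encoded (SerializeFromObject, whose serializer expression is printed in full under node (22)). A minimal sketch, assuming an active SparkSession named spark, a case class SingleAction mirroring org.apache.spark.sql.delta.actions.SingleAction, and a hypothetical replayPartition function standing in for the Snapshot lambda:

import org.apache.spark.sql.Dataset
import spark.implicits._  // assumes `spark: SparkSession` is in scope

val replayed: Dataset[SingleAction] =
  reassembled
    .as[SingleAction]                              // node (20) DeserializeToObject
    .mapPartitions(iter => replayPartition(iter))  // node (21) MapPartitions
// Node (22) SerializeFromObject is inserted automatically when the typed result is
// converted back to rows (e.g. replayed.toDF() or any downstream relational operator).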
(22) SerializeFromObject
Input [1]: [obj#4375]
Arguments: [if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn)) null else named_struct(appId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).appId, true, false, true), version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).version, lastUpdated, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).txn).lastUpdated)) AS txn#4376, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).size, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).modificationTime, dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).dataChange, stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).stats, true, false, true), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, 
true])).add).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).defaultRowCommitVersion), clusteringProvider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).add).clusteringProvider), true, false, true)) AS add#4377, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).path, true, false, true), deletionTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionTimestamp), dataChange, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).dataChange, extendedFileMetadata, unwrapoption(BooleanType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).extendedFileMetadata), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -5), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -6), StringType, ObjectType(class 
java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).partitionValues), size, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).size), tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -7), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -8), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).tags), deletionVector, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).deletionVector).maxRowIndex)), baseRowId, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).baseRowId), defaultRowCommitVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).defaultRowCommitVersion), stats, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).remove).stats, true, false, true)) AS remove#4378, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData)) null else named_struct(id, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, 
org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).id, true, false, true), name, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).name, true, false, true), description, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).description, true, false, true), format, if (isnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format)) null else named_struct(provider, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).provider, true, false, true), options, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -9), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -10), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).format).options)), schemaString, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).schemaString, true, false, true), partitionColumns, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -11), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).partitionColumns, None), configuration, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -12), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -13), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).configuration), createdTime, unwrapoption(LongType, 
knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).metaData).createdTime)) AS metaData#4379, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol)) null else named_struct(minReaderVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minReaderVersion, minWriterVersion, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).minWriterVersion, readerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -14), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).readerFeatures).toSeq, None), writerFeatures, mapobjects(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(MapObject, ObjectType(class java.lang.Object), true, -15), StringType, ObjectType(class java.lang.String)), true, false, true), unwrapoption(ObjectType(interface scala.collection.immutable.Set), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).protocol).writerFeatures).toSeq, None)) AS protocol#4380, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).path, true, false, true), partitionValues, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -16), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -17), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).partitionValues), size, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).size, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -18), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class 
java.lang.Object), true, -19), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -19), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).cdc).tags)) AS cdc#4381, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata)) null else named_struct(version, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).version, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -20), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -21), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).checkpointMetadata).tags)) AS checkpointMetadata#4382, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar)) null else named_struct(path, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).path, true, false, true), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).sizeInBytes, modificationTime, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).modificationTime, tags, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -22), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -23), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).sidecar).tags)) AS sidecar#4383, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata)) null else named_struct(domain, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).domain, true, false, true), 
configuration, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).configuration, true, false, true), removed, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).domainMetadata).removed) AS domainMetadata#4384, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo)) null else named_struct(version, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).version), inCommitTimestamp, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).inCommitTimestamp), timestamp, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, TimestampType, fromJavaTimestamp, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).timestamp, true, false, true), userId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userId), true, false, true), userName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).userName), true, false, true), operation, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operation, true, false, true), operationParameters, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -24), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -25), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).operationParameters), job, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job))) null else named_struct(jobId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobId, true, false, true), jobName, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, 
knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobName, true, false, true), jobRunId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobRunId, true, false, true), runId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).runId, true, false, true), jobOwnerId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).jobOwnerId, true, false, true), triggerType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.JobInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).job)).triggerType, true, false, true)), notebook, if (isnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook))) null else named_struct(notebookId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(unwrapoption(ObjectType(class org.apache.spark.sql.delta.actions.NotebookInfo), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).notebook)).notebookId, true, false, true)), clusterId, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).clusterId), true, false, true), readVersion, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).readVersion), isolationLevel, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.SingleAction, true])).commitInfo).isolationLevel), true, false, true), ... 12 more fields) AS commitInfo#4385]
(23) AdaptiveSparkPlan
Output [10]: [txn#4376, add#4377, remove#4378, metaData#4379, protocol#4380, cdc#4381, checkpointMetadata#4382, sidecar#4383, domainMetadata#4384, commitInfo#4385]
Arguments: isFinalPlan=true
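isFinalPlan=true means adaptive query execution finished re-optimizing, so the operators listed above are the ones that actually ran. A node-by-node dump in this format (together with the metrics shown in the graph at the top) comes from the SQL tab of the Spark UI after execution; a similar formatted listing can also be printed up front, where the adaptive plan initially shows isFinalPlan=false:

// Prints the same "(n) NodeName" sections with Output/Input/Arguments details.
replayed.toDF().explain("formatted")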