Spark UI query executions, columns: ID | Description | Submitted | Duration | Job IDs.
Every description below carries the same run context, abbreviated here as <ctx>:
<ctx> = replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD

ID: 157
Description: <ctx>
Call site:
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:55)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:34:55 | Duration: 1 s | Job IDs: [327][328][329][330][331]

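Entries 157 and 153 (and 151/152 further down) end in DataFrameWriter.save called from FileStorageAdapterImpl inside a Temporal activity, and each execution's description is the activity's run context. A minimal sketch of that pattern, assuming the description is set explicitly via SparkContext.setJobDescription; the class name SnapshotWriter, the context fields chosen, and the target path are illustrative, not taken from the service:

import io.temporal.activity.Activity;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public final class SnapshotWriter {

    private final SparkSession spark;

    public SnapshotWriter(SparkSession spark) {
        this.spark = spark;
    }

    /** Must run inside a Temporal activity; the description shows up in the Spark UI. */
    public void storeSnapshot(Dataset<Row> snapshot, String targetPath) {
        // Build a context string similar to the descriptions above (fields illustrative).
        String ctx = String.format(
                "activityId = %s workflowId = %s attempt = %d",
                Activity.getExecutionContext().getInfo().getActivityId(),
                Activity.getExecutionContext().getInfo().getWorkflowId(),
                Activity.getExecutionContext().getInfo().getAttempt());

        // Every Spark job launched by the save() below inherits this description.
        spark.sparkContext().setJobDescription(ctx);
        try {
            snapshot.write()
                    .format("delta")          // matches the Delta call sites in the traces
                    .mode(SaveMode.Overwrite)
                    .save(targetPath);        // org.apache.spark.sql.DataFrameWriter.save
        } finally {
            spark.sparkContext().setJobDescription(null);
        }
    }
}

Clearing the description in the finally block keeps later, unrelated actions on the same thread from inheriting the context string.
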
ID: 153
Description: <ctx>
Call site:
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:290)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeCommonDataSnapshot(FileStorageAdapterImpl.java:283)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeCommonDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:72)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:97)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/02/06 10:34:53 | Duration: 1 s | Job IDs: [325][326]

ID: 154
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:53 | Duration: 0.2 s | Job IDs: [323]

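The short "Filtering files for query" executions (154, 155, 156, and many below) come from Delta's planning step: DataSkippingReaderBase.filesForScan evaluates per-file statistics to prune files before the main job runs. A small sketch of a read that triggers this phase; the table path and predicate are hypothetical:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public final class DeltaSkippingExample {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("delta-skipping-sketch")
                .getOrCreate();

        Dataset<Row> orders = spark.read()
                .format("delta")
                .load("/data/orders");   // hypothetical Delta table path

        // The predicate lets PrepareDeltaScan skip files whose min/max stats
        // rule them out; that pruning is the short job seen in the UI.
        long n = orders.filter("marketUnit = 'Daily_grocery'").count();
        System.out.println("matching rows: " + n);
        spark.stop();
    }
}
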
ID: 155
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:53 | Duration: 21 ms | Job IDs: (none)

ID: 156
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:53 | Duration: 0.2 s | Job IDs: [324]

ID: 152
Description: <ctx>
Call site:
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:290)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeOrderProposalSnapshot(FileStorageAdapterImpl.java:278)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeOrderProposalSnapshot(DataSnapshotGenerationDataAccessImpl.java:67)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:96)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/02/06 10:34:44 | Duration: 9 s | Job IDs: [313][314][315][316][317][318][319][320][321][322]

ID: 151
Description: <ctx>
Call site:
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:290)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeProdPlantSnapshot(FileStorageAdapterImpl.java:273)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeProdPlantSnapshot(DataSnapshotGenerationDataAccessImpl.java:62)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:95)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/02/06 10:34:33 | Duration: 11 s | Job IDs: [293][294][295][296][297][298][299][300][301][302][303][304][305][306][307][308][309][310][311][312]

ID: 150
Description: <ctx>
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted: 2026/02/06 10:34:32 | Duration: 82 ms | Job IDs: [292]

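Entry 150 (and 145 below) shows DeltaLog.forTable resolving the table through a Guava cache: the first access per path builds the log (the LocalCache.lockedGetOrLoad frames), and later accesses reuse it, so the log is constructed once per table. A sketch of the same get-or-load pattern; TableState and loadState are hypothetical stand-ins for the real snapshot state:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

public final class TableStateCache {
    record TableState(String path, long version) {}

    private final Cache<String, TableState> cache = CacheBuilder.newBuilder()
            .expireAfterAccess(60, TimeUnit.MINUTES)
            .build();

    /** First caller per path loads (lockedGetOrLoad); concurrent callers block, then reuse. */
    public TableState forTable(String path) throws ExecutionException {
        return cache.get(path, () -> loadState(path));
    }

    private TableState loadState(String path) {
        // Stand-in for replaying the _delta_log to its latest version.
        return new TableState(path, 0L);
    }
}
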
ID: 149
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:32 | Duration: 0.1 s | Job IDs: [291]

ID: 148
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:31 | Duration: 0.1 s | Job IDs: [290]

ID: 147
Description: Delta: Delta: <ctx>: Filtering files for query: Compute snapshot for version: 29
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:31 | Duration: 0.3 s | Job IDs: [288][289]

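The doubled "Delta: Delta:" prefix on 147 and 146 marks a nested operation: before filtering files, Delta materializes the table snapshot at a specific log version (29 here; versions 28 and 2 appear further down, presumably for other tables in the run). For illustration, a read pinned to an explicit version via Delta's time-travel option; the path and version number are hypothetical:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public final class DeltaVersionExample {
    /** Reads one pinned log version instead of the latest snapshot. */
    static Dataset<Row> snapshotAt(SparkSession spark, String path, long version) {
        return spark.read()
                .format("delta")
                .option("versionAsOf", version)  // e.g. 29, as in the description above
                .load(path);
    }
}
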
ID: 146
Description: Delta: Delta: <ctx>: Filtering files for query: Compute snapshot for version: 29
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:31 | Duration: 0.2 s | Job IDs: [287]

ID: 145
Description: <ctx>
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted: 2026/02/06 10:34:30 | Duration: 0.2 s | Job IDs: [286]

ID: 144
Description: <ctx>
Call site:
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:34:30 | Duration: 0.1 s | Job IDs: [285]

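Entry 144's call site starts in DeltaThreadPool rather than a query method: Delta schedules background work on a pool whose tasks capture the submitting thread's locals and restore them on the worker (the SparkThreadLocalCapturingRunnable frames), which is how an async execution still carries the activity's description. A minimal version of that capture-and-restore pattern; the ThreadLocal map here stands in for the real Spark and MDC state:

import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public final class ContextForwarding {
    /** Hypothetical thread-local context, e.g. the MDC backing the fields in <ctx>. */
    static final ThreadLocal<Map<String, String>> CONTEXT = new ThreadLocal<>();

    static Runnable capturing(Runnable task) {
        Map<String, String> captured = CONTEXT.get();    // capture at submit time
        return () -> {
            Map<String, String> previous = CONTEXT.get();
            CONTEXT.set(captured);                       // restore on the worker thread
            try {
                task.run();
            } finally {
                CONTEXT.set(previous);                   // put the worker's state back
            }
        };
    }

    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        CONTEXT.set(Map.of("replenishmentRunId", "10000000096"));
        pool.submit(capturing(() ->
                System.out.println("ctx on worker: " + CONTEXT.get())));
        pool.shutdown();
    }
}
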
ID: 143
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:29 | Duration: 0.1 s | Job IDs: [283]

ID: 142
Description: Delta: Delta: <ctx>: Filtering files for query: Compute snapshot for version: 2
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:29 | Duration: 0.3 s | Job IDs: [281][282]

ID: 141
Description: Delta: Delta: <ctx>: Filtering files for query: Compute snapshot for version: 2
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:28 | Duration: 0.1 s | Job IDs: [280]

ID: 140
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:28 | Duration: 0.1 s | Job IDs: [279]

ID: 139
Description: Delta: Delta: <ctx>: Filtering files for query: Compute snapshot for version: 28
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:28 | Duration: 0.3 s | Job IDs: [277][278]

ID: 138
Description: Delta: Delta: <ctx>: Filtering files for query: Compute snapshot for version: 28
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:27 | Duration: 0.2 s | Job IDs: [276]

ID: 137
Description: Delta: <ctx>: Filtering files for query
Call site:
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:34:27 | Duration: 0.3 s | Job IDs: [275]

135
|
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 10:34:27
|
0.1 s
|
[274]
|
|
|
134
|
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 10:34:26
|
0.2 s
|
[273]
|
|
|
133
|
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 10:34:26
|
0.2 s
|
[272]
|
|
|
132
|
Delta: Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 144
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 10:34:25
|
0.3 s
|
[270][271]
|
|
|
131
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 10:34:24
|
0.1 s
|
[268]
|
|
|
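
The surrounding entries (131, 130, 128, and later 122) all bottom out in DeltaLog$.forTable behind a Guava LocalCache: Delta keeps one DeltaLog instance per table path, so only the first access to a given path pays for log construction (LocalCache.get reaches createDeltaLog only on a miss) and later lookups are cache hits. A sketch of a table access that resolves through that cache, using the public DeltaTable API and a hypothetical path:

    import io.delta.tables.DeltaTable;
    import org.apache.spark.sql.SparkSession;

    public class DeltaLogCacheSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder()
                    .appName("deltalog-cache-sketch")
                    .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
                    .config("spark.sql.catalog.spark_catalog",
                            "org.apache.spark.sql.delta.catalog.DeltaCatalog")
                    .getOrCreate();

            // Hypothetical path; resolving it goes through DeltaLog.forTable,
            // which consults the per-path cache seen in these traces.
            DeltaTable table = DeltaTable.forPath(spark, "/data/snapshots/common");
            table.history(1).show();  // latest commit of the (cached) log

            spark.stop();
        }
    }
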
130
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 10:34:24
|
0.2 s
|
[267]
|
|
|
129
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:34:24
|
0.1 s
|
[266]
|
|
|
128
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 10:34:22
|
0.1 s
|
[264]
|
|
|
127
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:3586)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.ProductPlantSnapshotEnhancementImpl.withProductPlant(ProductPlantSnapshotEnhancementImpl.java:26)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:73)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:34:21
|
0.5 s
|
[261][262][263]
|
|
|
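
Entry 127 above and entry 123 below are driver-side collects rather than writes: ProductPlantSnapshotEnhancementImpl and MarketUnitDeterminationImpl call Dataset.collectAsList, which runs a Spark job (the IDs in the last column) and materializes the rows inside the activity worker's JVM. A minimal sketch of that pattern, with a hypothetical table path and column name:

    import java.util.List;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class CollectAsListSketch {
        // Hypothetical lookup; the real schema is not visible in the listing.
        static String determineMarketUnit(SparkSession spark, String configPath) {
            Dataset<Row> rows = spark.read().format("delta").load(configPath)
                    .select("marketUnit")
                    .limit(1);

            // collectAsList launches the job and brings the (small) result
            // back to the driver, where the service code inspects it.
            List<Row> collected = rows.collectAsList();
            return collected.isEmpty() ? null : collected.get(0).getString(0);
        }
    }
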
123
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:3586)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.MarketUnitDeterminationImpl.getMarketUnitRow(MarketUnitDeterminationImpl.java:54)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.MarketUnitDeterminationImpl.determineMarketUnit(MarketUnitDeterminationImpl.java:27)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:69)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/02/06 10:34:20
|
0.3 s
|
[258][259]
|
|
|
|
| ID | Description | Submitted | Duration | Job IDs |
|
124
|
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 10:34:19
|
0.3 s
|
[256]
|
|
125
|
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 10:34:20
|
18 ms
|
|
|
126
|
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 10:34:20
|
0.4 s
|
[257]
|
|
|
122
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 10:34:19
|
0.2 s
|
[255]
|
|
|
121
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 9aa5a928-b219-375b-ac11-54040e15cd53 workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:34:19
|
0.2 s
|
[254]
|
|
|
120
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateProdPlantList activityId = aab19991-4480-31e0-94d1-1e3872bbff2f workflowType = ProcessWorkloadShardWorkflow workflowId = 9853fdbb-cc51-3855-a9da-9170fc5b0400 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateProdPlantListForShardActivityImpl.createProdPlantList(CreateProdPlantListForShardActivityImpl.java:45)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:34:01
|
1 s
|
[249][250][251][252][253]
|
|
|
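
This entry and several that follow (119, 108, 103) end in FileStorageAdapterImpl.storePerformanceMetrics calling DataFrameWriter.save: each activity persists its own performance metrics with a small Spark write before completing. A sketch of a write with that shape; the Delta format, append mode, and target path are assumptions, since none of them are visible in the trace.

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;

    public class MetricsWriteSketch {
        // Illustrative shape of a write that surfaces as
        // DataFrameWriter.save(DataFrameWriter.scala:243) in these traces.
        static void storePerformanceMetrics(Dataset<Row> metrics, String targetPath) {
            metrics.write()
                    .format("delta")         // assumption: Delta, like the reads above
                    .mode(SaveMode.Append)   // assumption: metrics accumulate per run
                    .save(targetPath);
        }
    }
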
119
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = DetermineSupplyNetworkList activityId = 961dcc42-bffb-3b54-9460-e86e2549e19e workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.SupplyNetworkForShardCreationActivityImpl.determineSupplyNetworkList(SupplyNetworkForShardCreationActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:33:53
|
1 s
|
[244][245][246][247][248]
|
|
|
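
Every stack in this listing shares the same tail: a Temporal activity worker (PollTaskExecutor -> ActivityTaskHandlerImpl -> reflective Method.invoke) drives the service method, so the Spark work runs on the worker's activity thread. A minimal sketch of the activity shape that produces such frames; the interface and method names here are illustrative, not the service's real contracts:

    import io.temporal.activity.ActivityInterface;
    import io.temporal.activity.ActivityMethod;

    @ActivityInterface
    public interface SupplyNetworkActivities {
        @ActivityMethod
        void determineSupplyNetworkList(long replenishmentRunId);
    }

    // Registered on a Worker; each invocation arrives via the
    // PollTaskExecutor/ActivityTaskHandlerImpl frames seen above and is
    // dispatched reflectively, hence Method.invoke in every trace.
    class SupplyNetworkActivitiesImpl implements SupplyNetworkActivities {
        @Override
        public void determineSupplyNetworkList(long replenishmentRunId) {
            // Spark reads/writes would run here, on the activity thread.
        }
    }
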
111
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateProdPlantList activityId = aab19991-4480-31e0-94d1-1e3872bbff2f workflowType = ProcessWorkloadShardWorkflow workflowId = 9853fdbb-cc51-3855-a9da-9170fc5b0400 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeProdPlantListForShard(FileStorageAdapterImpl.java:190)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateProdPlantListForShardDataAccessImpl.storeProdPlantList(CreateProdPlantListForShardDataAccessImpl.java:29)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.ProdPlantListForShardCreationImpl.createProdPlantList(ProdPlantListForShardCreationImpl.java:30)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateProdPlantListForShardActivityImpl.createProdPlantList(CreateProdPlantListForShardActivityImpl.java:43)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:33:51
|
10 s
|
[232][234]
|
|
|
110
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = DetermineSupplyNetworkList activityId = 961dcc42-bffb-3b54-9460-e86e2549e19e workflowType = PrepareDataSnapshotWorkflow workflowId = b23ef421-1eee-3d30-a73f-c7878eb04008 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeSupplyNetworkList(FileStorageAdapterImpl.java:210)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.SupplyNetworkForShardCreationDataAccessImpl.storeSupplyNetworkList(SupplyNetworkForShardCreationDataAccessImpl.java:29)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.SupplyNetworkForShardCreationImpl.createSupplyNetworkList(SupplyNetworkForShardCreationImpl.java:17)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.SupplyNetworkForShardCreationActivityImpl.determineSupplyNetworkList(SupplyNetworkForShardCreationActivityImpl.java:42)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:33:51
|
2 s
|
[231][233][235]
|
|
|
108
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = PrepareStreams activityId = e3aeb3ac-cdab-38d4-8651-88ee8ccad2a4 workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.OrderQuantityOptimizationStreamingActivityImpl.prepareStreams(OrderQuantityOptimizationStreamingActivityImpl.java:50)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:33:46
|
0.9 s
|
[223][224][225][226][227]
|
|
|
103
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:64)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 10:33:43
|
1 s
|
[215][216][217][218][219]
|
|
|
102
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeShardedWorklist(FileStorageAdapterImpl.java:250)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:26)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:35)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:45)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/02/06 10:33:42
|
1 s
|
[213][214]
|
|
|
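
Entry 102 above and entry 101 below belong to one step: storeShardedWorklist persists the worklist split into shards, then urlByShard collects the per-shard storage URLs back onto the driver via collectAsList. One plausible shape for the write half is sketched here; the Delta format, overwrite mode, and shardId partition column are assumptions, not details recoverable from the log.

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;

    public class ShardedWorklistWriteSketch {
        // Illustrative only: a partitioned write consistent with
        // storeShardedWorklist sitting above DataFrameWriter.save.
        static void storeShardedWorklist(Dataset<Row> worklist, String basePath) {
            worklist.write()
                    .format("delta")            // assumption
                    .mode(SaveMode.Overwrite)   // assumption
                    .partitionBy("shardId")     // hypothetical shard column
                    .save(basePath);
        }
    }
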
101
|
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:3586)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.urlByShard(FileStorageAdapterImpl.java:260)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeShardedWorklist(FileStorageAdapterImpl.java:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:26)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:35)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:45)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/02/06 10:33:41 | Duration: 0.3 s | Job IDs: [208][209][210][211][212]

ID: 100
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.count(Dataset.scala:3615)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.calculateBinPackingMetrics(WorklistShardingImpl.java:108)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.performBinPacking(WorklistShardingImpl.java:95)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:38)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:33:41 | Duration: 0.2 s | Job IDs: [205][206][207]

ID: 99
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.count(Dataset.scala:3615)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.calculateBinPackingMetrics(WorklistShardingImpl.java:107)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.performBinPacking(WorklistShardingImpl.java:95)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:38)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:33:40 | Duration: 0.6 s | Job IDs: [195][196][197][198][199][200][201][202][203][204]

ID: 98
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:33:40 | Duration: 0.3 s | Job IDs: [194]

ID: 96
Delta: replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 10:33:40 | Duration: 0.3 s | Job IDs: [193]

ID: 95
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted: 2026/02/06 10:33:39 | Duration: 0.2 s | Job IDs: [192]

ID: 94
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 716699f2-34fb-38b0-856a-57fc6af8628a workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:33:39 | Duration: 0.2 s | Job IDs: [191]

ID: 93
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = e8bbd528-e182-393f-b737-ebededb77cbf workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistAttributeEnhancementActivityImpl.enhanceWorklistByAttributes(WorklistAttributeEnhancementActivityImpl.java:50)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:33:29 | Duration: 1 s | Job IDs: [184][185][186][187][188]

ID: 82
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = e8bbd528-e182-393f-b737-ebededb77cbf workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted: 2026/02/06 10:33:24 | Duration: 0.2 s | Job IDs: [170]

ID: 81
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = e8bbd528-e182-393f-b737-ebededb77cbf workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:33:23 | Duration: 0.1 s | Job IDs: [169]

ID: 80
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = e8bbd528-e182-393f-b737-ebededb77cbf workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted: 2026/02/06 10:33:21 | Duration: 0.3 s | Job IDs: [167]

ID: 79
replenishmentRunId = 10000000096 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = e8bbd528-e182-393f-b737-ebededb77cbf workflowType = CalculateOrderProposalsWorkflow workflowId = 9e4477b5-be60-3834-83e9-5b127657b9b8 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 10:33:21 | Duration: 0.2 s | Job IDs: [166]

ID: 78
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:55)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted: 2026/02/06 06:56:31 | Duration: 1 s | Job IDs: [161][162][163][164][165]

ID: 74
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:290)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeCommonDataSnapshot(FileStorageAdapterImpl.java:283)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeCommonDataSnapshot(DataSnapshotGenerationDataAccessImpl.java:72)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:97)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/02/06 06:56:29 | Duration: 1 s | Job IDs: [159][160]

ID: 75
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 06:56:29 | Duration: 0.1 s | Job IDs: [157]

ID: 76
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 06:56:29 | Duration: 37 ms

ID: 77
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 06:56:29 | Duration: 0.3 s | Job IDs: [158]

ID: 73
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:290)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeOrderProposalSnapshot(FileStorageAdapterImpl.java:278)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeOrderProposalSnapshot(DataSnapshotGenerationDataAccessImpl.java:67)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:96)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/02/06 06:56:21 | Duration: 8 s | Job IDs: [147][148][149][150][151][152][153][154][155][156]

ID: 72
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeDataSnapshot(FileStorageAdapterImpl.java:290)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeProdPlantSnapshot(FileStorageAdapterImpl.java:273)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.DataSnapshotGenerationDataAccessImpl.storeProdPlantSnapshot(DataSnapshotGenerationDataAccessImpl.java:62)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:95)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted: 2026/02/06 06:56:10 | Duration: 11 s | Job IDs: [127][128][129][130][131][132][133][134][135][136][137][138][139][140][141][142][143][144][145][146]

ID: 71
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted: 2026/02/06 06:56:10 | Duration: 96 ms | Job IDs: [126]

ID: 70
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 06:56:09 | Duration: 0.1 s | Job IDs: [125]

ID: 69
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted: 2026/02/06 06:56:09 | Duration: 0.1 s | Job IDs: [124]

ID: 68
Delta: Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 29
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:08
|
0.3 s
|
[122][123]
|
|
|
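The nested "Compute snapshot for version: N" step is Delta reconstructing table state at a given transaction-log version before file skipping can run; the versions seen in this listing (29, 28, 2, 143) simply reflect each table's commit count. Reading with time travel pins the same snapshot computation explicitly. A sketch, path again hypothetical:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class TimeTravelSketch {
    // "versionAsOf" forces Delta to compute the snapshot for that exact
    // log version, the same work logged as "Compute snapshot for
    // version: 29" above. The path is illustrative.
    static Dataset<Row> snapshotAt(SparkSession spark, long version) {
        return spark.read().format("delta")
                .option("versionAsOf", version)
                .load("/data/snapshots/buffer");
    }
}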
67
|
Delta: Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 29
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:08
|
0.3 s
|
[121]
|
|
|
66
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 06:56:06
|
0.8 s
|
[120]
|
|
|
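The com.google.common.cache frames in entry 66 show DeltaLog.forTable serving DeltaLog instances from a Guava cache keyed by table path, so the transaction log is replayed only on first access and concurrent callers block on the same key (the LocalCache$Segment.lockedGetOrLoad frame). A minimal sketch of that caching pattern, not Delta's actual code:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.ExecutionException;

public class LogCacheSketch {
    private final Cache<String, Object> cache =
            CacheBuilder.newBuilder().maximumSize(1000).build();

    Object logFor(String tablePath) throws ExecutionException {
        // get(key, loader) blocks concurrent callers on the same key,
        // matching the lockedGetOrLoad frames above; the loader runs once.
        return cache.get(tablePath, () -> expensiveLoad(tablePath));
    }

    private Object expensiveLoad(String tablePath) {
        return new Object(); // stand-in for DeltaLog construction and replay
    }
}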
65
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 06:56:06
|
0.1 s
|
[119]
|
|
|
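Entry 65 runs on DeltaThreadPool, and its SparkThreadLocalCapturingRunnable frames show Delta capturing the submitting thread's Spark-local state and restoring it on the pool thread; that is why the replenishmentRunId/tenantId context survives the async hop. A hedged Java sketch of the capture-and-restore idea, not the Delta implementation:

import org.apache.spark.SparkContext;
import java.util.concurrent.ExecutorService;

public class ForwardingPoolSketch {
    static Runnable capturing(SparkContext sc, Runnable task) {
        // Captured on the submitting thread; "spark.job.description" is the
        // real local property behind the descriptions in this table.
        String desc = sc.getLocalProperty("spark.job.description");
        return () -> {
            sc.setLocalProperty("spark.job.description", desc); // restored on worker
            task.run();
        };
    }

    static void submit(SparkContext sc, ExecutorService pool, Runnable task) {
        pool.submit(capturing(sc, task));
    }
}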
64
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:05
|
0.1 s
|
[117]
|
|
|
63
|
Delta: Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:05
|
0.3 s
|
[115][116]
|
|
|
62
|
Delta: Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 2
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:05
|
0.4 s
|
[114]
|
|
|
61
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:04
|
0.1 s
|
[113]
|
|
|
60
|
Delta: Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 28
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:04
|
0.4 s
|
[111][112]
|
|
|
59
|
Delta: Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 28
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:03
|
0.3 s
|
[110]
|
|
|
58
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:03
|
0.4 s
|
[109]
|
|
|
56
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:02
|
0.8 s
|
[108]
|
|
|
55
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:01
|
0.2 s
|
[107]
|
|
|
54
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:01
|
0.2 s
|
[106]
|
|
|
53
|
Delta: Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query: Compute snapshot for version: 143
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1187)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:56:00
|
0.4 s
|
[104][105]
|
|
|
52
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 06:55:58
|
0.5 s
|
[102]
|
|
|
51
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 06:55:57
|
0.8 s
|
[101]
|
|
|
50
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 06:55:57
|
0.1 s
|
[100]
|
|
|
49
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
|
2026/02/06 06:55:55
|
0.5 s
|
[98]
|
|
|
48
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:3586)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.ProductPlantSnapshotEnhancementImpl.withProductPlant(ProductPlantSnapshotEnhancementImpl.java:26)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:73)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
|
2026/02/06 06:55:54
|
0.8 s
|
[95][96][97]
|
|
|
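Entry 48 ends in Dataset.collectAsList: the snapshot-preparation code materializes an entire Dataset as a java.util.List on the driver. That is only safe for bounded results, so narrow the Dataset first. A sketch with an assumed table name and filter, not the service's code:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import java.util.List;

public class CollectSketch {
    static List<Row> productPlants(SparkSession spark) {
        Dataset<Row> df = spark.table("product_plant")      // assumed name
                .filter("marketUnit = 'Daily_grocery'");    // keep the result small
        return df.collectAsList();                          // driver-side list
    }
}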
44
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:3586)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.MarketUnitDeterminationImpl.getMarketUnitRow(MarketUnitDeterminationImpl.java:54)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.MarketUnitDeterminationImpl.determineMarketUnit(MarketUnitDeterminationImpl.java:27)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.DataSnapshotGenerationImpl.prepareDataSnapshot(DataSnapshotGenerationImpl.java:69)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapshotInternal(CreateDataSnapshotForShardActivityImpl.java:53)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateDataSnapshotForShardActivityImpl.bufferDataSnapShot(CreateDataSnapshotForShardActivityImpl.java:38)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/02/06 06:55:50
|
2 s
|
[92][93]
|
|
|
|
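Every description in this table carries the same key=value prefix (replenishmentRunId, tenantId, activityType, ...). That is consistent with the activity setting a Spark job description before doing any work, which the UI then attaches to each execution it records. A sketch of that convention; the format string is illustrative, not the service's code:

import org.apache.spark.sql.SparkSession;

public class JobDescriptionSketch {
    static void tagJobs(SparkSession spark, long runId, String tenantId) {
        spark.sparkContext().setJobDescription(
                "replenishmentRunId = " + runId + " tenantId = " + tenantId);
        // Actions triggered on this thread from here on appear under this
        // description in the Spark UI, like the entries in this listing.
    }
}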
|
45
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:55:49
|
0.8 s
|
[90]
|
|
46
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted 2026/02/06 06:55:49 | Duration 19 ms

ID 47
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted 2026/02/06 06:55:49 | Duration 0.6 s | Job IDs [91]

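Note on entries 45-47: the "Filtering files for query" status comes from PrepareDeltaScan.filesForScan, where Delta prunes data files by their per-file min/max statistics before launching a scan; three near-identical entries in a row is consistent with three separate filtered reads being planned back to back. A minimal sketch of a read that triggers this path, assuming a placeholder table path and filter column (neither is taken from this run) and delta-spark on the classpath:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;
    import static org.apache.spark.sql.functions.col;

    public class DeltaScanSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder()
                    .appName("delta-scan-sketch")
                    .getOrCreate();
            // Placeholder path and filter column; not values from this run.
            Dataset<Row> snapshot = spark.read()
                    .format("delta")
                    .load("/data/snapshots/common");
            // Planning this action runs PrepareDeltaScan.filesForScan, which logs
            // "Filtering files for query" while data skipping prunes files.
            long rows = snapshot.where(col("tenantId").equalTo("6944346648028224943")).count();
            System.out.println("rows = " + rows);
            spark.stop();
        }
    }
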
ID 43
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted 2026/02/06 06:55:48 | Duration 0.2 s | Job IDs [89]

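Entry 43 spends its time inside Guava's LocalCache because DeltaLog keeps one log object per table path in a loading cache: the first reader of a path pays for replaying the _delta_log, and later readers reuse the cached object (hence the LocalCache.lockedGetOrLoad frames above). A minimal analog of that pattern; the TableLog class, path, and expiry policy are illustrative, not the real Delta types:

    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import java.util.concurrent.TimeUnit;

    public class LogCacheSketch {
        // Stand-in for the per-path DeltaLog handle; not the real Delta class.
        static final class TableLog {
            final String path;
            TableLog(String path) { this.path = path; }
        }

        public static void main(String[] args) throws Exception {
            LoadingCache<String, TableLog> logs = CacheBuilder.newBuilder()
                    .expireAfterAccess(60, TimeUnit.MINUTES) // illustrative policy
                    .build(new CacheLoader<String, TableLog>() {
                        @Override
                        public TableLog load(String path) {
                            return new TableLog(path); // real loader replays _delta_log
                        }
                    });
            TableLog first = logs.get("/data/worklists/enhanced");  // miss: loads
            TableLog second = logs.get("/data/worklists/enhanced"); // hit: same object
            System.out.println(first == second); // true
        }
    }
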
ID 42
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = BufferDataSnapShot activityId = 4ed5a2f4-9ab7-3eed-9f6f-05c008033ee1 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:55:48 | Duration 0.2 s | Job IDs [88]

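Entry 42 runs on Delta's own thread pool; the SparkThreadLocalCapturingRunnable frames show that the pool snapshots the submitting thread's state so that asynchronous snapshot work is attributed to the originating job context. A simplified sketch of that capture/restore pattern using only the JDK; the thread-local name and value are hypothetical:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class CapturingPoolSketch {
        // Hypothetical context holder; Delta captures Spark's thread-locals instead.
        static final ThreadLocal<String> CONTEXT = new ThreadLocal<>();

        // Analog of SparkThreadLocalCapturingRunnable: snapshot the caller's
        // thread-local state, restore it on the pool thread, and put the old
        // value back afterwards.
        static Runnable capturing(Runnable task) {
            String captured = CONTEXT.get(); // read on the submitting thread
            return () -> {
                String previous = CONTEXT.get();
                CONTEXT.set(captured);
                try {
                    task.run();
                } finally {
                    CONTEXT.set(previous);
                }
            };
        }

        public static void main(String[] args) {
            ExecutorService pool = Executors.newFixedThreadPool(2);
            CONTEXT.set("replenishmentRunId = 10000000095");
            pool.submit(capturing(() ->
                    System.out.println("async work under: " + CONTEXT.get())));
            pool.shutdown();
        }
    }
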
ID 41
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateProdPlantList activityId = 26b1f0cb-25a8-3510-a51b-f851585c09a0 workflowType = ProcessWorkloadShardWorkflow workflowId = 06426dca-6324-34ce-8244-a807b69efd73 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateProdPlantListForShardActivityImpl.createProdPlantList(CreateProdPlantListForShardActivityImpl.java:45)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:55:22 | Duration 1 s | Job IDs [83][84][85][86][87]

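Entry 41 is a DataFrameWriter.save issued from FileStorageAdapterImpl.storePerformanceMetrics, i.e. the activity persists its own timing metrics as a small DataFrame at the end of the step. A hedged sketch of what such a write plausibly looks like; the schema, values, and target path are assumptions, not taken from FileStorageAdapterImpl:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.spark.sql.*;
    import org.apache.spark.sql.types.DataTypes;
    import org.apache.spark.sql.types.StructType;

    public class MetricsWriteSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().appName("metrics-sketch").getOrCreate();
            // Assumed schema and values; the real metrics layout is not shown here.
            StructType schema = new StructType()
                    .add("activityType", DataTypes.StringType)
                    .add("durationMs", DataTypes.LongType);
            List<Row> rows = Arrays.asList(RowFactory.create("CreateProdPlantList", 1000L));
            Dataset<Row> metrics = spark.createDataFrame(rows, schema);
            // This save is the DataFrameWriter.save(DataFrameWriter.scala:243) call
            // visible at the top of the trace.
            metrics.write().format("delta").mode(SaveMode.Append).save("/data/metrics/performance");
            spark.stop();
        }
    }
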
ID 33
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = DetermineSupplyNetworkList activityId = 0de0027e-3381-39f5-98da-ba752ae6d9c2 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.SupplyNetworkForShardCreationActivityImpl.determineSupplyNetworkList(SupplyNetworkForShardCreationActivityImpl.java:44)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:55:15 | Duration 1 s | Job IDs [70][71][72][73][74]

ID 32
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = DetermineSupplyNetworkList activityId = 0de0027e-3381-39f5-98da-ba752ae6d9c2 workflowType = PrepareDataSnapshotWorkflow workflowId = 40151c89-de61-38cf-b7f5-9acab77d9e03 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeSupplyNetworkList(FileStorageAdapterImpl.java:210)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.SupplyNetworkForShardCreationDataAccessImpl.storeSupplyNetworkList(SupplyNetworkForShardCreationDataAccessImpl.java:29)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.SupplyNetworkForShardCreationImpl.createSupplyNetworkList(SupplyNetworkForShardCreationImpl.java:17)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.SupplyNetworkForShardCreationActivityImpl.determineSupplyNetworkList(SupplyNetworkForShardCreationActivityImpl.java:42)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:55:14 | Duration 2 s | Job IDs [66][67][69]

ID 30
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateProdPlantList activityId = 26b1f0cb-25a8-3510-a51b-f851585c09a0 workflowType = ProcessWorkloadShardWorkflow workflowId = 06426dca-6324-34ce-8244-a807b69efd73 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeProdPlantListForShard(FileStorageAdapterImpl.java:190)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateProdPlantListForShardDataAccessImpl.storeProdPlantList(CreateProdPlantListForShardDataAccessImpl.java:29)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.datasnapshot.ProdPlantListForShardCreationImpl.createProdPlantList(ProdPlantListForShardCreationImpl.java:30)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.CreateProdPlantListForShardActivityImpl.createProdPlantList(CreateProdPlantListForShardActivityImpl.java:43)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:55:13 | Duration 9 s | Job IDs [64][65]

ID 29
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = PrepareStreams activityId = a7c764b1-63a0-3ba4-97a6-7e6f12b3636f workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.OrderQuantityOptimizationStreamingActivityImpl.prepareStreams(OrderQuantityOptimizationStreamingActivityImpl.java:50)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:56 | Duration 1 s | Job IDs [57][58][59][60][61]

ID 24
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:64)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:54 | Duration 1 s | Job IDs [49][50][51][52][53]

ID 23
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeShardedWorklist(FileStorageAdapterImpl.java:250)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:26)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:35)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:45)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
Submitted 2026/02/06 06:54:52 | Duration 1 s | Job IDs [47][48]

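Entry 23 writes the sharded worklist (FileStorageAdapterImpl.storeShardedWorklist). One plausible layout, sketched below, partitions the output by a shard column so that each downstream ProcessWorkloadShardWorkflow reads only its own slice; the column name and paths are assumptions, since the adapter's actual layout is not visible here:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;
    import org.apache.spark.sql.SparkSession;

    public class ShardedWriteSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().appName("sharded-write").getOrCreate();
            Dataset<Row> worklist = spark.read().format("delta").load("/data/worklists/enhanced");
            // Assumed layout: partition by shard so each shard workflow reads one slice.
            worklist.write()
                    .format("delta")
                    .mode(SaveMode.Overwrite)
                    .partitionBy("shardId") // assumed column name
                    .save("/data/worklists/sharded");
            spark.stop();
        }
    }
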
ID 22
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:3586)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.urlByShard(FileStorageAdapterImpl.java:260)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeShardedWorklist(FileStorageAdapterImpl.java:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:26)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistShardGenerationDataAccessImpl.storeShardWorklists(WorklistShardGenerationDataAccessImpl.java:35)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:45)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
Submitted 2026/02/06 06:54:52 | Duration 0.6 s | Job IDs [42][43][44][45][46]

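Entry 22 calls Dataset.collectAsList from FileStorageAdapterImpl.urlByShard, i.e. it materializes a small result set (apparently one storage URL per shard) on the driver. A minimal sketch with assumed column name and path:

    import java.util.List;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class CollectShardUrlsSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().appName("collect-shards").getOrCreate();
            Dataset<Row> sharded = spark.read().format("delta").load("/data/worklists/sharded");
            // collectAsList brings the (small) distinct shard list to the driver;
            // this is the Dataset.collectAsList(Dataset.scala:3586) frame above.
            List<Row> shards = sharded.select("shardId").distinct().collectAsList();
            for (Row r : shards) {
                System.out.println("shard = " + r.get(0));
            }
            spark.stop();
        }
    }
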
ID 21
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.count(Dataset.scala:3615)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.calculateBinPackingMetrics(WorklistShardingImpl.java:108)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.performBinPacking(WorklistShardingImpl.java:95)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:38)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:51 | Duration 0.3 s | Job IDs [39][40][41]

ID 20
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.Dataset.count(Dataset.scala:3615)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.calculateBinPackingMetrics(WorklistShardingImpl.java:107)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.performBinPacking(WorklistShardingImpl.java:95)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistShardingImpl.createWorklistShards(WorklistShardingImpl.java:38)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.OrderProposalGroupingShardingActivityImpl.createOrderProposalShards(OrderProposalGroupingShardingActivityImpl.java:62)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:50 | Duration 1.0 s | Job IDs [29][30][31][32][33][34][35][36][37][38]

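Entries 20 and 21 are two back-to-back Dataset.count() calls from WorklistShardingImpl.calculateBinPackingMetrics (lines 107 and 108 in the traces). Each count() is a separate Spark action and therefore a separate entry in this list. A sketch under the assumption that the two metrics are an item count and a distinct shard count (the column name and path are placeholders):

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    public class BinPackingMetricsSketch {
        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().appName("binpack-metrics").getOrCreate();
            Dataset<Row> packed = spark.read().format("delta").load("/data/worklists/sharded");
            // Two consecutive count() calls, each a separate Spark action, mirroring
            // calculateBinPackingMetrics at WorklistShardingImpl.java:107 and :108.
            long items = packed.count();
            long shards = packed.select("shardId").distinct().count(); // assumed metric
            System.out.println("items = " + items + ", shards = " + shards);
            spark.stop();
        }
    }
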
ID 19
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted 2026/02/06 06:54:50 | Duration 0.4 s | Job IDs [28]

ID 17
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
Submitted 2026/02/06 06:54:49 | Duration 0.3 s | Job IDs [27]

ID 16
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted 2026/02/06 06:54:48 | Duration 0.3 s | Job IDs [26]

ID 15
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:48 | Duration 0.2 s | Job IDs [25]

ID 14
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storePerformanceMetrics(FileStorageAdapterImpl.java:755)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistAttributeEnhancementActivityImpl.enhanceWorklistByAttributes(WorklistAttributeEnhancementActivityImpl.java:50)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:25 | Duration 2 s | Job IDs [18][19][20][21][22]

ID 3
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted 2026/02/06 06:54:13 | Duration 0.4 s | Job IDs [4]

ID 2
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:13 | Duration 0.3 s | Job IDs [3]

ID 1
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.DeltaLog$.recordOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.DeltaLog$.recordDeltaOperation(DeltaLog.scala:677)
org.apache.spark.sql.delta.DeltaLog$.createDeltaLog$1(DeltaLog.scala:922)
org.apache.spark.sql.delta.DeltaLog$.$anonfun$apply$5(DeltaLog.scala:942)
com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4860)
com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3551)
com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2302)
com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2177)
com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2068)
com.google.common.cache.LocalCache.get(LocalCache.java:3986)
com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4855)
org.apache.spark.sql.delta.DeltaLog$.getDeltaLogFromCache$1(DeltaLog.scala:941)
org.apache.spark.sql.delta.DeltaLog$.apply(DeltaLog.scala:951)
org.apache.spark.sql.delta.DeltaLog$.forTable(DeltaLog.scala:782)
Submitted 2026/02/06 06:54:08 | Duration 3 s | Job IDs [1]

ID 0
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.util.threads.DeltaThreadPool.$anonfun$submit$1(DeltaThreadPool.scala:39)
java.base/java.util.concurrent.FutureTask.run(Unknown Source)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:77)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:60)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:116)
org.apache.spark.sql.delta.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:119)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
java.base/java.lang.Thread.run(Unknown Source)
Submitted 2026/02/06 06:54:08 | Duration 3 s | Job IDs [0]

ID 4
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:561)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeGroupingWorklist(FileStorageAdapterImpl.java:472)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistAttributeEnhancementDataAccessImpl.storeGroupingWorklist(WorklistAttributeEnhancementDataAccessImpl.java:37)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistAttributeEnhancementImpl.enhanceWorklistByAttributes(WorklistAttributeEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistAttributeEnhancementActivityImpl.enhanceWorklistByAttributes(WorklistAttributeEnhancementActivityImpl.java:48)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/02/06 06:54:14
|
11 s
|
|
|
5
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:243)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.writeToStorage(FileStorageAdapterImpl.java:561)
com.sap.s4hana.eureka.business.crporderqtyoptservice.storageaccess.FileStorageAdapterImpl.storeGroupingWorklist(FileStorageAdapterImpl.java:472)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistAttributeEnhancementDataAccessImpl.storeGroupingWorklist(WorklistAttributeEnhancementDataAccessImpl.java:37)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.business.grouping.WorklistAttributeEnhancementImpl.enhanceWorklistByAttributes(WorklistAttributeEnhancementImpl.java:35)
com.sap.s4hana.eureka.business.crporderqtyoptservice.core.controller.grouping.WorklistAttributeEnhancementActivityImpl.enhanceWorklistByAttributes(WorklistAttributeEnhancementActivityImpl.java:48)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
java.base/java.lang.reflect.Method.invoke(Unknown Source)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor$POJOActivityInboundCallsInterceptor.executeActivity(RootActivityInboundCallsInterceptor.java:44)
io.temporal.internal.activity.RootActivityInboundCallsInterceptor.execute(RootActivityInboundCallsInterceptor.java:23)
io.temporal.internal.activity.ActivityTaskExecutors$BaseActivityTaskExecutor.execute(ActivityTaskExecutors.java:88)
io.temporal.internal.activity.ActivityTaskHandlerImpl.handle(ActivityTaskHandlerImpl.java:105)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handleActivity(ActivityWorker.java:294)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:258)
io.temporal.internal.worker.ActivityWorker$TaskHandlerImpl.handle(ActivityWorker.java:221)
io.temporal.internal.worker.PollTaskExecutor.lambda$process$1(PollTaskExecutor.java:76)
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
|
2026/02/06 06:54:20
|
3 s
|
[9][11][12][13][14][15][16]
|
|
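The two preceding entries (4 and 5) both bottom out in DataFrameWriter.save called from FileStorageAdapterImpl.writeToStorage, i.e. an ordinary Spark write of the grouping worklist. A minimal sketch of what such a call presumably looks like; the Delta format is inferred from the surrounding Delta frames, and the path and save mode are illustrative assumptions, not taken from the log:

    import org.apache.spark.sql.DataFrame

    def writeToStorageSketch(worklist: DataFrame, path: String): Unit =
      worklist.write
        .format("delta")    // inferred: Delta frames dominate the adjacent traces
        .mode("overwrite")  // assumed; the actual SaveMode is not visible in the log
        .save(path)         // the DataFrameWriter.save frame at the top of both traces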
6
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:54:15
|
2 s
|
[5]
|
|
7
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:54:17
|
0.1 s
|
|
|
8
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:54:17
|
2 s
|
[6]
|
|
9
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:54:19
|
0.3 s
|
[7]
|
|
10
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:54:19
|
64 ms
|
|
|
11
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:54:20
|
0.8 s
|
[8]
|
|
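Entries 6 through 11 are all the same "Filtering files for query" operation: during query planning, PrepareDeltaScan asks the snapshot for filesForScan, and DataSkippingReaderBase prunes data files using per-file statistics before any rows are read. Any filtered read of a Delta table triggers this; a minimal sketch, with path and predicate as illustrative assumptions:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().getOrCreate()

    // A filtered Delta scan: the predicate lets PrepareDeltaScan/filesForScan
    // skip files whose min/max statistics cannot match it.
    val worklist = spark.read.format("delta").load("/path/to/grouping_worklist")
    worklist.filter("replenishmentRunId = 10000000095").count()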
12
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PreparedDeltaFileIndex.matchingFiles(PrepareDeltaScan.scala:386)
org.apache.spark.sql.delta.files.TahoeFileIndex.listAddFiles(TahoeFileIndex.scala:110)
org.apache.spark.sql.delta.files.TahoeFileIndex.listFiles(TahoeFileIndex.scala:102)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions(DataSourceScanExec.scala:256)
org.apache.spark.sql.execution.FileSourceScanLike.selectedPartitions$(DataSourceScanExec.scala:251)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions$lzycompute(DataSourceScanExec.scala:506)
org.apache.spark.sql.execution.FileSourceScanExec.selectedPartitions(DataSourceScanExec.scala:506)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions(DataSourceScanExec.scala:286)
org.apache.spark.sql.execution.FileSourceScanLike.dynamicallySelectedPartitions$(DataSourceScanExec.scala:267)
|
2026/02/06 06:54:21
|
0.8 s
|
[10]
|
|
13
|
replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = EnhanceWorklistByAttributes activityId = b191ef00-edf7-38ea-9e23-db676456ad33 workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.OptimisticTransaction.recordOperation(OptimisticTransaction.scala:165)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.OptimisticTransaction.recordDeltaOperation(OptimisticTransaction.scala:165)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$doCommitRetryIteratively$1(OptimisticTransaction.scala:2223)
org.apache.spark.sql.delta.OptimisticTransactionImpl.lockCommitIfEnabled(OptimisticTransaction.scala:2194)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively(OptimisticTransaction.scala:2210)
org.apache.spark.sql.delta.OptimisticTransactionImpl.doCommitRetryIteratively$(OptimisticTransaction.scala:2206)
org.apache.spark.sql.delta.OptimisticTransaction.doCommitRetryIteratively(OptimisticTransaction.scala:165)
org.apache.spark.sql.delta.OptimisticTransactionImpl.liftedTree1$1(OptimisticTransaction.scala:1434)
org.apache.spark.sql.delta.OptimisticTransactionImpl.$anonfun$commitImpl$1(OptimisticTransaction.scala:1330)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:171)
org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:169)
org.apache.spark.sql.delta.OptimisticTransaction.recordFrameProfile(OptimisticTransaction.scala:165)
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
|
2026/02/06 06:54:24
|
52 ms
|
[17]
|
|
|
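Entry 13 is the commit side of a Delta write: OptimisticTransactionImpl.doCommitRetryIteratively attempts to write the next version to the table's _delta_log, re-checking for conflicting concurrent commits and retrying where possible. Nothing special is required from the caller; a plain append like the sketch below (path illustrative) ends in exactly this code path:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().getOrCreate()

    // Each append is committed optimistically; concurrent writers race to
    // create the next _delta_log version, and losers retry or fail.
    spark.range(100).toDF("id")
      .write.format("delta").mode("append").save("/path/to/order_proposals")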
18
|
Delta: replenishmentRunId = 10000000095 tenantId = 6944346648028224943 activityType = CreateOrderProposalShards activityId = 26269996-b72a-3f7e-9814-66d8981041bd workflowType = CalculateOrderProposalsWorkflow workflowId = eba25ea7-bf45-3f36-ad32-cdab1a2c01e5 attempt = 1 cornerstoneTenantId = 8378 marketUnit = Daily_grocery scenario = STANDARD: Filtering files for query
org.apache.spark.sql.delta.metering.DeltaLogging.$anonfun$recordDeltaOperationInternal$1(DeltaLogging.scala:139)
com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:128)
com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:117)
org.apache.spark.sql.delta.Snapshot.recordOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperationInternal(DeltaLogging.scala:138)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:128)
org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:118)
org.apache.spark.sql.delta.Snapshot.recordDeltaOperation(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan(DataSkippingReader.scala:1243)
org.apache.spark.sql.delta.stats.DataSkippingReaderBase.filesForScan$(DataSkippingReader.scala:1184)
org.apache.spark.sql.delta.Snapshot.filesForScan(Snapshot.scala:81)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.$anonfun$filesForScan$1(PrepareDeltaScan.scala:132)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:56)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:35)
org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:29)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.withStatusCode(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan(PrepareDeltaScan.scala:117)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase.filesForScan$(PrepareDeltaScan.scala:112)
org.apache.spark.sql.delta.stats.PrepareDeltaScan.filesForScan(PrepareDeltaScan.scala:306)
org.apache.spark.sql.delta.stats.PrepareDeltaScanBase$$anonfun$prepareDeltaScan$1.$anonfun$applyOrElse$1(PrepareDeltaScan.scala:150)
|
2026/02/06 06:54:50
|
22 ms
|
|
|