/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Apache.Arrow.Adbc.Drivers.Apache.Spark;
using Apache.Arrow.Adbc.Drivers.Databricks;
using Apache.Arrow.Ipc;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Columns;
using BenchmarkDotNet.Reports;
using BenchmarkDotNet.Running;

namespace Apache.Arrow.Adbc.Benchmarks.Databricks
{
    /// <summary>
    /// Custom column to display peak memory usage in the benchmark results table.
    /// </summary>
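    /// <remarks>
    /// Nothing in this file registers the column; the host program has to wire it up.
    /// A minimal sketch, assuming the runner builds a BenchmarkDotNet ManualConfig:
    /// <code>
    /// var config = ManualConfig.Create(DefaultConfig.Instance)
    ///     .AddColumn(new PeakMemoryColumn());
    /// BenchmarkRunner.Run&lt;CloudFetchRealE2EBenchmark&gt;(config);
    /// </code>
    /// </remarks>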
    public class PeakMemoryColumn : IColumn
    {
        public string Id => nameof(PeakMemoryColumn);
        public string ColumnName => "Peak Memory (MB)";
        public string Legend => "Peak working set memory during benchmark execution";
        public UnitType UnitType => UnitType.Size;
        public bool AlwaysShow => true;
        public ColumnCategory Category => ColumnCategory.Custom;
        public int PriorityInCategory => 0;
        public bool IsNumeric => true;
        public bool IsAvailable(Summary summary) => true;
        public bool IsDefault(Summary summary, BenchmarkCase benchmarkCase) => false;

        public string GetValue(Summary summary, BenchmarkCase benchmarkCase)
        {
            // CloudFetchRealE2EBenchmark results are keyed by benchmark name plus parameters
            if (benchmarkCase.Descriptor.Type == typeof(CloudFetchRealE2EBenchmark))
            {
                // Extract the ReadDelayMs parameter to rebuild the key
                var readDelayParam = benchmarkCase.Parameters["ReadDelayMs"];
                string key = $"ExecuteLargeQuery_{readDelayParam}";
                if (CloudFetchRealE2EBenchmark.PeakMemoryResults.TryGetValue(key, out var peakMemoryMB))
                {
                    return $"{peakMemoryMB:F2}";
                }
            }

            return "See previous console output";
        }

        public string GetValue(Summary summary, BenchmarkCase benchmarkCase, SummaryStyle style)
        {
            return GetValue(summary, benchmarkCase);
        }

        public override string ToString() => ColumnName;
    }

    /// <summary>
    /// Configuration model for the Databricks test configuration JSON file.
    /// </summary>
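    /// <remarks>
    /// A minimal example file (all values are placeholders):
    /// <code>
    /// {
    ///   "uri": "https://my-workspace.cloud.databricks.com/sql/1.0/warehouses/abc123",
    ///   "token": "dapi...",
    ///   "query": "SELECT * FROM my_catalog.my_schema.my_table"
    /// }
    /// </code>
    /// "type", "catalog", and "schema" may also be present but are not required here.
    /// </remarks>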
    internal class DatabricksTestConfig
    {
        // Lowercase property names intentionally match the JSON keys in the config file
        public string? uri { get; set; }
        public string? token { get; set; }
        public string? query { get; set; }
        public string? type { get; set; }
        public string? catalog { get; set; }
        public string? schema { get; set; }
    }

    /// <summary>
    /// Real E2E performance benchmark for Databricks CloudFetch against an actual cluster.
    ///
    /// Prerequisites:
    /// - Set the DATABRICKS_TEST_CONFIG_FILE environment variable
    /// - The config file must contain the cluster connection details
    ///
    /// Run with: dotnet run -c Release --project Benchmarks/Benchmarks.csproj --framework net8.0 -- --filter "*CloudFetchRealE2E*" --job dry
    ///
    /// Measures:
    /// - Peak memory usage
    /// - Total allocations
    /// - GC collections
    /// - Query execution time
    /// - Row processing throughput
    ///
    /// Parameters:
    /// - ReadDelayMs: fixed at 5 milliseconds per 10K rows to simulate Power BI consumption
    /// </summary>
    [MemoryDiagnoser]
    [GcServer(true)]
    [SimpleJob(warmupCount: 1, iterationCount: 3)]
    [MinColumn, MaxColumn, MeanColumn, MedianColumn]
    public class CloudFetchRealE2EBenchmark
    {
        // Static dictionary that stores peak memory results for the custom column
        public static readonly Dictionary<string, double> PeakMemoryResults = new Dictionary<string, double>();
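        // Note: the column can only read this dictionary when the benchmarks execute in
        // the host process (for example via an in-process toolchain); BenchmarkDotNet's
        // default toolchain runs benchmarks in a child process, in which case the column
        // falls back to the console output printed during the iterations.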

        private AdbcConnection? _connection;
        private Process _currentProcess = null!;
        private long _peakMemoryBytes;
        private DatabricksTestConfig _testConfig = null!;
        private string _hostname = null!;
        private string _httpPath = null!;

        [Params(5)] // Read delay in milliseconds per 10K rows (5 = simulate Power BI)
        public int ReadDelayMs { get; set; }

        [GlobalSetup]
        public void GlobalSetup()
        {
            // Check if Databricks config is available
            string? configFile = Environment.GetEnvironmentVariable("DATABRICKS_TEST_CONFIG_FILE");
            if (string.IsNullOrEmpty(configFile))
            {
                throw new InvalidOperationException(
                    "DATABRICKS_TEST_CONFIG_FILE environment variable must be set. " +
                    "Set it to the path of your Databricks test configuration JSON file.");
            }

            // Read and parse config file
            string configJson = File.ReadAllText(configFile);
            _testConfig = JsonSerializer.Deserialize<DatabricksTestConfig>(configJson)
                ?? throw new InvalidOperationException("Failed to parse config file");

            if (string.IsNullOrEmpty(_testConfig.uri) || string.IsNullOrEmpty(_testConfig.token))
            {
                throw new InvalidOperationException("Config file must contain 'uri' and 'token' fields");
            }

            if (string.IsNullOrEmpty(_testConfig.query))
            {
                throw new InvalidOperationException("Config file must contain 'query' field");
            }

            // Parse URI to extract hostname and http_path
            // Format: https://hostname/sql/1.0/warehouses/xxx
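            // e.g. a (hypothetical) uri "https://adb-1234.azuredatabricks.net/sql/1.0/warehouses/abc123"
            // yields Host = "adb-1234.azuredatabricks.net" and PathAndQuery = "/sql/1.0/warehouses/abc123"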
            var uri = new Uri(_testConfig.uri);
            _hostname = uri.Host;
            _httpPath = uri.PathAndQuery;

            _currentProcess = Process.GetCurrentProcess();
            Console.WriteLine($"Loaded config from: {configFile}");
            Console.WriteLine($"Hostname: {_hostname}");
            Console.WriteLine($"HTTP Path: {_httpPath}");
            Console.WriteLine($"Query: {_testConfig.query}");
            Console.WriteLine($"Benchmark will test CloudFetch with {ReadDelayMs}ms per 10K rows read delay");
        }

        [IterationSetup]
        public void IterationSetup()
        {
            // Create connection for this iteration using config values
            var parameters = new Dictionary<string, string>
            {
                [AdbcOptions.Uri] = _testConfig.uri!,
                [SparkParameters.Token] = _testConfig.token!,
                [DatabricksParameters.UseCloudFetch] = "true",
                [DatabricksParameters.EnableDirectResults] = "true",
                [DatabricksParameters.CanDecompressLz4] = "true",
                [DatabricksParameters.MaxBytesPerFile] = "10485760", // 10MB per file
            };

            var driver = new DatabricksDriver();
            var database = driver.Open(parameters);
            _connection = database.Connect(parameters);

            // Reset peak memory tracking
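            // Collect twice with a finalizer drain in between so memory waiting on
            // finalization is reclaimed before the working-set baseline is captured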
            GC.Collect(2, GCCollectionMode.Forced, blocking: true, compacting: false);
            GC.WaitForPendingFinalizers();
            GC.Collect(2, GCCollectionMode.Forced, blocking: true, compacting: false);
            _currentProcess.Refresh();
            _peakMemoryBytes = _currentProcess.WorkingSet64;
        }

        [IterationCleanup]
        public void IterationCleanup()
        {
            _connection?.Dispose();
            _connection = null;

            // Print and store peak memory for this iteration
            double peakMemoryMB = _peakMemoryBytes / 1024.0 / 1024.0;
            Console.WriteLine($"CloudFetch E2E [Delay={ReadDelayMs}ms/10K rows] - Peak memory: {peakMemoryMB:F2} MB");

            // Store in the static dictionary for the custom column (the key includes the parameter)
            string key = $"ExecuteLargeQuery_{ReadDelayMs}";
            PeakMemoryResults[key] = peakMemoryMB;
        }

        /// <summary>
        /// Execute a large query against Databricks and consume all result batches.
        /// Simulates client behavior, such as Power BI reading data.
        /// Uses the query from the config file.
        /// </summary>
        [Benchmark]
        public async Task<long> ExecuteLargeQuery()
        {
            if (_connection == null)
            {
                throw new InvalidOperationException("Connection not initialized");
            }

            // Execute the query from the config file; "using" ensures the statement is
            // disposed even if reading the stream throws
            using var statement = _connection.CreateStatement();
            statement.SqlQuery = _testConfig.query;

            var result = await statement.ExecuteQueryAsync();
            if (result.Stream == null)
            {
                throw new InvalidOperationException("Result stream is null");
            }

            // Read all batches and track peak memory
            long totalRows = 0;
            long totalBatches = 0;
            RecordBatch? batch;

            while ((batch = await result.Stream.ReadNextRecordBatchAsync()) != null)
            {
                totalRows += batch.Length;
                totalBatches++;

                // Track peak memory periodically
                if (totalBatches % 10 == 0)
                {
                    TrackPeakMemory();
                }

                // Simulate Power BI processing delay if configured
                // Delay is proportional to batch size: ReadDelayMs per 10K rows
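                // e.g. a 50,000-row batch with ReadDelayMs = 5 sleeps (50000 / 10000) * 5 = 25 ms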
                if (ReadDelayMs > 0)
                {
                    int delayForBatch = (int)((batch.Length / 10000.0) * ReadDelayMs);
                    if (delayForBatch > 0)
                    {
                        // Thread.Sleep deliberately blocks the reading thread, as a
                        // synchronous client such as Power BI would
                        Thread.Sleep(delayForBatch);
                    }
                }

                batch.Dispose();
            }

            // Final peak memory check
            TrackPeakMemory();

            return totalRows;
        }

        private void TrackPeakMemory()
        {
            _currentProcess.Refresh();
            long currentMemory = _currentProcess.WorkingSet64;
            if (currentMemory > _peakMemoryBytes)
            {
                _peakMemoryBytes = currentMemory;
            }
        }

        [GlobalCleanup]
        public void GlobalCleanup()
        {
            // Release the Process handle acquired in GlobalSetup
            _currentProcess.Dispose();
            GC.Collect(2, GCCollectionMode.Forced, blocking: true, compacting: true);
            GC.WaitForPendingFinalizers();
        }
    }
}