diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml new file mode 100644 index 00000000..ce86ed39 --- /dev/null +++ b/.github/workflows/benchmarks.yml @@ -0,0 +1,45 @@ +name: Benchmarks + +on: + workflow_call: + inputs: + benchmark_package_path: + type: string + description: "Path to the directory containing the benchmarking package. Defaults to ." + default: "." + swift_package_arguments: + type: string + description: "Arguments to the swift package command invocation e.g. `--disable-sandbox`" + linux_5_9_enabled: + type: boolean + description: "Boolean to enable the Linux 5.9 Swift version matrix job. Defaults to true." + default: true + linux_5_10_enabled: + type: boolean + description: "Boolean to enable the Linux 5.10 Swift version matrix job. Defaults to true." + default: true + linux_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux 6.0 Swift version matrix job. Defaults to true." + default: true + linux_nightly_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux nightly 6.0 Swift version matrix job. Defaults to true." + default: true + linux_nightly_main_enabled: + type: boolean + description: "Boolean to enable the Linux nightly main Swift version matrix job. Defaults to true." 
+ default: true + +jobs: + benchmarks: + name: Benchmarks + uses: ./.github/workflows/swift_matrix.yml + with: + name: "Benchmarks" + matrix_linux_command: "apt-get update -y -q && apt-get install -y -q libjemalloc-dev && apt-get -y install libsasl2-dev && swift package --package-path ${{ inputs.benchmark_package_path }} ${{ inputs.swift_package_arguments }} benchmark baseline check --check-absolute-path ${{ inputs.benchmark_package_path }}/Thresholds/${SWIFT_VERSION}/" + matrix_linux_5_9_enabled: ${{ inputs.linux_5_9_enabled }} + matrix_linux_5_10_enabled: ${{ inputs.linux_5_10_enabled }} + matrix_linux_6_0_enabled: ${{ inputs.linux_6_0_enabled }} + matrix_linux_nightly_6_0_enabled: ${{ inputs.linux_nightly_6_0_enabled }} + matrix_linux_nightly_main_enabled: ${{ inputs.linux_nightly_main_enabled }} diff --git a/.github/workflows/cxx_interop.yml b/.github/workflows/cxx_interop.yml new file mode 100644 index 00000000..ce27f060 --- /dev/null +++ b/.github/workflows/cxx_interop.yml @@ -0,0 +1,38 @@ +name: Cxx interop + +on: + workflow_call: + inputs: + linux_5_9_enabled: + type: boolean + description: "Boolean to enable the Linux 5.9 Swift version matrix job. Defaults to true." + default: true + linux_5_10_enabled: + type: boolean + description: "Boolean to enable the Linux 5.10 Swift version matrix job. Defaults to true." + default: true + linux_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux 6.0 Swift version matrix job. Defaults to true." + default: true + linux_nightly_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux nightly 6.0 Swift version matrix job. Defaults to true." + default: true + linux_nightly_main_enabled: + type: boolean + description: "Boolean to enable the Linux nightly main Swift version matrix job. Defaults to true." 
+ default: true + +jobs: + cxx-interop: + name: Cxx interop + uses: ./.github/workflows/swift_matrix.yml + with: + name: "Cxx interop" + matrix_linux_command: "apt-get update -y -q && apt-get install -y -q jq && apt-get -y install libsasl2-dev && curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-cxx-interop-compatibility.sh | bash" + matrix_linux_5_9_enabled: ${{ inputs.linux_5_9_enabled }} + matrix_linux_5_10_enabled: ${{ inputs.linux_5_10_enabled }} + matrix_linux_6_0_enabled: ${{ inputs.linux_6_0_enabled }} + matrix_linux_nightly_6_0_enabled: ${{ inputs.linux_nightly_6_0_enabled }} + matrix_linux_nightly_main_enabled: ${{ inputs.linux_nightly_main_enabled }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b2f403c3..983b1e19 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -16,3 +16,13 @@ jobs: linux_6_0_arguments_override: "--explicit-target-dependency-import-check error" linux_nightly_6_0_arguments_override: "--explicit-target-dependency-import-check error" linux_nightly_main_arguments_override: "--explicit-target-dependency-import-check error" + + benchmarks: + name: Benchmarks + uses: ./.github/workflows/benchmarks.yml + with: + benchmark_package_path: "Benchmarks" + + cxx-interop: + name: Cxx interop + uses: ./.github/workflows/cxx_interop.yml diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 2c6a87a4..fa7d2d14 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -23,17 +23,12 @@ jobs: linux_nightly_6_0_arguments_override: "--explicit-target-dependency-import-check error" linux_nightly_main_arguments_override: "--explicit-target-dependency-import-check error" + benchmarks: + name: Benchmarks + uses: ./.github/workflows/benchmarks.yml + with: + benchmark_package_path: "Benchmarks" + cxx-interop: name: Cxx interop - uses: apple/swift-nio/.github/workflows/swift_matrix.yml@main - with: - name: "Cxx 
interop" - matrix_linux_command: "apt-get update -y -q && apt-get install -y -q jq && apt-get -y install libsasl2-dev && curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-cxx-interop-compatibility.sh | bash" - matrix_linux_5_9_enabled: true - matrix_linux_5_10_enabled: true - matrix_linux_6_0_enabled: true - matrix_linux_nightly_6_0_enabled: true - matrix_linux_nightly_main_enabled: true - matrix_windows_6_0_enabled: false - matrix_windows_nightly_6_0_enabled: false - matrix_windows_nightly_main_enabled: false + uses: ./.github/workflows/cxx_interop.yml diff --git a/.github/workflows/swift_matrix.yml b/.github/workflows/swift_matrix.yml new file mode 100644 index 00000000..612718de --- /dev/null +++ b/.github/workflows/swift_matrix.yml @@ -0,0 +1,133 @@ +name: Matrix + +on: + workflow_call: + inputs: + name: + type: string + description: "The name of the workflow used for the concurrency group." + required: true + matrix_linux_command: + type: string + description: "The command of the current Swift version linux matrix job to execute." + required: true + matrix_linux_5_9_enabled: + type: boolean + description: "Boolean to enable the 5.9 Swift version matrix job. Defaults to true." + default: true + matrix_linux_5_9_container_image: + type: string + description: "Container image for the 5.9 Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swift:5.9-jammy" + matrix_linux_5_9_command_override: + type: string + description: "The command of the 5.9 Swift version linux matrix job to execute." + matrix_linux_5_10_enabled: + type: boolean + description: "Boolean to enable the 5.10 Swift version matrix job. Defaults to true." + default: true + matrix_linux_5_10_container_image: + type: string + description: "Container image for the 5.10 Swift version matrix job. Defaults to matching Swift Ubuntu image." 
+ default: "swift:5.10-jammy" + matrix_linux_5_10_command_override: + type: string + description: "The command of the 5.10 Swift version linux matrix job to execute." + matrix_linux_6_0_enabled: + type: boolean + description: "Boolean to enable the 6.0 Swift version matrix job. Defaults to true." + default: true + matrix_linux_6_0_container_image: + type: string + description: "Container image for the 6.0 Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swift:6.0-jammy" + matrix_linux_6_0_command_override: + type: string + description: "The command of the 6.0 Swift version linux matrix job to execute." + matrix_linux_nightly_6_0_enabled: + type: boolean + description: "Boolean to enable the nightly 6.0 Swift version matrix job. Defaults to true." + default: true + matrix_linux_nightly_6_0_container_image: + type: string + description: "Container image for the nightly 6.0 Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swiftlang/swift:nightly-6.0-jammy" + matrix_linux_nightly_6_0_command_override: + type: string + description: "The command of the nightly 6.0 Swift version linux matrix job to execute." + matrix_linux_nightly_main_enabled: + type: boolean + description: "Boolean to enable the nightly main Swift version matrix job. Defaults to true." + default: true + matrix_linux_nightly_main_container_image: + type: string + description: "Container image for the nightly main Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swiftlang/swift:nightly-main-jammy" + matrix_linux_nightly_main_command_override: + type: string + description: "The command of the nightly main Swift version linux matrix job to execute." 
+ +# We are cancelling previously triggered workflow runs +concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.name }} + cancel-in-progress: true + +jobs: + linux: + name: Linux (${{ matrix.swift.swift_version }}) + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + # We are specifying only the major and minor of the docker images to automatically pick up the latest patch release + swift: + - image: ${{ inputs.matrix_linux_5_9_container_image }} + swift_version: "5.9" + enabled: ${{ inputs.matrix_linux_5_9_enabled }} + - image: ${{ inputs.matrix_linux_5_10_container_image }} + swift_version: "5.10" + enabled: ${{ inputs.matrix_linux_5_10_enabled }} + - image: ${{ inputs.matrix_linux_6_0_container_image }} + swift_version: "6.0" + enabled: ${{ inputs.matrix_linux_6_0_enabled }} + - image: ${{ inputs.matrix_linux_nightly_6_0_container_image }} + swift_version: "nightly-6.0" + enabled: ${{ inputs.matrix_linux_nightly_6_0_enabled }} + - image: ${{ inputs.matrix_linux_nightly_main_container_image }} + swift_version: "nightly-main" + enabled: ${{ inputs.matrix_linux_nightly_main_enabled }} + container: + image: ${{ matrix.swift.image }} + steps: + - name: Checkout repository + if: ${{ matrix.swift.enabled }} + uses: actions/checkout@v4 + with: + persist-credentials: false + submodules: true + - name: Mark the workspace as safe + if: ${{ matrix.swift.enabled }} + # https://github.com/actions/checkout/issues/766 + run: git config --global --add safe.directory ${GITHUB_WORKSPACE} + - name: Run matrix job + if: ${{ matrix.swift.enabled }} + env: + SWIFT_VERSION: ${{ matrix.swift.swift_version }} + COMMAND: ${{ inputs.matrix_linux_command }} + COMMAND_OVERRIDE_5_9: ${{ inputs.matrix_linux_5_9_command_override }} + COMMAND_OVERRIDE_5_10: ${{ inputs.matrix_linux_5_10_command_override }} + COMMAND_OVERRIDE_6_0: ${{ inputs.matrix_linux_6_0_command_override }} + COMMAND_OVERRIDE_NIGHTLY_6_0: ${{ inputs.matrix_linux_nightly_6_0_command_override 
}} + COMMAND_OVERRIDE_NIGHTLY_MAIN: ${{ inputs.matrix_linux_nightly_main_command_override }} + run: | + apt-get -qq update && apt-get -qq -y install curl && apt-get -y install libsasl2-dev + curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-matrix-job.sh | bash + services: + zookeeper: + image: ubuntu/zookeeper + kafka: + image: ubuntu/kafka + env: + ZOOKEEPER_HOST: zookeeper + env: + KAFKA_HOST: kafka diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 3300a33b..96773e82 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -47,59 +47,17 @@ on: jobs: unit-tests: name: Unit tests - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - # We are specifying only the major and minor of the docker images to automatically pick up the latest patch release - swift: - - image: "swift:5.9-jammy" - swift_version: "5.9" - enabled: ${{ inputs.linux_5_9_enabled }} - - image: "swift:5.10-jammy" - swift_version: "5.10" - enabled: ${{ inputs.linux_5_10_enabled }} - - image: "swift:6.0-jammy" - swift_version: "6.0" - enabled: ${{ inputs.linux_6_0_enabled }} - - image: "swiftlang/swift:nightly-6.0-jammy" - swift_version: "nightly-6.0" - enabled: ${{ inputs.linux_nightly_6_0_enabled }} - - image: "swiftlang/swift:nightly-main-jammy" - swift_version: "nightly-main" - enabled: ${{ inputs.linux_nightly_main_enabled }} - steps: - - name: Checkout repository - if: ${{ matrix.swift.enabled }} - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: true - - name: Mark the workspace as safe - if: ${{ matrix.swift.enabled }} - # https://github.com/actions/checkout/issues/766 - run: git config --global --add safe.directory ${GITHUB_WORKSPACE} - - name: Run matrix job - if: ${{ matrix.swift.enabled }} - env: - SWIFT_VERSION: ${{ matrix.swift.swift_version }} - COMMAND: "swift test" - COMMAND_OVERRIDE_5_9: "swift test ${{ inputs.linux_5_9_arguments_override }}" - 
COMMAND_OVERRIDE_5_10: "swift test ${{ inputs.linux_5_10_arguments_override }}" - COMMAND_OVERRIDE_6_0: "swift test ${{ inputs.linux_6_0_arguments_override }}" - COMMAND_OVERRIDE_NIGHTLY_6_0: "swift test ${{ inputs.linux_nightly_6_0_arguments_override }}" - COMMAND_OVERRIDE_NIGHTLY_MAIN: "swift test ${{ inputs.linux_nightly_main_arguments_override }}" - run: | - apt-get -qq update && apt-get -qq -y install curl && apt-get -y install libsasl2-dev - curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-matrix-job.sh | bash - container: - image: ${{ matrix.swift.image }} - services: - zookeeper: - image: ubuntu/zookeeper - kafka: - image: ubuntu/kafka - env: - ZOOKEEPER_HOST: zookeeper - env: - KAFKA_HOST: kafka + uses: ./.github/workflows/swift_matrix.yml + with: + name: "Unit tests" + matrix_linux_command: "swift test" + matrix_linux_5_9_enabled: ${{ inputs.linux_5_9_enabled }} + matrix_linux_5_9_command_override: "swift test ${{ inputs.linux_5_9_arguments_override }}" + matrix_linux_5_10_enabled: ${{ inputs.linux_5_10_enabled }} + matrix_linux_5_10_command_override: "swift test ${{ inputs.linux_5_10_arguments_override }}" + matrix_linux_6_0_enabled: ${{ inputs.linux_6_0_enabled }} + matrix_linux_6_0_command_override: "swift test ${{ inputs.linux_6_0_arguments_override }}" + matrix_linux_nightly_6_0_enabled: ${{ inputs.linux_nightly_6_0_enabled }} + matrix_linux_nightly_6_0_command_override: "swift test ${{ inputs.linux_nightly_6_0_arguments_override }}" + matrix_linux_nightly_main_enabled: ${{ inputs.linux_nightly_main_enabled }} + matrix_linux_nightly_main_command_override: "swift test ${{ inputs.linux_nightly_main_arguments_override }}" diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 9c49a2c3..837e4315 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ 
b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -24,35 +24,27 @@ import struct Foundation.UUID let benchmarks = { var uniqueTestTopic: String! - let messageCount: UInt = 1000 + let numberOfPartitions: Int32 = 4 + // We perform every benchmark this many times + let numberOfBatches: UInt = 1000 + // In every benchmark iteration, we consume this many messages + let messageCountPerBatch: UInt = 1000 Benchmark.defaultConfiguration = .init( metrics: [ - .wallClock, - .cpuTotal, - .contextSwitches, - .throughput, - .allocatedResidentMemory, - ] + .arc, - warmupIterations: 0, - scalingFactor: .one, - maxDuration: .seconds(5), - maxIterations: 100, - thresholds: [ - .wallClock: .init(relative: [.p90: 35]), - .cpuTotal: .init(relative: [.p90: 35]), - .allocatedResidentMemory: .init(relative: [.p90: 20]), - .contextSwitches: .init(relative: [.p90: 35]), - .throughput: .init(relative: [.p90: 35]), - .objectAllocCount: .init(relative: [.p90: 20]), - .retainCount: .init(relative: [.p90: 20]), - .releaseCount: .init(relative: [.p90: 20]), - .retainReleaseDelta: .init(relative: [.p90: 20]), - ] + .mallocCountTotal, + ], + // We need to tell the benchmarking framework how often we are running the benchmark. 
+ scalingFactor: .kilo, + maxDuration: .seconds(10_000_000), + maxIterations: 10 ) Benchmark.setup = { - uniqueTestTopic = try await prepareTopic(messagesCount: messageCount, partitions: 6) + uniqueTestTopic = try await prepareTopic( + messagesCount: messageCountPerBatch * numberOfBatches, + partitions: numberOfPartitions + ) } Benchmark.teardown = { @@ -62,7 +54,7 @@ let benchmarks = { uniqueTestTopic = nil } - Benchmark("SwiftKafkaConsumer_basic_consumer_messages_\(messageCount)") { benchmark in + Benchmark("SwiftKafkaConsumer_basic_consumer_messages_\(messageCountPerBatch)") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( consumptionStrategy: .group( @@ -93,31 +85,36 @@ let benchmarks = { defer { benchLog("Finish consuming") } - // Run Task + // Run task group.addTask { try await serviceGroup.run() } - // Second Consumer Task + // Consumer task group.addTask { - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + var counter: UInt64 = 0 + var tmpCounter: UInt64 = 0 + let interval: UInt64 = Swift.max(UInt64(messageCountPerBatch / 20), 1) let totalStartDate = Date.timeIntervalSinceReferenceDate var totalBytes: UInt64 = 0 try await benchmark.withMeasurement { - for try await record in consumer.messages { - ctr += 1 - totalBytes += UInt64(record.value.readableBytes) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - if ctr >= messageCount { - break + for _ in benchmark.scaledIterations { + for try await record in consumer.messages { + counter += 1 + totalBytes += UInt64(record.value.readableBytes) + + tmpCounter += 1 + if tmpCounter >= interval { + benchLog("read \(counter * 100 / UInt64(messageCountPerBatch))%") + tmpCounter = 0 + } + if counter >= messageCountPerBatch { + // Reset counters for next iteration + counter = 0 + tmpCounter = 0 + break + } } } } @@ -125,18 +122,18 @@ let 
benchmarks = { let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" ) } - // Wait for second Consumer Task to complete + // Wait for consumer task to complete try await group.next() // Shutdown the serviceGroup await serviceGroup.triggerGracefulShutdown() } } - Benchmark("SwiftKafkaConsumer_with_offset_commit_messages_\(messageCount)") { benchmark in + Benchmark("SwiftKafkaConsumer_with_offset_commit_messages_\(messageCountPerBatch)") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( consumptionStrategy: .group( @@ -168,33 +165,38 @@ let benchmarks = { defer { benchLog("Finish consuming") } - // Run Task + // Run task group.addTask { try await serviceGroup.run() } - // Second Consumer Task + // Consumer task group.addTask { - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + var counter: UInt64 = 0 + var tmpCounter: UInt64 = 0 + let interval: UInt64 = Swift.max(UInt64(messageCountPerBatch / 20), 1) let totalStartDate = Date.timeIntervalSinceReferenceDate var totalBytes: UInt64 = 0 try await benchmark.withMeasurement { - for try await record in consumer.messages { - try consumer.scheduleCommit(record) - - ctr += 1 - totalBytes += UInt64(record.value.readableBytes) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - if ctr >= messageCount { - break + for _ in benchmark.scaledIterations { + for try await record in consumer.messages { + try consumer.scheduleCommit(record) + + counter += 1 + totalBytes += UInt64(record.value.readableBytes) + + tmpCounter += 1 + if tmpCounter >= 
interval { + benchLog("read \(counter * 100 / UInt64(messageCountPerBatch))%") + tmpCounter = 0 + } + if counter >= messageCountPerBatch { + // Reset counters for next iteration + counter = 0 + tmpCounter = 0 + break + } } } } @@ -202,18 +204,18 @@ let benchmarks = { let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" ) } - // Wait for second Consumer Task to complete + // Wait for consumer task to complete try await group.next() // Shutdown the serviceGroup await serviceGroup.triggerGracefulShutdown() } } - Benchmark("librdkafka_basic_consumer_messages_\(messageCount)") { benchmark in + Benchmark("librdkafka_basic_consumer_messages_\(messageCountPerBatch)") { benchmark in let uniqueGroupID = UUID().uuidString let rdKafkaConsumerConfig: [String: String] = [ "group.id": uniqueGroupID, @@ -248,106 +250,35 @@ let benchmarks = { rd_kafka_subscribe(kafkaHandle, subscriptionList) rd_kafka_poll(kafkaHandle, 0) - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 + var counter: UInt64 = 0 + var tmpCounter: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let interval: UInt64 = Swift.max(UInt64(messageCountPerBatch / 20), 1) let totalStartDate = Date.timeIntervalSinceReferenceDate var totalBytes: UInt64 = 0 benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - ctr += 1 - totalBytes += UInt64(record.pointee.len) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - } - } - - rd_kafka_consumer_close(kafkaHandle) - - let timeIntervalTotal = 
Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" - ) - } - - Benchmark("librdkafka_with_offset_commit_messages_\(messageCount)") { benchmark in - let uniqueGroupID = UUID().uuidString - let rdKafkaConsumerConfig: [String: String] = [ - "group.id": uniqueGroupID, - "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", - "broker.address.family": "v4", - "auto.offset.reset": "beginning", - "enable.auto.commit": "false", - ] - - let configPointer: OpaquePointer = rd_kafka_conf_new() - for (key, value) in rdKafkaConsumerConfig { - precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) - } - - let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) - guard let kafkaHandle else { - preconditionFailure("Kafka handle was not created") - } - defer { - rd_kafka_destroy(kafkaHandle) - } - - rd_kafka_poll_set_consumer(kafkaHandle) - let subscriptionList = rd_kafka_topic_partition_list_new(1) - defer { - rd_kafka_topic_partition_list_destroy(subscriptionList) - } - rd_kafka_topic_partition_list_add( - subscriptionList, - uniqueTestTopic, - RD_KAFKA_PARTITION_UA - ) - rd_kafka_subscribe(kafkaHandle, subscriptionList) - rd_kafka_poll(kafkaHandle, 0) - - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 + for _ in benchmark.scaledIterations { + while counter < messageCountPerBatch { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + counter += 1 + totalBytes += UInt64(record.pointee.len) - benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) 
else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { - continue + tmpCounter += 1 + if tmpCounter >= interval { + benchLog("read \(counter * 100 / UInt64(messageCountPerBatch))%") + tmpCounter = 0 + } } - let result = rd_kafka_commit_message(kafkaHandle, record, 0) - precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) - - ctr += 1 - totalBytes += UInt64(record.pointee.len) - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } + // Reset counters for next iteration + counter = 0 + tmpCounter = 0 } } @@ -356,7 +287,93 @@ let benchmarks = { let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" ) } + + // Benchmark("librdkafka_with_offset_commit_messages_\(messageCountPerBatch)") { benchmark in + // let uniqueGroupID = UUID().uuidString + // let rdKafkaConsumerConfig: [String: String] = [ + // "group.id": uniqueGroupID, + // "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + // "broker.address.family": "v4", + // "auto.offset.reset": "beginning", + // "enable.auto.commit": "false", + // ] + + // let configPointer: OpaquePointer = rd_kafka_conf_new() + // for (key, value) in rdKafkaConsumerConfig { + // precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) + // } + + // let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + // guard let kafkaHandle else { + // preconditionFailure("Kafka handle was not created") + // } + // defer { + // rd_kafka_destroy(kafkaHandle) + // } + + // rd_kafka_poll_set_consumer(kafkaHandle) + // let subscriptionList = 
rd_kafka_topic_partition_list_new(1) + // defer { + // rd_kafka_topic_partition_list_destroy(subscriptionList) + // } + // rd_kafka_topic_partition_list_add( + // subscriptionList, + // uniqueTestTopic, + // RD_KAFKA_PARTITION_UA + // ) + // rd_kafka_subscribe(kafkaHandle, subscriptionList) + // rd_kafka_poll(kafkaHandle, 0) + + // var counter: UInt64 = 0 + // var tmpCounter: UInt64 = 0 + + // let interval: UInt64 = Swift.max(UInt64(messageCountPerBatch / 20), 1) + // let totalStartDate = Date.timeIntervalSinceReferenceDate + // var totalBytes: UInt64 = 0 + + // benchmark.withMeasurement { + // var myCtr: UInt64 = 0 + // for _ in benchmark.scaledIterations { + // myCtr += 1 + // print(myCtr) + // while counter < messageCountPerBatch { + // guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + // continue + // } + // defer { + // rd_kafka_message_destroy(record) + // } + // guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { + // continue + // } + // let result = rd_kafka_commit_message(kafkaHandle, record, 1) + // precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) + + // counter += 1 + // totalBytes += UInt64(record.pointee.len) + + // tmpCounter += 1 + // if tmpCounter >= interval { + // benchLog("read \(counter * 100 / UInt64(messageCountPerBatch))%") + // tmpCounter = 0 + // } + // } + + // // Reset counters for next iteration + // counter = 0 + // tmpCounter = 0 + // } + // } + + // rd_kafka_consumer_close(kafkaHandle) + + // let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + // let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + // benchLog( + // "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + // ) + // } } diff --git a/Benchmarks/Package.swift b/Benchmarks/Package.swift index 4301f8de..cc8f15b2 100644 --- a/Benchmarks/Package.swift +++ b/Benchmarks/Package.swift @@ -1,4 +1,4 @@ -// swift-tools-version: 5.7 +// 
swift-tools-version: 5.9 //===----------------------------------------------------------------------===// // // This source file is part of the swift-kafka-client open source project diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..a55847f6 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 42079 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..d969106a --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 59647 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..169c3dfd --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 1000959 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..07e0e6cc --- /dev/null +++ 
b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 29935 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..52549f62 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 49983 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..169c3dfd --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 1000959 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..d1fe7ee9 --- /dev/null +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 31695 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..25f6172f --- /dev/null +++ 
b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 56831 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..169c3dfd --- /dev/null +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 1000959 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..4498695c --- /dev/null +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 29295 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..5f340cba --- /dev/null +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 48287 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 
00000000..64b28c75 --- /dev/null +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + "mallocCountTotal" : 1000447 +} \ No newline at end of file diff --git a/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift b/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift index 2eb7b153..8c393812 100644 --- a/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift +++ b/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift @@ -59,7 +59,7 @@ extension RDKafkaClient { ) guard let resultEvent = rd_kafka_queue_poll(resultQueue, timeout) else { - throw KafkaError.topicCreation(reason: "No CreateTopics result after 10s timeout") + throw KafkaError.topicCreation(reason: "No CreateTopics result after \(timeout)ms timeout") } defer { rd_kafka_event_destroy(resultEvent) } diff --git a/Sources/Kafka/KafkaConsumer.swift b/Sources/Kafka/KafkaConsumer.swift index bd3f5542..5c917bbb 100644 --- a/Sources/Kafka/KafkaConsumer.swift +++ b/Sources/Kafka/KafkaConsumer.swift @@ -140,7 +140,7 @@ public struct KafkaConsumerMessages: Sendable, AsyncSequence { // MARK: - KafkaConsumer -/// A ``KafkaConsumer `` can be used to consume messages from a Kafka cluster. +/// A ``KafkaConsumer`` can be used to consume messages from a Kafka cluster. public final class KafkaConsumer: Sendable, Service { /// The configuration object of the consumer client. 
private let configuration: KafkaConsumerConfiguration diff --git a/dev/update-benchmark-thresholds.sh b/dev/update-benchmark-thresholds.sh index be8bf886..e960b7eb 100755 --- a/dev/update-benchmark-thresholds.sh +++ b/dev/update-benchmark-thresholds.sh @@ -16,13 +16,13 @@ set -eu set -o pipefail -here="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" target_repo=${2-"$here/.."} -for f in 57 58 59 510 -nightly; do +for f in 59 510 60 nightly-6.0 main; do echo "swift$f" - docker_file=$(if [[ "$f" == "-nightly" ]]; then f=main; fi && ls "$target_repo/docker/docker-compose."*"$f"*".yaml") + docker_file=$(ls "$target_repo/docker/docker-compose."*"$f"*".yaml") docker-compose -f docker/docker-compose.yaml -f "$docker_file" run update-benchmark-baseline done diff --git a/docker/docker-compose.2204.510.yaml b/docker/docker-compose.2204.510.yaml index 17acb143..269e904f 100644 --- a/docker/docker-compose.2204.510.yaml +++ b/docker/docker-compose.2204.510.yaml @@ -1,12 +1,10 @@ -version: "3" - +name: swift-kafka-client-22.04-5.10 services: - runtime-setup: image: swift-kafka-client:22.04-5.10 build: args: - base_image: "swiftlang/swift:nightly-5.10-jammy" + base_image: "swift:5.10-jammy" build: image: swift-kafka-client:22.04-5.10 diff --git a/docker/docker-compose.2204.57.yaml b/docker/docker-compose.2204.57.yaml deleted file mode 100644 index a465a610..00000000 --- a/docker/docker-compose.2204.57.yaml +++ /dev/null @@ -1,29 +0,0 @@ -version: "3" - -services: - - runtime-setup: - image: swift-kafka-client:22.04-5.7 - build: - args: - ubuntu_version: "jammy" - swift_version: "5.7" - - build: - image: swift-kafka-client:22.04-5.7 - - test: - image: swift-kafka-client:22.04-5.7 - environment: - - SWIFT_VERSION=5.7 - - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete - # - SANITIZER_ARG=--sanitize=thread # TSan broken still - - update-benchmark-baseline: - 
image: swift-kafka-client:22.04-5.7 - environment: - - SWIFT_VERSION=5.7 - - shell: - image: swift-kafka-client:22.04-5.7 diff --git a/docker/docker-compose.2204.59.yaml b/docker/docker-compose.2204.59.yaml index 8d9cf29d..d3738796 100644 --- a/docker/docker-compose.2204.59.yaml +++ b/docker/docker-compose.2204.59.yaml @@ -1,13 +1,10 @@ -version: "3" - +name: swift-kafka-client-22.04-5.9 services: - runtime-setup: image: swift-kafka-client:22.04-5.9 build: args: - ubuntu_version: "jammy" - swift_version: "5.9" + base_image: "swift:5.9-jammy" build: image: swift-kafka-client:22.04-5.9 diff --git a/docker/docker-compose.2204.58.yaml b/docker/docker-compose.2204.60.yaml similarity index 55% rename from docker/docker-compose.2204.58.yaml rename to docker/docker-compose.2204.60.yaml index 47b02679..6ffe45ea 100644 --- a/docker/docker-compose.2204.58.yaml +++ b/docker/docker-compose.2204.60.yaml @@ -1,30 +1,26 @@ -version: "3" - +name: swift-kafka-client-22.04-6.0 services: - runtime-setup: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 build: args: - ubuntu_version: "jammy" - swift_version: "5.8" + base_image: "swift:6.0-jammy" build: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 test: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 environment: - - SWIFT_VERSION=5.8 - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - IMPORT_CHECK_ARG=--explicit-target-dependency-import-check error - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete # - SANITIZER_ARG=--sanitize=thread # TSan broken still update-benchmark-baseline: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 environment: - - SWIFT_VERSION=5.8 + - SWIFT_VERSION=6.0 shell: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 diff --git a/docker/docker-compose.2204.main.yaml b/docker/docker-compose.2204.main.yaml index acac1a54..b6e77a2b 100644 --- 
a/docker/docker-compose.2204.main.yaml +++ b/docker/docker-compose.2204.main.yaml @@ -1,7 +1,5 @@ -version: "3" - +name: swift-kafka-client-22.04-main services: - runtime-setup: image: swift-kafka-client:22.04-main build: diff --git a/docker/docker-compose.2204.nightly-6.0.yaml b/docker/docker-compose.2204.nightly-6.0.yaml new file mode 100644 index 00000000..65c964bb --- /dev/null +++ b/docker/docker-compose.2204.nightly-6.0.yaml @@ -0,0 +1,26 @@ +name: swift-kafka-client-22.04-nightly-6.0 +services: + runtime-setup: + image: swift-kafka-client:22.04-nightly-6.0 + build: + args: + base_image: "swiftlang/swift:nightly-6.0-jammy" + + build: + image: swift-kafka-client:22.04-nightly-6.0 + + test: + image: swift-kafka-client:22.04-nightly-6.0 + environment: + - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors + - IMPORT_CHECK_ARG=--explicit-target-dependency-import-check error + - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete + # - SANITIZER_ARG=--sanitize=thread # TSan broken still + + update-benchmark-baseline: + image: swift-kafka-client:22.04-nightly-6.0 + environment: + - SWIFT_VERSION=nightly-6.0 + + shell: + image: swift-kafka-client:22.04-nightly-6.0 diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 10f1665c..85585706 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -1,9 +1,7 @@ # this file is not designed to be run directly # instead, use the docker-compose.. 
files -# eg docker-compose -f docker/docker-compose.yaml -f docker/docker-compose.2204.57.yaml run test -version: "3.9" +# eg docker-compose -f docker/docker-compose.yaml -f docker/docker-compose.2204.59.yaml run test services: - zookeeper: image: ubuntu/zookeeper @@ -44,17 +42,17 @@ services: build: <<: *common environment: [] - command: /bin/bash -cl "swift build" + command: /bin/bash -cl "swift build --scratch-path .build/$${SWIFT_VERSION-}/" test: <<: *common depends_on: [kafka, runtime-setup] environment: - SWIFT_VERSION: 5.7 + SWIFT_VERSION: 5.9 KAFKA_HOST: kafka command: > /bin/bash -xcl " - swift build --build-tests $${SANITIZER_ARG-} && \ + swift build --scratch-path .build/$${SWIFT_VERSION-}/ --build-tests $${SANITIZER_ARG-} && \ swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} " @@ -65,7 +63,7 @@ services: KAFKA_HOST: kafka command: > /bin/bash -xcl " - cd Benchmarks && swift package --disable-sandbox benchmark + cd Benchmarks && swift package --disable-sandbox --scratch-path .build/$${SWIFT_VERSION-}/ benchmark " update-benchmark-baseline: