From c2686df5d7e07f8656f50fcec89f2e22970c341c Mon Sep 17 00:00:00 2001 From: Michael Gecht Date: Tue, 19 Nov 2024 15:00:27 +0000 Subject: [PATCH 1/7] Update Benchmark set up and persist benchmarks We have recently updated our toolchain version to 5.9, but have never updated the benchmark suite to use 5.9 or newer. Also, it does not look like we ever committed a baseline to this repository. While at it, wiring up GitHub actions to run the benchmarks for every pull request, as well as on a merge into `main`. Re-uses a lot of the heavy lifting that was defined in `apple/swift-nio` already. Note that `nightly-main` appears broken at the moment. The Benchmark tool gets stuck, and initial debugging points to it never finishing `_sendAndAcknowledgeMessages` when producing a set of messages before every benchmark. --- .github/workflows/benchmarks.yml | 46 ++++++ .github/workflows/cxx_interop.yml | 39 +++++ .github/workflows/main.yml | 13 ++ .github/workflows/pull_request.yml | 21 ++- .github/workflows/swift_matrix.yml | 133 ++++++++++++++++++ .github/workflows/unit_tests.yml | 71 ++-------- Benchmarks/Package.swift | 2 +- ...umer_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 3 + ...afka_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 10 ++ ...umer_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 10 ++ ...afka_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 10 ++ ...umer_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 10 ++ ...afka_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 10 ++ ...umer_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 10 ++ ...afka_basic_consumer_messages_1000.p90.json | 10 ++ ..._with_offset_commit_messages_1000.p90.json | 10 ++ 
.../ForTesting/RDKafkaClient+Topic.swift | 2 +- Sources/Kafka/KafkaConsumer.swift | 2 +- dev/update-benchmark-thresholds.sh | 6 +- docker/docker-compose.2204.510.yaml | 6 +- docker/docker-compose.2204.57.yaml | 29 ---- docker/docker-compose.2204.59.yaml | 7 +- ...04.58.yaml => docker-compose.2204.60.yaml} | 20 ++- docker/docker-compose.2204.main.yaml | 4 +- docker/docker-compose.2204.nightly-6.0.yaml | 26 ++++ docker/docker-compose.yaml | 12 +- 33 files changed, 458 insertions(+), 134 deletions(-) create mode 100644 .github/workflows/benchmarks.yml create mode 100644 .github/workflows/cxx_interop.yml create mode 100644 .github/workflows/swift_matrix.yml create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 
Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json create mode 100644 Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 docker/docker-compose.2204.57.yaml rename docker/{docker-compose.2204.58.yaml => docker-compose.2204.60.yaml} (55%) create mode 100644 docker/docker-compose.2204.nightly-6.0.yaml diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml new file mode 100644 index 00000000..31f4b2fa --- /dev/null +++ b/.github/workflows/benchmarks.yml @@ -0,0 +1,46 @@ +name: Benchmarks + +on: + workflow_call: + inputs: + benchmark_package_path: + type: string + description: "Path to the directory containing the benchmarking package. Defaults to ." + default: "." + swift_package_arguments: + type: string + description: "Arguments to the swift package command invocation e.g. `--disable-sandbox`" + linux_5_9_enabled: + type: boolean + description: "Boolean to enable the Linux 5.9 Swift version matrix job. Defaults to true." + default: true + linux_5_10_enabled: + type: boolean + description: "Boolean to enable the Linux 5.10 Swift version matrix job. Defaults to true." + default: true + linux_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux 6.0 Swift version matrix job. Defaults to true." 
+ default: true + linux_nightly_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux nightly 6.0 Swift version matrix job. Defaults to true." + default: true + linux_nightly_main_enabled: + type: boolean + description: "Boolean to enable the Linux nightly main Swift version matrix job. Defaults to true." + default: true + +jobs: + benchmarks: + name: Benchmarks + # Workaround https://github.com/nektos/act/issues/1875 + uses: ./.github/workflows/swift_matrix.yml + with: + name: "Benchmarks" + matrix_linux_command: "apt-get update -y -q && apt-get install -y -q libjemalloc-dev && apt-get -y install libsasl2-dev && swift package --package-path ${{ inputs.benchmark_package_path }} ${{ inputs.swift_package_arguments }} benchmark baseline check --check-absolute-path ${{ inputs.benchmark_package_path }}/Thresholds/${SWIFT_VERSION}/" + matrix_linux_5_9_enabled: ${{ inputs.linux_5_9_enabled }} + matrix_linux_5_10_enabled: ${{ inputs.linux_5_10_enabled }} + matrix_linux_6_0_enabled: ${{ inputs.linux_6_0_enabled }} + matrix_linux_nightly_6_0_enabled: ${{ inputs.linux_nightly_6_0_enabled }} + matrix_linux_nightly_main_enabled: ${{ inputs.linux_nightly_main_enabled }} diff --git a/.github/workflows/cxx_interop.yml b/.github/workflows/cxx_interop.yml new file mode 100644 index 00000000..fb8ef9d0 --- /dev/null +++ b/.github/workflows/cxx_interop.yml @@ -0,0 +1,39 @@ +name: Cxx interop + +on: + workflow_call: + inputs: + linux_5_9_enabled: + type: boolean + description: "Boolean to enable the Linux 5.9 Swift version matrix job. Defaults to true." + default: true + linux_5_10_enabled: + type: boolean + description: "Boolean to enable the Linux 5.10 Swift version matrix job. Defaults to true." + default: true + linux_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux 6.0 Swift version matrix job. Defaults to true." 
+ default: true + linux_nightly_6_0_enabled: + type: boolean + description: "Boolean to enable the Linux nightly 6.0 Swift version matrix job. Defaults to true." + default: true + linux_nightly_main_enabled: + type: boolean + description: "Boolean to enable the Linux nightly main Swift version matrix job. Defaults to true." + default: true + +jobs: + cxx-interop: + name: Cxx interop + # Workaround https://github.com/nektos/act/issues/1875 + uses: ./.github/workflows/swift_matrix.yml + with: + name: "Cxx interop" + matrix_linux_command: "apt-get update -y -q && apt-get install -y -q jq && apt-get -y install libsasl2-dev && curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-cxx-interop-compatibility.sh | bash" + matrix_linux_5_9_enabled: ${{ inputs.linux_5_9_enabled }} + matrix_linux_5_10_enabled: ${{ inputs.linux_5_10_enabled }} + matrix_linux_6_0_enabled: ${{ inputs.linux_6_0_enabled }} + matrix_linux_nightly_6_0_enabled: ${{ inputs.linux_nightly_6_0_enabled }} + matrix_linux_nightly_main_enabled: ${{ inputs.linux_nightly_main_enabled }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b2f403c3..4a62c521 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -9,6 +9,7 @@ on: jobs: unit-tests: name: Unit tests + # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/unit_tests.yml with: linux_5_9_arguments_override: "--explicit-target-dependency-import-check error -Xswiftc -strict-concurrency=complete" @@ -16,3 +17,15 @@ jobs: linux_6_0_arguments_override: "--explicit-target-dependency-import-check error" linux_nightly_6_0_arguments_override: "--explicit-target-dependency-import-check error" linux_nightly_main_arguments_override: "--explicit-target-dependency-import-check error" + + benchmarks: + name: Benchmarks + # Workaround https://github.com/nektos/act/issues/1875 + uses: ./.github/workflows/benchmarks.yml + with: + benchmark_package_path: "Benchmarks" + + 
cxx-interop: + name: Cxx interop + # Workaround https://github.com/nektos/act/issues/1875 + uses: ./.github/workflows/cxx_interop.yml diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 2c6a87a4..dc4f525c 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -23,17 +23,14 @@ jobs: linux_nightly_6_0_arguments_override: "--explicit-target-dependency-import-check error" linux_nightly_main_arguments_override: "--explicit-target-dependency-import-check error" + benchmarks: + name: Benchmarks + # Workaround https://github.com/nektos/act/issues/1875 + uses: ./.github/workflows/benchmarks.yml + with: + benchmark_package_path: "Benchmarks" + cxx-interop: name: Cxx interop - uses: apple/swift-nio/.github/workflows/swift_matrix.yml@main - with: - name: "Cxx interop" - matrix_linux_command: "apt-get update -y -q && apt-get install -y -q jq && apt-get -y install libsasl2-dev && curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-cxx-interop-compatibility.sh | bash" - matrix_linux_5_9_enabled: true - matrix_linux_5_10_enabled: true - matrix_linux_6_0_enabled: true - matrix_linux_nightly_6_0_enabled: true - matrix_linux_nightly_main_enabled: true - matrix_windows_6_0_enabled: false - matrix_windows_nightly_6_0_enabled: false - matrix_windows_nightly_main_enabled: false + # Workaround https://github.com/nektos/act/issues/1875 + uses: ./.github/workflows/cxx_interop.yml diff --git a/.github/workflows/swift_matrix.yml b/.github/workflows/swift_matrix.yml new file mode 100644 index 00000000..612718de --- /dev/null +++ b/.github/workflows/swift_matrix.yml @@ -0,0 +1,133 @@ +name: Matrix + +on: + workflow_call: + inputs: + name: + type: string + description: "The name of the workflow used for the concurrency group." + required: true + matrix_linux_command: + type: string + description: "The command of the current Swift version linux matrix job to execute." 
+ required: true + matrix_linux_5_9_enabled: + type: boolean + description: "Boolean to enable the 5.9 Swift version matrix job. Defaults to true." + default: true + matrix_linux_5_9_container_image: + type: string + description: "Container image for the 5.9 Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swift:5.9-jammy" + matrix_linux_5_9_command_override: + type: string + description: "The command of the 5.9 Swift version linux matrix job to execute." + matrix_linux_5_10_enabled: + type: boolean + description: "Boolean to enable the 5.10 Swift version matrix job. Defaults to true." + default: true + matrix_linux_5_10_container_image: + type: string + description: "Container image for the 5.10 Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swift:5.10-jammy" + matrix_linux_5_10_command_override: + type: string + description: "The command of the 5.10 Swift version linux matrix job to execute." + matrix_linux_6_0_enabled: + type: boolean + description: "Boolean to enable the 6.0 Swift version matrix job. Defaults to true." + default: true + matrix_linux_6_0_container_image: + type: string + description: "Container image for the 6.0 Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swift:6.0-jammy" + matrix_linux_6_0_command_override: + type: string + description: "The command of the 6.0 Swift version linux matrix job to execute." + matrix_linux_nightly_6_0_enabled: + type: boolean + description: "Boolean to enable the nightly 6.0 Swift version matrix job. Defaults to true." + default: true + matrix_linux_nightly_6_0_container_image: + type: string + description: "Container image for the nightly 6.0 Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swiftlang/swift:nightly-6.0-jammy" + matrix_linux_nightly_6_0_command_override: + type: string + description: "The command of the nightly 6.0 Swift version linux matrix job to execute." 
+ matrix_linux_nightly_main_enabled: + type: boolean + description: "Boolean to enable the nightly main Swift version matrix job. Defaults to true." + default: true + matrix_linux_nightly_main_container_image: + type: string + description: "Container image for the nightly main Swift version matrix job. Defaults to matching Swift Ubuntu image." + default: "swiftlang/swift:nightly-main-jammy" + matrix_linux_nightly_main_command_override: + type: string + description: "The command of the nightly main Swift version linux matrix job to execute." + +# We are cancelling previously triggered workflow runs +concurrency: + group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.name }} + cancel-in-progress: true + +jobs: + linux: + name: Linux (${{ matrix.swift.swift_version }}) + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + # We are specifying only the major and minor of the docker images to automatically pick up the latest patch release + swift: + - image: ${{ inputs.matrix_linux_5_9_container_image }} + swift_version: "5.9" + enabled: ${{ inputs.matrix_linux_5_9_enabled }} + - image: ${{ inputs.matrix_linux_5_10_container_image }} + swift_version: "5.10" + enabled: ${{ inputs.matrix_linux_5_10_enabled }} + - image: ${{ inputs.matrix_linux_6_0_container_image }} + swift_version: "6.0" + enabled: ${{ inputs.matrix_linux_6_0_enabled }} + - image: ${{ inputs.matrix_linux_nightly_6_0_container_image }} + swift_version: "nightly-6.0" + enabled: ${{ inputs.matrix_linux_nightly_6_0_enabled }} + - image: ${{ inputs.matrix_linux_nightly_main_container_image }} + swift_version: "nightly-main" + enabled: ${{ inputs.matrix_linux_nightly_main_enabled }} + container: + image: ${{ matrix.swift.image }} + steps: + - name: Checkout repository + if: ${{ matrix.swift.enabled }} + uses: actions/checkout@v4 + with: + persist-credentials: false + submodules: true + - name: Mark the workspace as safe + if: ${{ matrix.swift.enabled }} + # 
https://github.com/actions/checkout/issues/766 + run: git config --global --add safe.directory ${GITHUB_WORKSPACE} + - name: Run matrix job + if: ${{ matrix.swift.enabled }} + env: + SWIFT_VERSION: ${{ matrix.swift.swift_version }} + COMMAND: ${{ inputs.matrix_linux_command }} + COMMAND_OVERRIDE_5_9: ${{ inputs.matrix_linux_5_9_command_override }} + COMMAND_OVERRIDE_5_10: ${{ inputs.matrix_linux_5_10_command_override }} + COMMAND_OVERRIDE_6_0: ${{ inputs.matrix_linux_6_0_command_override }} + COMMAND_OVERRIDE_NIGHTLY_6_0: ${{ inputs.matrix_linux_nightly_6_0_command_override }} + COMMAND_OVERRIDE_NIGHTLY_MAIN: ${{ inputs.matrix_linux_nightly_main_command_override }} + run: | + apt-get -qq update && apt-get -qq -y install curl && apt-get -y install libsasl2-dev + curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-matrix-job.sh | bash + services: + zookeeper: + image: ubuntu/zookeeper + kafka: + image: ubuntu/kafka + env: + ZOOKEEPER_HOST: zookeeper + env: + KAFKA_HOST: kafka diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 3300a33b..7a3d2026 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -47,59 +47,18 @@ on: jobs: unit-tests: name: Unit tests - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - # We are specifying only the major and minor of the docker images to automatically pick up the latest patch release - swift: - - image: "swift:5.9-jammy" - swift_version: "5.9" - enabled: ${{ inputs.linux_5_9_enabled }} - - image: "swift:5.10-jammy" - swift_version: "5.10" - enabled: ${{ inputs.linux_5_10_enabled }} - - image: "swift:6.0-jammy" - swift_version: "6.0" - enabled: ${{ inputs.linux_6_0_enabled }} - - image: "swiftlang/swift:nightly-6.0-jammy" - swift_version: "nightly-6.0" - enabled: ${{ inputs.linux_nightly_6_0_enabled }} - - image: "swiftlang/swift:nightly-main-jammy" - swift_version: "nightly-main" - enabled: ${{ 
inputs.linux_nightly_main_enabled }} - steps: - - name: Checkout repository - if: ${{ matrix.swift.enabled }} - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: true - - name: Mark the workspace as safe - if: ${{ matrix.swift.enabled }} - # https://github.com/actions/checkout/issues/766 - run: git config --global --add safe.directory ${GITHUB_WORKSPACE} - - name: Run matrix job - if: ${{ matrix.swift.enabled }} - env: - SWIFT_VERSION: ${{ matrix.swift.swift_version }} - COMMAND: "swift test" - COMMAND_OVERRIDE_5_9: "swift test ${{ inputs.linux_5_9_arguments_override }}" - COMMAND_OVERRIDE_5_10: "swift test ${{ inputs.linux_5_10_arguments_override }}" - COMMAND_OVERRIDE_6_0: "swift test ${{ inputs.linux_6_0_arguments_override }}" - COMMAND_OVERRIDE_NIGHTLY_6_0: "swift test ${{ inputs.linux_nightly_6_0_arguments_override }}" - COMMAND_OVERRIDE_NIGHTLY_MAIN: "swift test ${{ inputs.linux_nightly_main_arguments_override }}" - run: | - apt-get -qq update && apt-get -qq -y install curl && apt-get -y install libsasl2-dev - curl -s https://raw.githubusercontent.com/apple/swift-nio/main/scripts/check-matrix-job.sh | bash - container: - image: ${{ matrix.swift.image }} - services: - zookeeper: - image: ubuntu/zookeeper - kafka: - image: ubuntu/kafka - env: - ZOOKEEPER_HOST: zookeeper - env: - KAFKA_HOST: kafka + # Workaround https://github.com/nektos/act/issues/1875 + uses: ./.github/workflows/swift_matrix.yml + with: + name: "Unit tests" + matrix_linux_command: "swift test" + matrix_linux_5_9_enabled: ${{ inputs.linux_5_9_enabled }} + matrix_linux_5_9_command_override: "swift test ${{ inputs.linux_5_9_arguments_override }}" + matrix_linux_5_10_enabled: ${{ inputs.linux_5_10_enabled }} + matrix_linux_5_10_command_override: "swift test ${{ inputs.linux_5_10_arguments_override }}" + matrix_linux_6_0_enabled: ${{ inputs.linux_6_0_enabled }} + matrix_linux_6_0_command_override: "swift test ${{ inputs.linux_6_0_arguments_override }}" + 
matrix_linux_nightly_6_0_enabled: ${{ inputs.linux_nightly_6_0_enabled }} + matrix_linux_nightly_6_0_command_override: "swift test ${{ inputs.linux_nightly_6_0_arguments_override }}" + matrix_linux_nightly_main_enabled: ${{ inputs.linux_nightly_main_enabled }} + matrix_linux_nightly_main_command_override: "swift test ${{ inputs.linux_nightly_main_arguments_override }}" diff --git a/Benchmarks/Package.swift b/Benchmarks/Package.swift index 4301f8de..cc8f15b2 100644 --- a/Benchmarks/Package.swift +++ b/Benchmarks/Package.swift @@ -1,4 +1,4 @@ -// swift-tools-version: 5.7 +// swift-tools-version: 5.9 //===----------------------------------------------------------------------===// // // This source file is part of the swift-kafka-client open source project diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..210167a5 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 77266944, + "cpuTotal" : 200000000, + "objectAllocCount" : 5549, + "releaseCount" : 15168, + "retainCount" : 7108, + "retainReleaseDelta" : 2511, + "throughput" : 2, + "wallClock" : 695307500 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..0e0dcd23 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,3 @@ +{ + +} \ No newline at end of file diff --git 
a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..2be930a7 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 47382528, + "cpuTotal" : 10000000, + "objectAllocCount" : 16, + "releaseCount" : 48, + "retainCount" : 2, + "retainReleaseDelta" : 30, + "throughput" : 2, + "wallClock" : 640572501 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..a4bb5ea5 --- /dev/null +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 44204031, + "cpuTotal" : 80000000, + "objectAllocCount" : 16, + "releaseCount" : 48, + "retainCount" : 2, + "retainReleaseDelta" : 30, + "throughput" : 3, + "wallClock" : 320339967 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..f3f1b180 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 88211455, + "cpuTotal" : 40009727, + "objectAllocCount" : 3471, + "releaseCount" : 9175, + "retainCount" : 4435, + "retainReleaseDelta" : 1267, + "throughput" : 9, + "wallClock" : 130154495 +} \ No newline at end of 
file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..803cb779 --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 73924608, + "cpuTotal" : 200000000, + "objectAllocCount" : 6039, + "releaseCount" : 14939, + "retainCount" : 6695, + "retainReleaseDelta" : 2205, + "throughput" : 2, + "wallClock" : 646714708 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..c1208ead --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 47579136, + "cpuTotal" : 20000000, + "objectAllocCount" : 16, + "releaseCount" : 48, + "retainCount" : 2, + "retainReleaseDelta" : 30, + "throughput" : 2, + "wallClock" : 648354417 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..877d1f0e --- /dev/null +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 42926080, + "cpuTotal" : 90000000, + "objectAllocCount" : 16, + "releaseCount" : 44, + "retainCount" : 2, + "retainReleaseDelta" : 26, + "throughput" : 1, + "wallClock" : 881145958 
+} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..e9cdc8b8 --- /dev/null +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 81592320, + "cpuTotal" : 200000000, + "objectAllocCount" : 4609, + "releaseCount" : 16389, + "retainCount" : 9217, + "retainReleaseDelta" : 2563, + "throughput" : 2, + "wallClock" : 709379209 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..f4eb9e30 --- /dev/null +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 78512128, + "cpuTotal" : 200000000, + "objectAllocCount" : 5439, + "releaseCount" : 18714, + "retainCount" : 10871, + "retainReleaseDelta" : 2404, + "throughput" : 2, + "wallClock" : 657556084 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..40f3f3fb --- /dev/null +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 48431104, + "cpuTotal" : 30000000, + "objectAllocCount" : 16, + "releaseCount" : 40, + "retainCount" : 2, + "retainReleaseDelta" : 
22, + "throughput" : 2, + "wallClock" : 640575375 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..949815b3 --- /dev/null +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 43450368, + "cpuTotal" : 90000000, + "objectAllocCount" : 16, + "releaseCount" : 44, + "retainCount" : 2, + "retainReleaseDelta" : 26, + "throughput" : 1, + "wallClock" : 874572167 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..b26180ee --- /dev/null +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 79691776, + "cpuTotal" : 170000000, + "objectAllocCount" : 4593, + "releaseCount" : 16334, + "retainCount" : 9187, + "retainReleaseDelta" : 2554, + "throughput" : 2, + "wallClock" : 698023125 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..e61770ca --- /dev/null +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 72613888, + "cpuTotal" : 
150000000, + "objectAllocCount" : 5459, + "releaseCount" : 18789, + "retainCount" : 10914, + "retainReleaseDelta" : 2420, + "throughput" : 2, + "wallClock" : 647447376 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json new file mode 100644 index 00000000..233f2340 --- /dev/null +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 48037888, + "cpuTotal" : 20000000, + "objectAllocCount" : 16, + "releaseCount" : 44, + "retainCount" : 2, + "retainReleaseDelta" : 26, + "throughput" : 2, + "wallClock" : 642639208 +} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json new file mode 100644 index 00000000..6a90ed0e --- /dev/null +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json @@ -0,0 +1,10 @@ +{ + "allocatedResidentMemory" : 50200576, + "cpuTotal" : 90000000, + "objectAllocCount" : 16, + "releaseCount" : 52, + "retainCount" : 2, + "retainReleaseDelta" : 34, + "throughput" : 1, + "wallClock" : 875615959 +} \ No newline at end of file diff --git a/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift b/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift index 2eb7b153..8c393812 100644 --- a/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift +++ b/Sources/Kafka/ForTesting/RDKafkaClient+Topic.swift @@ -59,7 +59,7 @@ extension RDKafkaClient { ) guard let resultEvent = rd_kafka_queue_poll(resultQueue, timeout) else { - throw KafkaError.topicCreation(reason: "No 
CreateTopics result after 10s timeout") + throw KafkaError.topicCreation(reason: "No CreateTopics result after \(timeout)ms timeout") } defer { rd_kafka_event_destroy(resultEvent) } diff --git a/Sources/Kafka/KafkaConsumer.swift b/Sources/Kafka/KafkaConsumer.swift index bd3f5542..5c917bbb 100644 --- a/Sources/Kafka/KafkaConsumer.swift +++ b/Sources/Kafka/KafkaConsumer.swift @@ -140,7 +140,7 @@ public struct KafkaConsumerMessages: Sendable, AsyncSequence { // MARK: - KafkaConsumer -/// A ``KafkaConsumer `` can be used to consume messages from a Kafka cluster. +/// A ``KafkaConsumer`` can be used to consume messages from a Kafka cluster. public final class KafkaConsumer: Sendable, Service { /// The configuration object of the consumer client. private let configuration: KafkaConsumerConfiguration diff --git a/dev/update-benchmark-thresholds.sh b/dev/update-benchmark-thresholds.sh index be8bf886..e960b7eb 100755 --- a/dev/update-benchmark-thresholds.sh +++ b/dev/update-benchmark-thresholds.sh @@ -16,13 +16,13 @@ set -eu set -o pipefail -here="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" target_repo=${2-"$here/.."} -for f in 57 58 59 510 -nightly; do +for f in 59 510 60 nightly-6.0 main; do echo "swift$f" - docker_file=$(if [[ "$f" == "-nightly" ]]; then f=main; fi && ls "$target_repo/docker/docker-compose."*"$f"*".yaml") + docker_file=$(ls "$target_repo/docker/docker-compose."*"$f"*".yaml") docker-compose -f docker/docker-compose.yaml -f "$docker_file" run update-benchmark-baseline done diff --git a/docker/docker-compose.2204.510.yaml b/docker/docker-compose.2204.510.yaml index 17acb143..269e904f 100644 --- a/docker/docker-compose.2204.510.yaml +++ b/docker/docker-compose.2204.510.yaml @@ -1,12 +1,10 @@ -version: "3" - +name: swift-kafka-client-22.04-5.10 services: - runtime-setup: image: swift-kafka-client:22.04-5.10 build: args: - base_image: "swiftlang/swift:nightly-5.10-jammy" + base_image: 
"swift:5.10-jammy" build: image: swift-kafka-client:22.04-5.10 diff --git a/docker/docker-compose.2204.57.yaml b/docker/docker-compose.2204.57.yaml deleted file mode 100644 index a465a610..00000000 --- a/docker/docker-compose.2204.57.yaml +++ /dev/null @@ -1,29 +0,0 @@ -version: "3" - -services: - - runtime-setup: - image: swift-kafka-client:22.04-5.7 - build: - args: - ubuntu_version: "jammy" - swift_version: "5.7" - - build: - image: swift-kafka-client:22.04-5.7 - - test: - image: swift-kafka-client:22.04-5.7 - environment: - - SWIFT_VERSION=5.7 - - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete - # - SANITIZER_ARG=--sanitize=thread # TSan broken still - - update-benchmark-baseline: - image: swift-kafka-client:22.04-5.7 - environment: - - SWIFT_VERSION=5.7 - - shell: - image: swift-kafka-client:22.04-5.7 diff --git a/docker/docker-compose.2204.59.yaml b/docker/docker-compose.2204.59.yaml index 8d9cf29d..d3738796 100644 --- a/docker/docker-compose.2204.59.yaml +++ b/docker/docker-compose.2204.59.yaml @@ -1,13 +1,10 @@ -version: "3" - +name: swift-kafka-client-22.04-5.9 services: - runtime-setup: image: swift-kafka-client:22.04-5.9 build: args: - ubuntu_version: "jammy" - swift_version: "5.9" + base_image: "swift:5.9-jammy" build: image: swift-kafka-client:22.04-5.9 diff --git a/docker/docker-compose.2204.58.yaml b/docker/docker-compose.2204.60.yaml similarity index 55% rename from docker/docker-compose.2204.58.yaml rename to docker/docker-compose.2204.60.yaml index 47b02679..6ffe45ea 100644 --- a/docker/docker-compose.2204.58.yaml +++ b/docker/docker-compose.2204.60.yaml @@ -1,30 +1,26 @@ -version: "3" - +name: swift-kafka-client-22.04-6.0 services: - runtime-setup: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 build: args: - ubuntu_version: "jammy" - swift_version: "5.8" + base_image: "swift:6.0-jammy" build: - image: swift-kafka-client:22.04-5.8 + image: 
swift-kafka-client:22.04-6.0 test: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 environment: - - SWIFT_VERSION=5.8 - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors - IMPORT_CHECK_ARG=--explicit-target-dependency-import-check error - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete # - SANITIZER_ARG=--sanitize=thread # TSan broken still update-benchmark-baseline: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 environment: - - SWIFT_VERSION=5.8 + - SWIFT_VERSION=6.0 shell: - image: swift-kafka-client:22.04-5.8 + image: swift-kafka-client:22.04-6.0 diff --git a/docker/docker-compose.2204.main.yaml b/docker/docker-compose.2204.main.yaml index acac1a54..b6e77a2b 100644 --- a/docker/docker-compose.2204.main.yaml +++ b/docker/docker-compose.2204.main.yaml @@ -1,7 +1,5 @@ -version: "3" - +name: swift-kafka-client-22.04-main services: - runtime-setup: image: swift-kafka-client:22.04-main build: diff --git a/docker/docker-compose.2204.nightly-6.0.yaml b/docker/docker-compose.2204.nightly-6.0.yaml new file mode 100644 index 00000000..65c964bb --- /dev/null +++ b/docker/docker-compose.2204.nightly-6.0.yaml @@ -0,0 +1,26 @@ +name: swift-kafka-client-22.04-nightly-6.0 +services: + runtime-setup: + image: swift-kafka-client:22.04-nightly-6.0 + build: + args: + base_image: "swiftlang/swift:nightly-6.0-jammy" + + build: + image: swift-kafka-client:22.04-nightly-6.0 + + test: + image: swift-kafka-client:22.04-nightly-6.0 + environment: + - WARN_AS_ERROR_ARG=-Xswiftc -warnings-as-errors + - IMPORT_CHECK_ARG=--explicit-target-dependency-import-check error + - STRICT_CONCURRENCY_ARG=-Xswiftc -strict-concurrency=complete + # - SANITIZER_ARG=--sanitize=thread # TSan broken still + + update-benchmark-baseline: + image: swift-kafka-client:22.04-nightly-6.0 + environment: + - SWIFT_VERSION=nightly-6.0 + + shell: + image: swift-kafka-client:22.04-nightly-6.0 diff --git a/docker/docker-compose.yaml 
b/docker/docker-compose.yaml index 10f1665c..85585706 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -1,9 +1,7 @@ # this file is not designed to be run directly # instead, use the docker-compose.. files -# eg docker-compose -f docker/docker-compose.yaml -f docker/docker-compose.2204.57.yaml run test -version: "3.9" +# eg docker-compose -f docker/docker-compose.yaml -f docker/docker-compose.2204.59.yaml run test services: - zookeeper: image: ubuntu/zookeeper @@ -44,17 +42,17 @@ services: build: <<: *common environment: [] - command: /bin/bash -cl "swift build" + command: /bin/bash -cl "swift build --scratch-path .build/$${SWIFT_VERSION-}/" test: <<: *common depends_on: [kafka, runtime-setup] environment: - SWIFT_VERSION: 5.7 + SWIFT_VERSION: 5.9 KAFKA_HOST: kafka command: > /bin/bash -xcl " - swift build --build-tests $${SANITIZER_ARG-} && \ + swift build --scratch-path .build/$${SWIFT_VERSION-}/ --build-tests $${SANITIZER_ARG-} && \ swift $${SWIFT_TEST_VERB-test} $${WARN_AS_ERROR_ARG-} $${SANITIZER_ARG-} $${IMPORT_CHECK_ARG-} $${STRICT_CONCURRENCY_ARG-} " @@ -65,7 +63,7 @@ services: KAFKA_HOST: kafka command: > /bin/bash -xcl " - cd Benchmarks && swift package --disable-sandbox benchmark + cd Benchmarks && swift package --disable-sandbox --scratch-path .build/$${SWIFT_VERSION-}/ benchmark " update-benchmark-baseline: From 1197894c50d244bd777474a2e1e107352b03dd5e Mon Sep 17 00:00:00 2001 From: Michael Gecht Date: Mon, 25 Nov 2024 16:03:17 +0000 Subject: [PATCH 2/7] Address reviewer comment Removing the workaround comment --- .github/workflows/benchmarks.yml | 1 - .github/workflows/cxx_interop.yml | 1 - .github/workflows/main.yml | 3 --- .github/workflows/pull_request.yml | 2 -- .github/workflows/unit_tests.yml | 1 - 5 files changed, 8 deletions(-) diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index 31f4b2fa..ce86ed39 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ 
-34,7 +34,6 @@ on: jobs: benchmarks: name: Benchmarks - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/swift_matrix.yml with: name: "Benchmarks" diff --git a/.github/workflows/cxx_interop.yml b/.github/workflows/cxx_interop.yml index fb8ef9d0..ce27f060 100644 --- a/.github/workflows/cxx_interop.yml +++ b/.github/workflows/cxx_interop.yml @@ -27,7 +27,6 @@ on: jobs: cxx-interop: name: Cxx interop - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/swift_matrix.yml with: name: "Cxx interop" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4a62c521..12adf777 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -9,7 +9,6 @@ on: jobs: unit-tests: name: Unit tests - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/unit_tests.yml with: linux_5_9_arguments_override: "--explicit-target-dependency-import-check error -Xswiftc -strict-concurrency=complete" @@ -20,12 +19,10 @@ jobs: benchmarks: name: Benchmarks - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/benchmarks.yml@main with: benchmark_package_path: "Benchmarks" cxx-interop: name: Cxx interop - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/cxx_interop.yml diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index dc4f525c..fa7d2d14 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -25,12 +25,10 @@ jobs: benchmarks: name: Benchmarks - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/benchmarks.yml with: benchmark_package_path: "Benchmarks" cxx-interop: name: Cxx interop - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/cxx_interop.yml diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 7a3d2026..96773e82 100644 --- 
a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -47,7 +47,6 @@ on: jobs: unit-tests: name: Unit tests - # Workaround https://github.com/nektos/act/issues/1875 uses: ./.github/workflows/swift_matrix.yml with: name: "Unit tests" From cf7bfc43c04adfb560293791aa87f448ed83b7d9 Mon Sep 17 00:00:00 2001 From: Michael Gecht Date: Mon, 25 Nov 2024 16:03:54 +0000 Subject: [PATCH 3/7] Remove dangling `@main` from GH action --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 12adf777..983b1e19 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -19,7 +19,7 @@ jobs: benchmarks: name: Benchmarks - uses: ./.github/workflows/benchmarks.yml@main + uses: ./.github/workflows/benchmarks.yml with: benchmark_package_path: "Benchmarks" From 55a58b2b56a9054f42c4e63faf10a8420e9e36b2 Mon Sep 17 00:00:00 2001 From: Michael Gecht Date: Tue, 26 Nov 2024 14:32:42 +0000 Subject: [PATCH 4/7] Run consume loop 1000 times each Changes the benchmark to iterate over the consume loop a total of 1000 times. We now produce 1000*1000 (1e6) messages into the topic, and on every benchmark iteration consume 1000 messages each. I had to comment out the `librdkafka_with_offset_commit_messages_*` benchmark. For some reason, the benchmark suite keeps re-running it, and I have seen occasional failures when attempting to commit offsets. It's unclear to me why that happens right now, but decided it's not worth the investigation at the moment.
--- .../KafkaConsumerBenchmark.swift | 309 ++++++++++-------- ...umer_basic_consumer_messages_1000.p90.json | 14 +- ..._with_offset_commit_messages_1000.p90.json | 9 +- ...afka_basic_consumer_messages_1000.p90.json | 8 +- ..._with_offset_commit_messages_1000.p90.json | 10 - ...umer_basic_consumer_messages_1000.p90.json | 16 +- ..._with_offset_commit_messages_1000.p90.json | 14 +- ...afka_basic_consumer_messages_1000.p90.json | 12 +- ..._with_offset_commit_messages_1000.p90.json | 10 - ...umer_basic_consumer_messages_1000.p90.json | 14 +- ..._with_offset_commit_messages_1000.p90.json | 14 +- ...afka_basic_consumer_messages_1000.p90.json | 8 +- ..._with_offset_commit_messages_1000.p90.json | 10 - ...umer_basic_consumer_messages_1000.p90.json | 14 +- ..._with_offset_commit_messages_1000.p90.json | 14 +- ...afka_basic_consumer_messages_1000.p90.json | 12 +- ..._with_offset_commit_messages_1000.p90.json | 10 - 17 files changed, 247 insertions(+), 251 deletions(-) delete mode 100644 Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json delete mode 100644 Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 9c49a2c3..cecd072d 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -24,7 +24,11 @@ import struct Foundation.UUID let benchmarks = { var uniqueTestTopic: String! 
- let messageCount: UInt = 1000 + let numberOfPartitions: UInt = 4 + // We perform every benchmark this many times + let numberOfBatches: UInt = 1000 + // In every benchmark iteration, we consume this many messages + let messageCountPerBatch: UInt = 1000 Benchmark.defaultConfiguration = .init( metrics: [ @@ -34,10 +38,10 @@ let benchmarks = { .throughput, .allocatedResidentMemory, ] + .arc, - warmupIterations: 0, - scalingFactor: .one, - maxDuration: .seconds(5), - maxIterations: 100, + // We need to tell the benchmarking framework how often we are running the benchmark. + scalingFactor: .kilo, + maxDuration: .seconds(10_000_000), + maxIterations: 10, thresholds: [ .wallClock: .init(relative: [.p90: 35]), .cpuTotal: .init(relative: [.p90: 35]), @@ -52,7 +56,7 @@ let benchmarks = { ) Benchmark.setup = { - uniqueTestTopic = try await prepareTopic(messagesCount: messageCount, partitions: 6) + uniqueTestTopic = try await prepareTopic(messagesCount: messageCountPerBatch * numberOfBatches, partitions: numberOfPartitions) } Benchmark.teardown = { @@ -62,7 +66,7 @@ let benchmarks = { uniqueTestTopic = nil } - Benchmark("SwiftKafkaConsumer_basic_consumer_messages_\(messageCount)") { benchmark in + Benchmark("SwiftKafkaConsumer_basic_consumer_messages_\(messageCountPerBatch)") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( consumptionStrategy: .group( @@ -93,31 +97,36 @@ let benchmarks = { defer { benchLog("Finish consuming") } - // Run Task + // Run task group.addTask { try await serviceGroup.run() } - // Second Consumer Task + // Consumer task group.addTask { - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + var counter: UInt64 = 0 + var tmpCounter: UInt64 = 0 + let interval: UInt64 = Swift.max(UInt64(messageCountPerBatch / 20), 1) let totalStartDate = Date.timeIntervalSinceReferenceDate var totalBytes: UInt64 = 0 try await benchmark.withMeasurement { - 
for try await record in consumer.messages { - ctr += 1 - totalBytes += UInt64(record.value.readableBytes) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - if ctr >= messageCount { - break + for _ in benchmark.scaledIterations { + for try await record in consumer.messages { + counter += 1 + totalBytes += UInt64(record.value.readableBytes) + + tmpCounter += 1 + if tmpCounter >= interval { + benchLog("read \(counter * 100 / UInt64(messageCountPerBatch))%") + tmpCounter = 0 + } + if counter >= messageCountPerBatch { + // Reset counters for next iteration + counter = 0 + tmpCounter = 0 + break + } } } } @@ -125,18 +134,18 @@ let benchmarks = { let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" ) } - // Wait for second Consumer Task to complete + // Wait for consumer task to complete try await group.next() // Shutdown the serviceGroup await serviceGroup.triggerGracefulShutdown() } } - Benchmark("SwiftKafkaConsumer_with_offset_commit_messages_\(messageCount)") { benchmark in + Benchmark("SwiftKafkaConsumer_with_offset_commit_messages_\(messageCountPerBatch)") { benchmark in let uniqueGroupID = UUID().uuidString var consumerConfig = KafkaConsumerConfiguration( consumptionStrategy: .group( @@ -168,33 +177,38 @@ let benchmarks = { defer { benchLog("Finish consuming") } - // Run Task + // Run task group.addTask { try await serviceGroup.run() } - // Second Consumer Task + // Consumer task group.addTask { - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + var counter: UInt64 = 0 + var tmpCounter: UInt64 = 0 + let interval: UInt64 = 
Swift.max(UInt64(messageCountPerBatch / 20), 1) let totalStartDate = Date.timeIntervalSinceReferenceDate var totalBytes: UInt64 = 0 try await benchmark.withMeasurement { - for try await record in consumer.messages { - try consumer.scheduleCommit(record) - - ctr += 1 - totalBytes += UInt64(record.value.readableBytes) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - if ctr >= messageCount { - break + for _ in benchmark.scaledIterations { + for try await record in consumer.messages { + try consumer.scheduleCommit(record) + + counter += 1 + totalBytes += UInt64(record.value.readableBytes) + + tmpCounter += 1 + if tmpCounter >= interval { + benchLog("read \(counter * 100 / UInt64(messageCountPerBatch))%") + tmpCounter = 0 + } + if counter >= messageCountPerBatch { + // Reset counters for next iteration + counter = 0 + tmpCounter = 0 + break + } } } } @@ -202,18 +216,18 @@ let benchmarks = { let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" ) } - // Wait for second Consumer Task to complete + // Wait for consumer task to complete try await group.next() // Shutdown the serviceGroup await serviceGroup.triggerGracefulShutdown() } } - Benchmark("librdkafka_basic_consumer_messages_\(messageCount)") { benchmark in + Benchmark("librdkafka_basic_consumer_messages_\(messageCountPerBatch)") { benchmark in let uniqueGroupID = UUID().uuidString let rdKafkaConsumerConfig: [String: String] = [ "group.id": uniqueGroupID, @@ -248,106 +262,35 @@ let benchmarks = { rd_kafka_subscribe(kafkaHandle, subscriptionList) rd_kafka_poll(kafkaHandle, 0) - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 + var counter:
UInt64 = 0 + var tmpCounter: UInt64 = 0 - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) + let interval: UInt64 = Swift.max(UInt64(messageCountPerBatch / 20), 1) let totalStartDate = Date.timeIntervalSinceReferenceDate var totalBytes: UInt64 = 0 benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - ctr += 1 - totalBytes += UInt64(record.pointee.len) - - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } - } - } - - rd_kafka_consumer_close(kafkaHandle) - - let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate - let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 - benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" - ) - } - - Benchmark("librdkafka_with_offset_commit_messages_\(messageCount)") { benchmark in - let uniqueGroupID = UUID().uuidString - let rdKafkaConsumerConfig: [String: String] = [ - "group.id": uniqueGroupID, - "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", - "broker.address.family": "v4", - "auto.offset.reset": "beginning", - "enable.auto.commit": "false", - ] - - let configPointer: OpaquePointer = rd_kafka_conf_new() - for (key, value) in rdKafkaConsumerConfig { - precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) - } - - let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) - guard let kafkaHandle else { - preconditionFailure("Kafka handle was not created") - } - defer { - rd_kafka_destroy(kafkaHandle) - } - - rd_kafka_poll_set_consumer(kafkaHandle) - let subscriptionList = rd_kafka_topic_partition_list_new(1) - defer { - rd_kafka_topic_partition_list_destroy(subscriptionList) - } - rd_kafka_topic_partition_list_add( - subscriptionList, - uniqueTestTopic, - 
RD_KAFKA_PARTITION_UA - ) - rd_kafka_subscribe(kafkaHandle, subscriptionList) - rd_kafka_poll(kafkaHandle, 0) - - var ctr: UInt64 = 0 - var tmpCtr: UInt64 = 0 - - let interval: UInt64 = Swift.max(UInt64(messageCount / 20), 1) - let totalStartDate = Date.timeIntervalSinceReferenceDate - var totalBytes: UInt64 = 0 + for _ in benchmark.scaledIterations { + while counter < messageCountPerBatch { + guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + continue + } + defer { + rd_kafka_message_destroy(record) + } + counter += 1 + totalBytes += UInt64(record.pointee.len) - benchmark.withMeasurement { - while ctr < messageCount { - guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { - continue - } - defer { - rd_kafka_message_destroy(record) - } - guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { - continue + tmpCounter += 1 + if tmpCounter >= interval { + benchLog("read \(counter * 100 / UInt64(messageCountPerBatch))%") + tmpCounter = 0 + } } - let result = rd_kafka_commit_message(kafkaHandle, record, 0) - precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) - - ctr += 1 - totalBytes += UInt64(record.pointee.len) - tmpCtr += 1 - if tmpCtr >= interval { - benchLog("read \(ctr * 100 / UInt64(messageCount))%") - tmpCtr = 0 - } + // Reset counters for next iteration + counter = 0 + tmpCounter = 0 } } @@ -356,7 +299,93 @@ let benchmarks = { let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 benchLog( - "All read up to ctr: \(ctr), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" ) } + + // Benchmark("librdkafka_with_offset_commit_messages_\(messageCountPerBatch)") { benchmark in + // let uniqueGroupID = UUID().uuidString + // let rdKafkaConsumerConfig: [String: String] = [ + // "group.id": uniqueGroupID, + 
// "bootstrap.servers": "\(brokerAddress.host):\(brokerAddress.port)", + // "broker.address.family": "v4", + // "auto.offset.reset": "beginning", + // "enable.auto.commit": "false", + // ] + + // let configPointer: OpaquePointer = rd_kafka_conf_new() + // for (key, value) in rdKafkaConsumerConfig { + // precondition(rd_kafka_conf_set(configPointer, key, value, nil, 0) == RD_KAFKA_CONF_OK) + // } + + // let kafkaHandle = rd_kafka_new(RD_KAFKA_CONSUMER, configPointer, nil, 0) + // guard let kafkaHandle else { + // preconditionFailure("Kafka handle was not created") + // } + // defer { + // rd_kafka_destroy(kafkaHandle) + // } + + // rd_kafka_poll_set_consumer(kafkaHandle) + // let subscriptionList = rd_kafka_topic_partition_list_new(1) + // defer { + // rd_kafka_topic_partition_list_destroy(subscriptionList) + // } + // rd_kafka_topic_partition_list_add( + // subscriptionList, + // uniqueTestTopic, + // RD_KAFKA_PARTITION_UA + // ) + // rd_kafka_subscribe(kafkaHandle, subscriptionList) + // rd_kafka_poll(kafkaHandle, 0) + + // var counter: UInt64 = 0 + // var tmpCounter: UInt64 = 0 + + // let interval: UInt64 = Swift.max(UInt64(messageCountPerBatch / 20), 1) + // let totalStartDate = Date.timeIntervalSinceReferenceDate + // var totalBytes: UInt64 = 0 + + // benchmark.withMeasurement { + // var myCtr: UInt64 = 0 + // for _ in benchmark.scaledIterations { + // myCtr += 1 + // print(myCtr) + // while counter < messageCountPerBatch { + // guard let record = rd_kafka_consumer_poll(kafkaHandle, 10) else { + // continue + // } + // defer { + // rd_kafka_message_destroy(record) + // } + // guard record.pointee.err != RD_KAFKA_RESP_ERR__PARTITION_EOF else { + // continue + // } + // let result = rd_kafka_commit_message(kafkaHandle, record, 1) + // precondition(result == RD_KAFKA_RESP_ERR_NO_ERROR) + + // counter += 1 + // totalBytes += UInt64(record.pointee.len) + + // tmpCounter += 1 + // if tmpCounter >= interval { + // benchLog("read \(counter * 100 / 
UInt64(messageCountPerBatch))%") + // tmpCounter = 0 + // } + // } + + // // Reset counters for next iteration + // counter = 0 + // tmpCounter = 0 + // } + // } + + // rd_kafka_consumer_close(kafkaHandle) + + // let timeIntervalTotal = Date.timeIntervalSinceReferenceDate - totalStartDate + // let avgRateMb = Double(totalBytes) / timeIntervalTotal / 1024 + // benchLog( + // "All read up to counter: \(counter), avgRate: (\(Int(avgRateMb))KB/s), timePassed: \(Int(timeIntervalTotal))sec" + // ) + // } } diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 210167a5..b537195e 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 77266944, - "cpuTotal" : 200000000, - "objectAllocCount" : 5549, - "releaseCount" : 15168, - "retainCount" : 7108, - "retainReleaseDelta" : 2511, + "allocatedResidentMemory" : 562561023, + "cpuTotal" : 210108415, + "objectAllocCount" : 5971, + "releaseCount" : 17775, + "retainCount" : 9638, + "retainReleaseDelta" : 2175, "throughput" : 2, - "wallClock" : 695307500 + "wallClock" : 638582783 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 0e0dcd23..918e44b7 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ 
b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,3 +1,10 @@ { - + "allocatedResidentMemory" : 554172415, + "cpuTotal" : 170000000, + "objectAllocCount" : 6991, + "releaseCount" : 19823, + "retainCount" : 10655, + "retainReleaseDelta" : 2185, + "throughput" : 2, + "wallClock" : 629669887 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 2be930a7..30b6a894 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 47382528, - "cpuTotal" : 10000000, + "allocatedResidentMemory" : 190971903, + "cpuTotal" : 460000000, "objectAllocCount" : 16, "releaseCount" : 48, "retainCount" : 2, "retainReleaseDelta" : 30, - "throughput" : 2, - "wallClock" : 640572501 + "throughput" : 1, + "wallClock" : 1870163542 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index a4bb5ea5..00000000 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 44204031, - "cpuTotal" : 80000000, - "objectAllocCount" : 16, - "releaseCount" : 48, - "retainCount" : 2, - "retainReleaseDelta" : 30, - "throughput" : 3, - "wallClock" : 320339967 -} \ No newline at end of file diff --git 
a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index f3f1b180..a150c38b 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 88211455, - "cpuTotal" : 40009727, - "objectAllocCount" : 3471, - "releaseCount" : 9175, - "retainCount" : 4435, - "retainReleaseDelta" : 1267, - "throughput" : 9, - "wallClock" : 130154495 + "allocatedResidentMemory" : 574619647, + "cpuTotal" : 240000000, + "objectAllocCount" : 5907, + "releaseCount" : 16639, + "retainCount" : 8591, + "retainReleaseDelta" : 2135, + "throughput" : 2, + "wallClock" : 647495679 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 803cb779..328aa230 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 73924608, - "cpuTotal" : 200000000, - "objectAllocCount" : 6039, - "releaseCount" : 14939, - "retainCount" : 6695, - "retainReleaseDelta" : 2205, + "allocatedResidentMemory" : 551026687, + "cpuTotal" : 180092927, + "objectAllocCount" : 7011, + "releaseCount" : 18865, + "retainCount" : 9668, + "retainReleaseDelta" : 2187, "throughput" : 2, - "wallClock" : 646714708 + "wallClock" : 639631359 } \ No newline at end of file diff --git 
a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index c1208ead..429d9257 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 47579136, - "cpuTotal" : 20000000, + "allocatedResidentMemory" : 265158655, + "cpuTotal" : 490209279, "objectAllocCount" : 16, - "releaseCount" : 48, + "releaseCount" : 52, "retainCount" : 2, - "retainReleaseDelta" : 30, - "throughput" : 2, - "wallClock" : 648354417 + "retainReleaseDelta" : 34, + "throughput" : 1, + "wallClock" : 1876951039 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 877d1f0e..00000000 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 42926080, - "cpuTotal" : 90000000, - "objectAllocCount" : 16, - "releaseCount" : 44, - "retainCount" : 2, - "retainReleaseDelta" : 26, - "throughput" : 1, - "wallClock" : 881145958 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index e9cdc8b8..41860ddc 100644 --- a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ 
b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 81592320, - "cpuTotal" : 200000000, - "objectAllocCount" : 4609, - "releaseCount" : 16389, - "retainCount" : 9217, - "retainReleaseDelta" : 2563, + "allocatedResidentMemory" : 566231039, + "cpuTotal" : 200015871, + "objectAllocCount" : 5431, + "releaseCount" : 18687, + "retainCount" : 10863, + "retainReleaseDelta" : 2415, "throughput" : 2, - "wallClock" : 709379209 + "wallClock" : 649592831 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index f4eb9e30..90deb824 100644 --- a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 78512128, - "cpuTotal" : 200000000, - "objectAllocCount" : 5439, - "releaseCount" : 18714, - "retainCount" : 10871, - "retainReleaseDelta" : 2404, + "allocatedResidentMemory" : 533725183, + "cpuTotal" : 180092927, + "objectAllocCount" : 6447, + "releaseCount" : 21743, + "retainCount" : 12895, + "retainReleaseDelta" : 2417, "throughput" : 2, - "wallClock" : 657556084 + "wallClock" : 639107071 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 40f3f3fb..0df839df 100644 --- a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ 
b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 48431104, - "cpuTotal" : 30000000, + "allocatedResidentMemory" : 565706751, + "cpuTotal" : 450101247, "objectAllocCount" : 16, "releaseCount" : 40, "retainCount" : 2, "retainReleaseDelta" : 22, - "throughput" : 2, - "wallClock" : 640575375 + "throughput" : 1, + "wallClock" : 1858076671 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 949815b3..00000000 --- a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 43450368, - "cpuTotal" : 90000000, - "objectAllocCount" : 16, - "releaseCount" : 44, - "retainCount" : 2, - "retainReleaseDelta" : 26, - "throughput" : 1, - "wallClock" : 874572167 -} \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index b26180ee..ba426c9e 100644 --- a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 79691776, - "cpuTotal" : 170000000, - "objectAllocCount" : 4593, - "releaseCount" : 16334, - "retainCount" : 9187, - "retainReleaseDelta" : 2554, + "allocatedResidentMemory" : 582811648, + "cpuTotal" : 220000000, + "objectAllocCount" : 5379, + "releaseCount" : 
18495, + "retainCount" : 10767, + "retainReleaseDelta" : 2341, "throughput" : 2, - "wallClock" : 698023125 + "wallClock" : 655359999 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index e61770ca..002560c9 100644 --- a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 72613888, - "cpuTotal" : 150000000, - "objectAllocCount" : 5459, - "releaseCount" : 18789, - "retainCount" : 10914, - "retainReleaseDelta" : 2420, + "allocatedResidentMemory" : 490340352, + "cpuTotal" : 180000000, + "objectAllocCount" : 6411, + "releaseCount" : 21615, + "retainCount" : 12815, + "retainReleaseDelta" : 2381, "throughput" : 2, - "wallClock" : 647447376 + "wallClock" : 645922815 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 233f2340..2e6629be 100644 --- a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,10 @@ { - "allocatedResidentMemory" : 48037888, - "cpuTotal" : 20000000, + "allocatedResidentMemory" : 587726847, + "cpuTotal" : 460000000, "objectAllocCount" : 16, - "releaseCount" : 44, + "releaseCount" : 52, "retainCount" : 2, - "retainReleaseDelta" : 26, - "throughput" : 2, - 
"wallClock" : 642639208 + "retainReleaseDelta" : 34, + "throughput" : 1, + "wallClock" : 1866465279 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json deleted file mode 100644 index 6a90ed0e..00000000 --- a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_with_offset_commit_messages_1000.p90.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "allocatedResidentMemory" : 50200576, - "cpuTotal" : 90000000, - "objectAllocCount" : 16, - "releaseCount" : 52, - "retainCount" : 2, - "retainReleaseDelta" : 34, - "throughput" : 1, - "wallClock" : 875615959 -} \ No newline at end of file From 415b7cbe31036da9439dddfc15bd06c8f2bf4018 Mon Sep 17 00:00:00 2001 From: Michael Gecht Date: Tue, 26 Nov 2024 14:39:31 +0000 Subject: [PATCH 5/7] Run `swift format` --- .../KafkaConsumerBenchmark.swift | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index cecd072d..66e9bcb4 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -56,7 +56,10 @@ let benchmarks = { ) Benchmark.setup = { - uniqueTestTopic = try await prepareTopic(messagesCount: messageCountPerBatch * numberOfBatches, partitions: numberOfPartitions) + uniqueTestTopic = try await prepareTopic( + messagesCount: messageCountPerBatch * numberOfBatches, + partitions: numberOfPartitions + ) } Benchmark.teardown = { From 62ae8e2813f6ba080840a63364fa53ed190622ec Mon Sep 17 00:00:00 2001 From: Michael Gecht Date: Tue, 26 Nov 2024 14:56:18 +0000 Subject: [PATCH 6/7] Fix type typo `swift build` 
was happy for whatever reason beofre. --- .../SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 66e9bcb4..8ee2151a 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -24,7 +24,7 @@ import struct Foundation.UUID let benchmarks = { var uniqueTestTopic: String! - let numberOfPartitions: UInt = 4 + let numberOfPartitions: Int32 = 4 // We perform every benchmark this many times let numberOfBatches: UInt = 1000 // In every benchmark iteration, we consume this many messages From baa1e9e73da6fe5e594e199b415528a5f02bcb46 Mon Sep 17 00:00:00 2001 From: Michael Gecht Date: Tue, 26 Nov 2024 15:51:16 +0000 Subject: [PATCH 7/7] Only measure `mallocCountTotal` in Benchmarks Keeps `scalingFactor: .kilo`, but only measures `.mallocCountTotal`, as that should be a reproducible value across different systems. 
--- .../KafkaConsumerBenchmark.swift | 21 +++---------------- ...umer_basic_consumer_messages_1000.p90.json | 9 +------- ..._with_offset_commit_messages_1000.p90.json | 9 +------- ...afka_basic_consumer_messages_1000.p90.json | 9 +------- ...umer_basic_consumer_messages_1000.p90.json | 9 +------- ..._with_offset_commit_messages_1000.p90.json | 9 +------- ...afka_basic_consumer_messages_1000.p90.json | 9 +------- ...umer_basic_consumer_messages_1000.p90.json | 9 +------- ..._with_offset_commit_messages_1000.p90.json | 9 +------- ...afka_basic_consumer_messages_1000.p90.json | 9 +------- ...umer_basic_consumer_messages_1000.p90.json | 9 +------- ..._with_offset_commit_messages_1000.p90.json | 9 +------- ...afka_basic_consumer_messages_1000.p90.json | 9 +------- 13 files changed, 15 insertions(+), 114 deletions(-) diff --git a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift index 8ee2151a..837e4315 100644 --- a/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift +++ b/Benchmarks/Benchmarks/SwiftKafkaConsumerBenchmarks/KafkaConsumerBenchmark.swift @@ -32,27 +32,12 @@ let benchmarks = { Benchmark.defaultConfiguration = .init( metrics: [ - .wallClock, - .cpuTotal, - .contextSwitches, - .throughput, - .allocatedResidentMemory, - ] + .arc, + .mallocCountTotal, + ], // We need to tell the benchmarking framework how often we are running the benchmark. 
scalingFactor: .kilo, maxDuration: .seconds(10_000_000), - maxIterations: 10, - thresholds: [ - .wallClock: .init(relative: [.p90: 35]), - .cpuTotal: .init(relative: [.p90: 35]), - .allocatedResidentMemory: .init(relative: [.p90: 20]), - .contextSwitches: .init(relative: [.p90: 35]), - .throughput: .init(relative: [.p90: 35]), - .objectAllocCount: .init(relative: [.p90: 20]), - .retainCount: .init(relative: [.p90: 20]), - .releaseCount: .init(relative: [.p90: 20]), - .retainReleaseDelta: .init(relative: [.p90: 20]), - ] + maxIterations: 10 ) Benchmark.setup = { diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index b537195e..a55847f6 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 562561023, - "cpuTotal" : 210108415, - "objectAllocCount" : 5971, - "releaseCount" : 17775, - "retainCount" : 9638, - "retainReleaseDelta" : 2175, - "throughput" : 2, - "wallClock" : 638582783 + "mallocCountTotal" : 42079 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 918e44b7..d969106a 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 554172415, - "cpuTotal" : 170000000, - 
"objectAllocCount" : 6991, - "releaseCount" : 19823, - "retainCount" : 10655, - "retainReleaseDelta" : 2185, - "throughput" : 2, - "wallClock" : 629669887 + "mallocCountTotal" : 59647 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 30b6a894..169c3dfd 100644 --- a/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.10/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 190971903, - "cpuTotal" : 460000000, - "objectAllocCount" : 16, - "releaseCount" : 48, - "retainCount" : 2, - "retainReleaseDelta" : 30, - "throughput" : 1, - "wallClock" : 1870163542 + "mallocCountTotal" : 1000959 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index a150c38b..07e0e6cc 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 574619647, - "cpuTotal" : 240000000, - "objectAllocCount" : 5907, - "releaseCount" : 16639, - "retainCount" : 8591, - "retainReleaseDelta" : 2135, - "throughput" : 2, - "wallClock" : 647495679 + "mallocCountTotal" : 29935 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json 
b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 328aa230..52549f62 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 551026687, - "cpuTotal" : 180092927, - "objectAllocCount" : 7011, - "releaseCount" : 18865, - "retainCount" : 9668, - "retainReleaseDelta" : 2187, - "throughput" : 2, - "wallClock" : 639631359 + "mallocCountTotal" : 49983 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 429d9257..169c3dfd 100644 --- a/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/5.9/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 265158655, - "cpuTotal" : 490209279, - "objectAllocCount" : 16, - "releaseCount" : 52, - "retainCount" : 2, - "retainReleaseDelta" : 34, - "throughput" : 1, - "wallClock" : 1876951039 + "mallocCountTotal" : 1000959 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index 41860ddc..d1fe7ee9 100644 --- a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - 
"allocatedResidentMemory" : 566231039, - "cpuTotal" : 200015871, - "objectAllocCount" : 5431, - "releaseCount" : 18687, - "retainCount" : 10863, - "retainReleaseDelta" : 2415, - "throughput" : 2, - "wallClock" : 649592831 + "mallocCountTotal" : 31695 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 90deb824..25f6172f 100644 --- a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 533725183, - "cpuTotal" : 180092927, - "objectAllocCount" : 6447, - "releaseCount" : 21743, - "retainCount" : 12895, - "retainReleaseDelta" : 2417, - "throughput" : 2, - "wallClock" : 639107071 + "mallocCountTotal" : 56831 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 0df839df..169c3dfd 100644 --- a/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 565706751, - "cpuTotal" : 450101247, - "objectAllocCount" : 16, - "releaseCount" : 40, - "retainCount" : 2, - "retainReleaseDelta" : 22, - "throughput" : 1, - "wallClock" : 1858076671 + "mallocCountTotal" : 1000959 } \ No newline at end of file diff --git 
a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json index ba426c9e..4498695c 100644 --- a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 582811648, - "cpuTotal" : 220000000, - "objectAllocCount" : 5379, - "releaseCount" : 18495, - "retainCount" : 10767, - "retainReleaseDelta" : 2341, - "throughput" : 2, - "wallClock" : 655359999 + "mallocCountTotal" : 29295 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json index 002560c9..5f340cba 100644 --- a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.SwiftKafkaConsumer_with_offset_commit_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 490340352, - "cpuTotal" : 180000000, - "objectAllocCount" : 6411, - "releaseCount" : 21615, - "retainCount" : 12815, - "retainReleaseDelta" : 2381, - "throughput" : 2, - "wallClock" : 645922815 + "mallocCountTotal" : 48287 } \ No newline at end of file diff --git a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json index 2e6629be..64b28c75 100644 --- 
a/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json +++ b/Benchmarks/Thresholds/nightly-6.0/SwiftKafkaConsumerBenchmarks.librdkafka_basic_consumer_messages_1000.p90.json @@ -1,10 +1,3 @@ { - "allocatedResidentMemory" : 587726847, - "cpuTotal" : 460000000, - "objectAllocCount" : 16, - "releaseCount" : 52, - "retainCount" : 2, - "retainReleaseDelta" : 34, - "throughput" : 1, - "wallClock" : 1866465279 + "mallocCountTotal" : 1000447 } \ No newline at end of file