diff --git a/.github/workflows/analysis_workflow.yml b/.github/workflows/analysis_workflow.yml
index 02f7c80a86..42d9e29b70 100644
--- a/.github/workflows/analysis_workflow.yml
+++ b/.github/workflows/analysis_workflow.yml
@@ -1,36 +1,5 @@
 name: Build with analysis tools
-on:
-  workflow_dispatch:
-    inputs:
-      run_all_benchmarks:
-        description: Run all benchmarks
-        type: boolean
-        default: false
-      dev_image_tag:
-        description: Tag of the ArcticDB development image to use for benchmark and code coverage flows
-        type: string
-        default: latest
-      suite_to_run:
-        description: Run LMDB suite or REAL storage (or both - ALL)
-        type: choice
-        options:
-          - 'LMDB'
-          - 'REAL'
-          - 'ALL'
-        default: 'LMDB'
-      suite_overwrite:
-        description: Specify regular expression for specific tests to be executed
-        type: string
-        default: ''
-
-
-  schedule: # Schedule the job to run at 12 a.m. daily
-    - cron: '0 0 * * *'
-
-  pull_request_target:
-    paths-ignore:
-      - "**/*.md"
-
+on: workflow_call

 jobs:
   get_commits_to_benchmark:
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 3acf0fe808..1871935dc9 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,53 +1,7 @@
 name: Build and Test
-on:
-  push:
-    # On push only local storage tests get executed
-    tags: ["v**"]
-    branches: ["master"]
-  pull_request:
-    # On pull requests only local storage tests get executed
-    branches: ["**"]
-  schedule:
-    # IMPORTANT: For scheduled job we execute AWS_S3
-    - cron: '0 23 * * 0,1,2,3,4' # Start previous day at 23:00 to finish next day
-  workflow_dispatch:
-    inputs:
-      persistent_storage:
-        description: Run the persistent storage tests?
-        type: choice
-        options:
-          - 'no'
-          - 'AWS_S3'
-          - 'GCPXML'
-        default: 'no'
-      pypi_publish:
-        type: boolean
-      publish_env:
-        description: Environment to publish to
-        type: environment
-      cmake_preset_type:
-        description: Override CMAKE preset type
-        type: choice
-        options: ["-", debug, release]
-      dev_image_tag:
-        description: Tag of the ArcticDB development image to use for the Linux C++ tests build
-        type: string
-        default: arcticdb-dev-clang:latest
-      pytest_args:
-        description: Rewrite what tests will run or do your own pytest line if string starts with pytest ... (Example -- pytest -n auto -v --count=50 -x python/tests/compat)
-        type: string
-        default: ""
-      version_cache_full_test:
-        description: 'Run tests with both version cache 0 and 2000000000'
-        required: false
-        default: false
-        type: boolean
-      macos_enabled:
-        description: Enable macOS tests
-        required: false
-        default: false
-        type: boolean
+on: workflow_call
+

 run-name: Building ${{github.ref_name}} on ${{github.event_name}} by ${{github.actor}}
 concurrency:
   group: ${{github.ref}}
diff --git a/.github/workflows/build_with_conda.yml b/.github/workflows/build_with_conda.yml
index 1a9c88a3f2..77ba74d0e8 100644
--- a/.github/workflows/build_with_conda.yml
+++ b/.github/workflows/build_with_conda.yml
@@ -1,32 +1,5 @@
 name: Build with conda
-on:
-  push:
-    branches:
-      - master
-  # For Pull-Requests, this runs the CI on merge commit
-  # of HEAD with the target branch instead on HEAD, allowing
-  # testing against potential new states which might have
-  # been introduced in the target branch last commits.
-  # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
-  pull_request:
-
-  workflow_dispatch:
-    inputs:
-      run_on_arm_mac:
-        description: 'Run on arm macos'
-        type: boolean
-        required: false
-        default: false
-      run_cpp_tests:
-        description: 'Run C++ tests'
-        type: boolean
-        required: true
-        default: true
-      run_custom_pytest_command:
-        description: 'Run custom pytest command (curdir is project root). Or pass additional arguments to default command'
-        type: string
-        required: false
-        default: ""
+on: workflow_call


 jobs:
diff --git a/.github/workflows/static_analysis.yml b/.github/workflows/static_analysis.yml
index e3d70b1a06..6d36cda023 100644
--- a/.github/workflows/static_analysis.yml
+++ b/.github/workflows/static_analysis.yml
@@ -4,6 +4,12 @@
   workflow_dispatch:
   schedule:
     - cron: "0 3 * * *"
+  pull_request:
+
+permissions:
+  pull-requests: write
+  contents: read
+  discussions: write

 jobs:
   polaris-scan:
@@ -55,7 +61,7 @@
         run: cp ${{github.workspace}}/coverity.yaml ${{github.workspace}}/cpp/out/linux-release-build

       - name: Polaris PR Scan
-        uses: blackduck-inc/black-duck-security-scan@v2.0.0
+        uses: blackduck-inc/black-duck-security-scan@latest
         with:
           polaris_server_url: ${{ vars.POLARIS_SERVER_URL }}
           polaris_access_token: ${{ secrets.POLARIS_ACCESS_TOKEN }}
@@ -73,3 +79,4 @@
           polaris_reports_sarif_groupSCAIssues: true
           polaris_upload_sarif_report: true
           polaris_prComment_severities: "high,critical,medium,low"
+
diff --git a/cpp/arcticdb/async/async_store.hpp b/cpp/arcticdb/async/async_store.hpp
index dd184eedb9..a0e4e77cfa 100644
--- a/cpp/arcticdb/async/async_store.hpp
+++ b/cpp/arcticdb/async/async_store.hpp
@@ -71,6 +71,25 @@ class AsyncStore : public Store {
     util::check(segment.descriptor().id() == stream_id,
                 "Descriptor id mismatch in atom key {} != {}",
                 stream_id, segment.descriptor().id());
+    // ================== INTENTIONAL ERRORS =============================
+    // 1. Use After Free
+    char* data = new char[100];
+    std::strcpy(data, "Coverity test - use after free");
+    delete[] data;
+    std::cout << "Data (after delete): " << data << std::endl; // USE_AFTER_FREE
+
+    // 2. Null Pointer Dereference
+    int* ptr = nullptr;
+    *ptr = 42; // NULL_POINTER_DEREFERENCE
+
+    // 3. Resource Leak
+    FILE* file = fopen("temp.txt", "w");
+    if (file != nullptr) {
+        fprintf(file, "Testing resource leak\n");
+        // Missing fclose(file); // RESOURCE_LEAK
+    }
+    // ======================================================================
+
     return async::submit_cpu_task(EncodeAtomTask{
             key_type, version_id, stream_id, start_index, end_index, current_timestamp(),
             std::move(segment), codec_, encoding_version_
diff --git a/cpp/arcticdb/version/local_versioned_engine.cpp b/cpp/arcticdb/version/local_versioned_engine.cpp
index 0343e95472..f2aeb17496 100644
--- a/cpp/arcticdb/version/local_versioned_engine.cpp
+++ b/cpp/arcticdb/version/local_versioned_engine.cpp
@@ -723,6 +723,30 @@ VersionedItem LocalVersionedEngine::write_versioned_dataframe_internal(
     bool allow_sparse,
     bool validate_index
     ) {
+
+    // =======================================================
+    // INTENTIONAL ERROR
+    std::vector<int> v(1000);
+    for (int i = 0; i < 1000; i++) {
+        v[i] = i;
+    }
+    std::vector<int> v1 = std::move(v);
+    for (size_t i = 0; i < v.size(); ++i) {
+        std::cout << v[i] << " ";
+    }
[...]
@@ [...] @@ compact_incomplete_impl(
         read_incomplete_flags
     );
+    // =======================================================
+    // INTENTIONAL ERROR
+    std::vector<int> v = {1, 2, 3, 4};
+    std::vector<int> v1 = std::move(v);
+    for (auto i : v) {
+        std::cout << i << " ";
+    }
+    // =======================================================
     if (std::holds_alternative<...>(read_incompletes_result)) {
         return std::get<...>(read_incompletes_result);
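
A note on the workflow changes above: analysis_workflow.yml, build.yml and build_with_conda.yml lose their own push/pull_request/schedule/workflow_dispatch triggers and become reusable workflows (on: workflow_call), so after this change they only run when another workflow invokes them as a job. That caller is not part of this diff; the sketch below is a hypothetical dispatcher (the file name, the trigger set and the use of secrets: inherit are illustrative assumptions, not something this change adds):

# Hypothetical .github/workflows/ci.yml (illustrative sketch, not in this diff)
name: CI dispatcher
on:
  pull_request:
  push:
    branches: ["master"]
jobs:
  build:
    # Reusable workflows are invoked at the job level. Parameters formerly exposed
    # as workflow_dispatch inputs would now have to be declared under
    # "on: workflow_call: inputs:" in build.yml and passed here via "with:".
    uses: ./.github/workflows/build.yml
    secrets: inherit
  conda:
    uses: ./.github/workflows/build_with_conda.yml
    secrets: inherit
  analysis:
    uses: ./.github/workflows/analysis_workflow.yml
    secrets: inherit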