33 changes: 1 addition & 32 deletions .github/workflows/analysis_workflow.yml
@@ -1,36 +1,5 @@
name: Build with analysis tools
on:
  workflow_dispatch:
    inputs:
      run_all_benchmarks:
        description: Run all benchmarks
        type: boolean
        default: false
      dev_image_tag:
        description: Tag of the ArcticDB development image to use for benchmark and code coverage flows
        type: string
        default: latest
      suite_to_run:
        description: Run LMDB suite or REAL storage (or both - ALL)
        type: choice
        options:
          - 'LMDB'
          - 'REAL'
          - 'ALL'
        default: 'LMDB'
      suite_overwrite:
        description: Specify regular expression for specific tests to be executed
        type: string
        default: ''

  schedule: # Schedule the job to run at 12 a.m. daily
    - cron: '0 0 * * *'

  pull_request_target:
    paths-ignore:
      - "**/*.md"

on: workflow_call
jobs:

  get_commits_to_benchmark:
50 changes: 2 additions & 48 deletions .github/workflows/build.yml
@@ -1,53 +1,7 @@
name: Build and Test
on:
  push:
    # On push only local storage tests get executed
    tags: ["v**"]
    branches: ["master"]
  pull_request:
    # On pull requests only local storage tests get executed
    branches: ["**"]
  schedule:
    # IMPORTANT: For the scheduled job we execute AWS_S3
    - cron: '0 23 * * 0,1,2,3,4' # Start the previous day at 23:00 to finish the next day
  workflow_dispatch:
    inputs:
      persistent_storage:
        description: Run the persistent storage tests?
        type: choice
        options:
          - 'no'
          - 'AWS_S3'
          - 'GCPXML'
        default: 'no'

      pypi_publish:
        type: boolean
      publish_env:
        description: Environment to publish to
        type: environment
      cmake_preset_type:
        description: Override CMAKE preset type
        type: choice
        options: ["-", debug, release]
      dev_image_tag:
        description: Tag of the ArcticDB development image to use for the Linux C++ tests build
        type: string
        default: arcticdb-dev-clang:latest
      pytest_args:
        description: Override which tests run, or supply your own pytest line if the string starts with pytest ... (Example -- pytest -n auto -v --count=50 -x python/tests/compat)
        type: string
        default: ""
      version_cache_full_test:
        description: 'Run tests with both version cache 0 and 2000000000'
        required: false
        default: false
        type: boolean
      macos_enabled:
        description: Enable macOS tests
        required: false
        default: false
        type: boolean
on: workflow_call

run-name: Building ${{github.ref_name}} on ${{github.event_name}} by ${{github.actor}}
concurrency:
  group: ${{github.ref}}
29 changes: 1 addition & 28 deletions .github/workflows/build_with_conda.yml
@@ -1,32 +1,5 @@
name: Build with conda
on:
  push:
    branches:
      - master
  # For pull requests, this runs the CI on the merge commit
  # of HEAD with the target branch instead of on HEAD, allowing
  # testing against potential new states which might have
  # been introduced by the target branch's last commits.
  # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
  pull_request:

  workflow_dispatch:
    inputs:
      run_on_arm_mac:
        description: 'Run on ARM macOS'
        type: boolean
        required: false
        default: false
      run_cpp_tests:
        description: 'Run C++ tests'
        type: boolean
        required: true
        default: true
      run_custom_pytest_command:
        description: 'Run a custom pytest command (curdir is the project root), or pass additional arguments to the default command'
        type: string
        required: false
        default: ""
on: workflow_call

jobs:

9 changes: 8 additions & 1 deletion .github/workflows/static_analysis.yml
@@ -4,6 +4,12 @@
  workflow_dispatch:
  schedule:
    - cron: "0 3 * * *"
  pull_request:

permissions:
  pull-requests: write
  contents: read
  discussions: write

jobs:
  polaris-scan:
@@ -55,7 +61,7 @@
        run: cp ${{github.workspace}}/coverity.yaml ${{github.workspace}}/cpp/out/linux-release-build

      - name: Polaris PR Scan
        uses: blackduck-inc/black-duck-security-scan@v2.0.0
        uses: blackduck-inc/black-duck-security-scan@latest
        with:
          polaris_server_url: ${{ vars.POLARIS_SERVER_URL }}
          polaris_access_token: ${{ secrets.POLARIS_ACCESS_TOKEN }}
@@ -73,3 +79,4 @@
          polaris_reports_sarif_groupSCAIssues: true
          polaris_upload_sarif_report: true
          polaris_prComment_severities: "high,critical,medium,low"

19 changes: 19 additions & 0 deletions cpp/arcticdb/async/async_store.hpp
@@ -71,6 +71,25 @@ class AsyncStore : public Store {

    util::check(segment.descriptor().id() == stream_id, "Descriptor id mismatch in atom key {} != {}", stream_id, segment.descriptor().id());

    // ================== INTENTIONAL ERRORS =============================
    // 1. Use After Free
    char* data = new char[100];
    std::strcpy(data, "Coverity test - use after free");
    delete[] data;
    std::cout << "Data (after delete): " << data << std::endl; // USE_AFTER_FREE

    // 2. Null Pointer Dereference
    int* ptr = nullptr;
    *ptr = 42; // NULL_POINTER_DEREFERENCE

    // 3. Resource Leak
    FILE* file = fopen("temp.txt", "w");
    if (file != nullptr) {
        fprintf(file, "Testing resource leak\n");
        // Missing fclose(file); // RESOURCE_LEAK
    }
    // ======================================================================

    return async::submit_cpu_task(EncodeAtomTask{
            key_type, version_id, stream_id, start_index, end_index, current_timestamp(),
            std::move(segment), codec_, encoding_version_
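For contrast, here is a minimal sketch (illustrative only, not part of this PR) of how the three planted defects above would normally be avoided; they correspond to the USE_AFTER_FREE, NULL_POINTER_DEREFERENCE, and RESOURCE_LEAK findings the scan is expected to raise:

```cpp
#include <cstdio>
#include <iostream>
#include <memory>
#include <string>

// Deleter that closes a FILE* when the owning unique_ptr goes out of scope.
struct FileCloser {
    void operator()(std::FILE* f) const {
        if (f) {
            std::fclose(f);
        }
    }
};

int main() {
    // 1. No use after free: std::string owns its buffer and releases it on scope exit.
    std::string data = "Coverity test - safe variant";
    std::cout << "Data: " << data << std::endl;

    // 2. No null dereference: allocate storage before writing through the pointer.
    auto ptr = std::make_unique<int>(0);
    *ptr = 42;

    // 3. No resource leak: RAII closes the file even on an early return.
    std::unique_ptr<std::FILE, FileCloser> file(std::fopen("temp.txt", "w"));
    if (file) {
        std::fprintf(file.get(), "Testing resource management\n");
    } // fclose runs automatically when `file` is destroyed.
    return 0;
}
```

Letting std::string, std::make_unique, and an RAII deleter own each resource removes all three defect classes without any explicit cleanup code.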
24 changes: 24 additions & 0 deletions cpp/arcticdb/version/local_versioned_engine.cpp
@@ -723,6 +723,30 @@ VersionedItem LocalVersionedEngine::write_versioned_dataframe_internal(
    bool allow_sparse,
    bool validate_index
) {

    // =======================================================
    // INTENTIONAL ERROR
    std::vector<int> v(1000);
    for (int i = 0; i < 1000; i++) {
        v[i] = i;
    }
    std::vector v1 = std::move(v);
    for (size_t i = 0; i < v.size(); ++i) {
        std::cout << v[i];
        v[i] = i;
    }

    std::vector v2 = std::move(v1);
    for (auto i : v1) {
        std::cout << i;
    }

    volatile int* i = new int(0);
    delete i;
    volatile int a = *i;
    std::cout << a << " " << *i << std::endl;
    //========================================================

    ARCTICDB_SAMPLE(WriteVersionedDataFrame, 0)
    py::gil_scoped_release release_gil;
    ARCTICDB_RUNTIME_DEBUG(log::version(), "Command: write_versioned_dataframe");
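The block above plants two defect classes: use-after-move (reading `v` after `std::move(v)`, and iterating `v1` after it has been moved into `v2`) and a heap use-after-free through `i`. A moved-from std::vector is left in a valid but unspecified state, so the safe pattern is to reinitialise it before reuse and to read only from the vector that currently owns the data. A minimal sketch of that pattern (illustrative only, not part of this PR):

```cpp
#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

int main() {
    std::vector<int> v(1000);
    for (int i = 0; i < 1000; i++) {
        v[i] = i;
    }

    std::vector<int> v1 = std::move(v); // v is now valid but unspecified

    v.assign(1000, 0); // reinitialise the moved-from vector before touching it again
    for (std::size_t i = 0; i < v.size(); ++i) {
        v[i] = static_cast<int>(i);
    }

    for (int x : v1) { // v1 currently owns the data, so reading it is well defined
        std::cout << x << ' ';
    }
    std::cout << '\n';
    return 0;
}
```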
16 changes: 16 additions & 0 deletions cpp/arcticdb/version/version_core.cpp
@@ -2069,6 +2069,22 @@ std::variant<VersionedItem, CompactionError> compact_incomplete_impl(
            read_incomplete_flags
    );

    // =======================================================
    // INTENTIONAL ERROR
    std::vector v = {1, 2, 3, 4};
    std::vector v1 = std::move(v);
    for (auto i : v) {
        std::cout << i;
    }
    for (auto i : v1) {
        std::cout << i;
    }

    int* i = new int(0);
    delete i;
    std::cout << *i << std::endl;
    //========================================================

    bool has_incomplete_segments;
    if (std::holds_alternative<CompactionError>(read_incompletes_result)) {
        return std::get<CompactionError>(read_incompletes_result);
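Again for contrast, a safe variant of the planted block above (illustrative only, not part of this PR): copy rather than move when both vectors are read afterwards, and let std::unique_ptr own the heap integer so there is no read after delete.

```cpp
#include <iostream>
#include <memory>
#include <vector>

int main() {
    std::vector<int> v = {1, 2, 3, 4};
    std::vector<int> v1 = v; // copy, so both vectors remain readable
    for (int i : v) {
        std::cout << i;
    }
    for (int i : v1) {
        std::cout << i;
    }
    std::cout << '\n';

    auto i = std::make_unique<int>(0); // freed automatically; no use after free
    std::cout << *i << std::endl;
    return 0;
}
```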