chore: Fix errors and compiler warnings on verification platforms #420

Merged · 6 commits · Apr 10, 2024
8 changes: 4 additions & 4 deletions dev/benchmarks/c/array_benchmark.cc
@@ -231,7 +231,7 @@ static void BenchmarkArrayViewGetString(benchmark::State& state) {

int64_t n_alphabets = n_values / alphabet.size() + 1;
std::vector<char> data(alphabet.size() * n_alphabets);
- for (int64_t data_pos = 0; data_pos < data.size(); data_pos += alphabet.size()) {
+ for (size_t data_pos = 0; data_pos < data.size(); data_pos += alphabet.size()) {
memcpy(data.data() + data_pos, alphabet.data(), alphabet.size());
}

@@ -262,7 +262,7 @@ static ArrowErrorCode CreateAndAppendToArrayInt(ArrowArray* array,
NANOARROW_RETURN_NOT_OK(ArrowArrayInitFromType(array, type));
NANOARROW_RETURN_NOT_OK(ArrowArrayStartAppending(array));

- for (int64_t i = 0; i < values.size(); i++) {
+ for (size_t i = 0; i < values.size(); i++) {
NANOARROW_RETURN_NOT_OK(ArrowArrayAppendInt(array, values[i]));
}

@@ -293,7 +293,7 @@ static void BenchmarkArrayAppendString(benchmark::State& state) {
int64_t value_size = 7;

std::vector<std::string> values(n_values);
- int64_t alphabet_pos = 0;
+ size_t alphabet_pos = 0;
for (std::string& value : values) {
if ((alphabet_pos + value_size) >= kAlphabet.size()) {
alphabet_pos = 0;
@@ -361,7 +361,7 @@ static ArrowErrorCode CreateAndAppendIntWithNulls(ArrowArray* array,
NANOARROW_RETURN_NOT_OK(ArrowArrayStartAppending(array));
CType non_null_value = std::numeric_limits<CType>::max() / 2;

- for (int64_t i = 0; i < validity.size(); i++) {
+ for (size_t i = 0; i < validity.size(); i++) {
if (validity[i]) {
NANOARROW_RETURN_NOT_OK(ArrowArrayAppendInt(array, non_null_value));
} else {
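All four hunks in this file fix the same -Wsign-compare pattern: a signed 64-bit loop counter compared against an unsigned container size. A minimal sketch of the warning and the fix (illustrative code, not from the PR):

#include <cstddef>
#include <vector>

// A signed counter compared against std::vector::size() (a size_t) trips
// -Wsign-compare on -Werror builds; using size_t for the counter matches
// the container's size type and silences the warning.
int SumBytes(const std::vector<char>& data) {
  int total = 0;
  for (size_t i = 0; i < data.size(); i++) {  // was: int64_t i
    total += data[i];
  }
  return total;
}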
@@ -189,7 +189,7 @@ static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
__## NS ## field_present(ID, t__tmp)\
static inline T ## _union_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
{ T ## _union_t u__tmp = { 0, 0 }; u__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
- if (u__tmp.type == 0) return u__tmp; u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
+ if (u__tmp.type == 0) { return u__tmp; } u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
static inline NS ## string_t N ## _ ## NK ## _as_string(N ## _table_t t__tmp)\
{ return NS ## string_cast_from_generic(N ## _ ## NK ## _get(t__tmp)); }\

@@ -200,7 +200,7 @@ static inline T ## _union_t T ## _union_vec_at(T ## _union_vec_t uv__tmp, size_t
{ T ## _union_t u__tmp = { 0, 0 }; size_t n__tmp = NS ## vec_len(uv__tmp.type);\
FLATCC_ASSERT(n__tmp > (i__tmp) && "index out of range"); u__tmp.type = uv__tmp.type[i__tmp];\
/* Unknown type is treated as NONE for schema evolution. */\
- if (u__tmp.type == 0) return u__tmp;\
+ if (u__tmp.type == 0) { return u__tmp; }\
u__tmp.value = NS ## generic_vec_at(uv__tmp.value, i__tmp); return u__tmp; }\
static inline NS ## string_t T ## _union_vec_at_as_string(T ## _union_vec_t uv__tmp, size_t i__tmp)\
{ return (NS ## string_t) NS ## generic_vec_at_as_string(uv__tmp.value, i__tmp); }\
@@ -810,7 +810,7 @@ static inline N ## _union_vec_ref_t N ## _vec_clone(NS ## builder_t *B, N ##_uni
_uvref.type = flatcc_builder_refmap_find(B, vec.type); _uvref.value = flatcc_builder_refmap_find(B, vec.value);\
_len = N ## _union_vec_len(vec); if (_uvref.type == 0) {\
_uvref.type = flatcc_builder_refmap_insert(B, vec.type, (flatcc_builder_create_type_vector(B, vec.type, _len))); }\
- if (_uvref.type == 0) return _ret; if (_uvref.value == 0) {\
+ if (_uvref.type == 0) { return _ret; } if (_uvref.value == 0) {\
if (flatcc_builder_start_offset_vector(B)) return _ret;\
for (_i = 0; _i < _len; ++_i) { _uref = N ## _clone(B, N ## _union_vec_at(vec, _i));\
if (!_uref.value || !(flatcc_builder_offset_vector_push(B, _uref.value))) return _ret; }\
@@ -915,11 +915,11 @@ __flatbuffers_build_offset_vector(NS, NS ## string)
static inline T *N ## _array_copy(T *p, const T *p2, size_t n)\
{ memcpy(p, p2, n * sizeof(T)); return p; }\
static inline T *N ## _array_copy_from_pe(T *p, const T *p2, size_t n)\
- { size_t i; if (NS ## is_native_pe()) memcpy(p, p2, n * sizeof(T)); else\
- for (i = 0; i < n; ++i) N ## _copy_from_pe(&p[i], &p2[i]); return p; }\
+ { size_t i; if (NS ## is_native_pe()) { memcpy(p, p2, n * sizeof(T)); } else\
+ { for (i = 0; i < n; ++i) { N ## _copy_from_pe(&p[i], &p2[i]); } } return p; }\
static inline T *N ## _array_copy_to_pe(T *p, const T *p2, size_t n)\
- { size_t i; if (NS ## is_native_pe()) memcpy(p, p2, n * sizeof(T)); else\
- for (i = 0; i < n; ++i) N ## _copy_to_pe(&p[i], &p2[i]); return p; }
+ { size_t i; if (NS ## is_native_pe()) { memcpy(p, p2, n * sizeof(T)); } else\
+ { for (i = 0; i < n; ++i) { N ## _copy_to_pe(&p[i], &p2[i]); } } return p; }
#define __flatbuffers_define_scalar_primitives(NS, N, T)\
static inline T *N ## _from_pe(T *p) { return __ ## NS ## from_pe(p, N); }\
static inline T *N ## _to_pe(T *p) { return __ ## NS ## to_pe(p, N); }\
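The flatcc-generated macros above gain braces around single-statement if bodies ({ return u__tmp; } and so on), presumably to satisfy the stricter toolchains referenced in the PR title; braced bodies also avoid the classic hazards of unbraced if statements expanded from macros. A hypothetical illustration (not flatcc code):

// An unbraced if inside a macro interacts badly with an else at the call
// site: depending on where the semicolon lands you get either a dangling-else
// binding or a syntax error. Braced bodies inside do { ... } while (0) keep
// the statement grouping explicit at every expansion site.
#define RESET_IF_NEGATIVE(x) if ((x) < 0) (x) = 0
#define RESET_IF_NEGATIVE_SAFE(x) \
  do {                            \
    if ((x) < 0) { (x) = 0; }     \
  } while (0)

int Normalize(int value, int enabled) {
  if (enabled)
    RESET_IF_NEGATIVE_SAFE(value);
  else
    value = -1;  // with the unbraced macro this else would not parse as intended
  return value;
}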
7 changes: 4 additions & 3 deletions python/tests/test_c_buffer.py
@@ -49,10 +49,11 @@ def test_c_buffer_constructor():
def test_c_buffer_unsupported_format():
empty = CBuffer.empty()

- with pytest.raises(ValueError, match="Can't convert format '>i' to Arrow type"):
-     if sys.byteorder == "little":
+ if sys.byteorder == "little":
+     with pytest.raises(ValueError, match="Can't convert format '>i' to Arrow type"):
empty._set_format(">i")
-     else:
+ else:
+     with pytest.raises(ValueError, match="Can't convert format '<i' to Arrow type"):
empty._set_format("<i")

with pytest.raises(ValueError, match=r"Unsupported Arrow type_id"):
4 changes: 2 additions & 2 deletions src/nanoarrow/array_test.cc
@@ -848,9 +848,9 @@ TEST(ArrayTest, ArrayTestAppendToFixedSizeBinaryArray) {
ASSERT_EQ(ArrowArrayReserve(&array, 5), NANOARROW_OK);
EXPECT_EQ(ArrowArrayBuffer(&array, 1)->capacity_bytes, 5 * 5);

- EXPECT_EQ(ArrowArrayAppendBytes(&array, {"12345", 5}), NANOARROW_OK);
+ EXPECT_EQ(ArrowArrayAppendBytes(&array, {{"12345"}, 5}), NANOARROW_OK);
EXPECT_EQ(ArrowArrayAppendNull(&array, 2), NANOARROW_OK);
- EXPECT_EQ(ArrowArrayAppendBytes(&array, {"67890", 5}), NANOARROW_OK);
+ EXPECT_EQ(ArrowArrayAppendBytes(&array, {{"67890"}, 5}), NANOARROW_OK);
EXPECT_EQ(ArrowArrayAppendEmpty(&array, 1), NANOARROW_OK);
EXPECT_EQ(ArrowArrayFinishBuildingDefault(&array, nullptr), NANOARROW_OK);

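The extra braces in these ArrowArrayAppendBytes calls initialize the view's first member explicitly; in the nanoarrow C API that first member is a union, and some compilers emit missing-braces-style warnings when the inner braces are elided. A generic sketch of the pattern (the struct below is illustrative, not the real ArrowBufferView):

#include <cstdint>

// Struct whose first member is a union of pointer views, loosely modeled on
// a buffer view type; not the actual nanoarrow definition.
struct View {
  union {
    const void* data;
    const char* as_char;
  } data;
  int64_t size_bytes;
};

// With brace elision, {"12345", 5} works but can draw a -Wmissing-braces
// warning; {{"12345"}, 5} gives the union member its own braces.
View with_inner_braces = {{"12345"}, 5};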
21 changes: 12 additions & 9 deletions src/nanoarrow/buffer_test.cc
@@ -29,6 +29,7 @@
static uint8_t* TestAllocatorReallocate(struct ArrowBufferAllocator* allocator,
uint8_t* ptr, int64_t old_size,
int64_t new_size) {
+ NANOARROW_UNUSED(allocator);
uint8_t* new_ptr = reinterpret_cast<uint8_t*>(malloc(new_size));

int64_t copy_size = std::min<int64_t>(old_size, new_size);
@@ -45,6 +46,8 @@ static uint8_t* TestAllocatorReallocate(struct ArrowBufferAllocator* allocator,

static void TestAllocatorFree(struct ArrowBufferAllocator* allocator, uint8_t* ptr,
int64_t size) {
+ NANOARROW_UNUSED(allocator);
+ NANOARROW_UNUSED(size);
free(ptr);
}

@@ -188,31 +191,31 @@ TEST(BufferTest, BufferTestAppendHelpers) {
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendUInt8(&buffer, 123), NANOARROW_OK);
- EXPECT_EQ(reinterpret_cast<uint8_t*>(buffer.data)[0], 123);
+ EXPECT_EQ(reinterpret_cast<uint8_t*>(buffer.data)[0], 123U);
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendInt16(&buffer, 123), NANOARROW_OK);
EXPECT_EQ(reinterpret_cast<int16_t*>(buffer.data)[0], 123);
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendUInt16(&buffer, 123), NANOARROW_OK);
- EXPECT_EQ(reinterpret_cast<uint16_t*>(buffer.data)[0], 123);
+ EXPECT_EQ(reinterpret_cast<uint16_t*>(buffer.data)[0], 123U);
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendInt32(&buffer, 123), NANOARROW_OK);
EXPECT_EQ(reinterpret_cast<int32_t*>(buffer.data)[0], 123);
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendUInt32(&buffer, 123), NANOARROW_OK);
- EXPECT_EQ(reinterpret_cast<uint32_t*>(buffer.data)[0], 123);
+ EXPECT_EQ(reinterpret_cast<uint32_t*>(buffer.data)[0], 123U);
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendInt64(&buffer, 123), NANOARROW_OK);
EXPECT_EQ(reinterpret_cast<int64_t*>(buffer.data)[0], 123);
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendUInt64(&buffer, 123), NANOARROW_OK);
- EXPECT_EQ(reinterpret_cast<uint64_t*>(buffer.data)[0], 123);
+ EXPECT_EQ(reinterpret_cast<uint64_t*>(buffer.data)[0], 123U);
ArrowBufferReset(&buffer);

EXPECT_EQ(ArrowBufferAppendDouble(&buffer, 123), NANOARROW_OK);
@@ -241,7 +244,7 @@ TEST(BitmapTest, BitmapTestElement) {
uint8_t bitmap[10];

memset(bitmap, 0xff, sizeof(bitmap));
- for (int i = 0; i < sizeof(bitmap) * 8; i++) {
+ for (size_t i = 0; i < sizeof(bitmap) * 8; i++) {
EXPECT_EQ(ArrowBitGet(bitmap, i), 1);
}

@@ -256,7 +259,7 @@ TEST(BitmapTest, BitmapTestElement) {
EXPECT_EQ(ArrowBitGet(bitmap, 16 + 7), 1);

memset(bitmap, 0x00, sizeof(bitmap));
- for (int i = 0; i < sizeof(bitmap) * 8; i++) {
+ for (size_t i = 0; i < sizeof(bitmap) * 8; i++) {
EXPECT_EQ(ArrowBitGet(bitmap, i), 0);
}

@@ -271,7 +274,7 @@ TEST(BitmapTest, BitmapTestElement) {
EXPECT_EQ(ArrowBitGet(bitmap, 16 + 7), 0);
}

- template <int offset, int length>
+ template <int offset, size_t length>
void TestArrowBitmapUnpackUnsafe(const uint8_t* bitmap, std::vector<int8_t> expected) {
int8_t out[length];
int32_t out32[length];
@@ -281,12 +284,12 @@ void TestArrowBitmapUnpackUnsafe(const uint8_t* bitmap, std::vector<int8_t> expe
ASSERT_EQ(length, expected.size());

ArrowBitsUnpackInt8(bitmap, offset, length, out);
- for (int i = 0; i < length; i++) {
+ for (size_t i = 0; i < length; i++) {
EXPECT_EQ(out[i], expected[i]);
}

ArrowBitsUnpackInt32(bitmap, offset, length, out32);
- for (int i = 0; i < length; i++) {
+ for (size_t i = 0; i < length; i++) {
EXPECT_EQ(out32[i], expected[i]);
}
}
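Several files in this PR mark intentionally unused callback parameters with NANOARROW_UNUSED rather than dropping them, since the allocator interface fixes the signature. The macro's definition lives in the nanoarrow headers; an equivalent cast-to-void idiom (assumed here, not copied from the library) looks like this:

#include <cstdint>
#include <cstdlib>

// Assumed definition for illustration; see nanoarrow.h for the real one.
#define NANOARROW_UNUSED(x) (void)(x)

struct ArrowBufferAllocator;  // opaque here; defined by the nanoarrow C API

// A free callback must accept (allocator, ptr, size) even when it only needs
// ptr; casting the extras to void silences -Wunused-parameter under -Werror.
void ExampleFree(struct ArrowBufferAllocator* allocator, uint8_t* ptr,
                 int64_t size) {
  NANOARROW_UNUSED(allocator);
  NANOARROW_UNUSED(size);
  std::free(ptr);
}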
2 changes: 2 additions & 0 deletions src/nanoarrow/integration/c_data_integration.cc
@@ -28,6 +28,7 @@ static int64_t kBytesAllocated = 0;

static uint8_t* IntegrationTestReallocate(ArrowBufferAllocator* allocator, uint8_t* ptr,
int64_t old_size, int64_t new_size) {
+ NANOARROW_UNUSED(allocator);
ArrowBufferAllocator default_allocator = ArrowBufferAllocatorDefault();
kBytesAllocated -= old_size;
uint8_t* out =
@@ -41,6 +42,7 @@ static uint8_t* IntegrationTestReallocate(ArrowBufferAllocator* allocator, uint8

static void IntegrationTestFree(struct ArrowBufferAllocator* allocator, uint8_t* ptr,
int64_t size) {
+ NANOARROW_UNUSED(allocator);
ArrowBufferAllocator default_allocator = ArrowBufferAllocatorDefault();
kBytesAllocated -= size;
default_allocator.free(&default_allocator, ptr, size);
2 changes: 2 additions & 0 deletions src/nanoarrow/nanoarrow.hpp
@@ -244,6 +244,8 @@ class Unique {
template <typename T>
static inline void DeallocateWrappedBuffer(struct ArrowBufferAllocator* allocator,
uint8_t* ptr, int64_t size) {
+ NANOARROW_UNUSED(ptr);
+ NANOARROW_UNUSED(size);
auto obj = reinterpret_cast<T*>(allocator->private_data);
delete obj;
}
2 changes: 2 additions & 0 deletions src/nanoarrow/nanoarrow_hpp_test.cc
@@ -15,6 +15,8 @@
// specific language governing permissions and limitations
// under the License.

+ #include <array>

#include <gtest/gtest.h>

#include "nanoarrow/nanoarrow.hpp"
22 changes: 13 additions & 9 deletions src/nanoarrow/nanoarrow_testing.hpp
@@ -1105,7 +1105,7 @@ class TestingJSONReader {
} else if (num_batch == kNumBatchReadAll) {
batch_ids.resize(batches.size());
std::iota(batch_ids.begin(), batch_ids.end(), 0);
- } else if (num_batch >= 0 && num_batch < batches.size()) {
+ } else if (num_batch >= 0 && static_cast<size_t>(num_batch) < batches.size()) {
batch_ids.push_back(num_batch);
} else {
ArrowErrorSet(error, "Expected num_batch between 0 and %d but got %d",
@@ -1887,8 +1887,9 @@ class TestingJSONReader {
const auto& columns = value["columns"];
NANOARROW_RETURN_NOT_OK(
Check(columns.is_array(), error, "RecordBatch columns must be array"));
- NANOARROW_RETURN_NOT_OK(Check(columns.size() == array_view->n_children, error,
- "RecordBatch children has incorrect size"));
+ NANOARROW_RETURN_NOT_OK(
+ Check(columns.size() == static_cast<size_t>(array_view->n_children), error,
+ "RecordBatch children has incorrect size"));

for (int64_t i = 0; i < array_view->n_children; i++) {
NANOARROW_RETURN_NOT_OK(SetArrayColumn(columns[i], schema->children[i],
@@ -1987,8 +1988,9 @@ class TestingJSONReader {
const auto& children = value["children"];
NANOARROW_RETURN_NOT_OK(
Check(children.is_array(), error, error_prefix + "children must be array"));
- NANOARROW_RETURN_NOT_OK(Check(children.size() == array_view->n_children, error,
- error_prefix + "children has incorrect size"));
+ NANOARROW_RETURN_NOT_OK(
+ Check(children.size() == static_cast<size_t>(array_view->n_children), error,
+ error_prefix + "children has incorrect size"));

for (int64_t i = 0; i < array_view->n_children; i++) {
NANOARROW_RETURN_NOT_OK(SetArrayColumn(children[i], schema->children[i],
@@ -2272,7 +2274,7 @@ class TestingJSONReader {
// Check offsets against values
const T* expected_offset = reinterpret_cast<const T*>(offsets->data);
NANOARROW_RETURN_NOT_OK(Check(
- offsets->size_bytes == ((value.size() + 1) * sizeof(T)), error,
+ static_cast<size_t>(offsets->size_bytes) == ((value.size() + 1) * sizeof(T)),
+ error,
"Expected offset buffer with " + std::to_string(value.size()) + " elements"));
NANOARROW_RETURN_NOT_OK(
Check(*expected_offset++ == 0, error, "first offset must be zero"));
@@ -2310,7 +2313,8 @@ class TestingJSONReader {
// Check offsets against values if not fixed size
const T* expected_offset = reinterpret_cast<const T*>(offsets->data);
NANOARROW_RETURN_NOT_OK(Check(
- offsets->size_bytes == ((value.size() + 1) * sizeof(T)), error,
+ static_cast<size_t>(offsets->size_bytes) == ((value.size() + 1) * sizeof(T)),
+ error,
"Expected offset buffer with " + std::to_string(value.size()) + " elements"));
NANOARROW_RETURN_NOT_OK(
Check(*expected_offset++ == 0, error, "first offset must be zero"));
@@ -2355,7 +2359,7 @@ class TestingJSONReader {
Check(item.is_string(), error, "binary data buffer item must be string"));
auto item_str = item.get<std::string>();

- int64_t item_size_bytes = item_str.size() / 2;
+ size_t item_size_bytes = item_str.size() / 2;
NANOARROW_RETURN_NOT_OK(Check((item_size_bytes * 2) == item_str.size(), error,
"binary data buffer item must have even size"));

@@ -2502,7 +2506,7 @@ class TestingJSONComparison {

public:
/// \brief Returns the number of differences found by the previous call
- size_t num_differences() const { return differences_.size(); }
+ int64_t num_differences() const { return differences_.size(); }

/// \brief Dump a human-readable summary of differences to out
void WriteDifferences(std::ostream& out) {
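Most hunks in this header follow one pattern: an int64_t count from the C-level structs (n_children, size_bytes) is compared against a size_t from the JSON/STL side, so the signed operand is cast explicitly where it is known to be non-negative. A minimal sketch with illustrative names (not library code):

#include <cstddef>
#include <cstdint>
#include <vector>

// Compare a non-negative int64_t count from a C-style struct against an STL
// container size without tripping -Wsign-compare.
bool ChildCountMatches(int64_t n_children, const std::vector<int>& columns) {
  if (n_children < 0) {
    return false;  // guard before converting to an unsigned type
  }
  return columns.size() == static_cast<size_t>(n_children);
}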