diff --git a/dev/benchmarks/c/array_benchmark.cc b/dev/benchmarks/c/array_benchmark.cc
index d81b80d46..9e028fcb2 100644
--- a/dev/benchmarks/c/array_benchmark.cc
+++ b/dev/benchmarks/c/array_benchmark.cc
@@ -231,7 +231,7 @@ static void BenchmarkArrayViewGetString(benchmark::State& state) {
   int64_t n_alphabets = n_values / alphabet.size() + 1;
   std::vector<char> data(alphabet.size() * n_alphabets);
 
-  for (int64_t data_pos = 0; data_pos < data.size(); data_pos += alphabet.size()) {
+  for (size_t data_pos = 0; data_pos < data.size(); data_pos += alphabet.size()) {
     memcpy(data.data() + data_pos, alphabet.data(), alphabet.size());
   }
 
@@ -262,7 +262,7 @@ static ArrowErrorCode CreateAndAppendToArrayInt(ArrowArray* array,
   NANOARROW_RETURN_NOT_OK(ArrowArrayInitFromType(array, type));
   NANOARROW_RETURN_NOT_OK(ArrowArrayStartAppending(array));
 
-  for (int64_t i = 0; i < values.size(); i++) {
+  for (size_t i = 0; i < values.size(); i++) {
     NANOARROW_RETURN_NOT_OK(ArrowArrayAppendInt(array, values[i]));
   }
 
@@ -293,7 +293,7 @@ static void BenchmarkArrayAppendString(benchmark::State& state) {
   int64_t value_size = 7;
   std::vector<std::string> values(n_values);
 
-  int64_t alphabet_pos = 0;
+  size_t alphabet_pos = 0;
   for (std::string& value : values) {
     if ((alphabet_pos + value_size) >= kAlphabet.size()) {
       alphabet_pos = 0;
@@ -361,7 +361,7 @@ static ArrowErrorCode CreateAndAppendIntWithNulls(ArrowArray* array,
   NANOARROW_RETURN_NOT_OK(ArrowArrayStartAppending(array));
   CType non_null_value = std::numeric_limits<CType>::max() / 2;
 
-  for (int64_t i = 0; i < validity.size(); i++) {
+  for (size_t i = 0; i < validity.size(); i++) {
     if (validity[i]) {
       NANOARROW_RETURN_NOT_OK(ArrowArrayAppendInt(array, non_null_value));
     } else {
diff --git a/extensions/nanoarrow_ipc/src/nanoarrow/nanoarrow_ipc_flatcc_generated.h b/extensions/nanoarrow_ipc/src/nanoarrow/nanoarrow_ipc_flatcc_generated.h
index 93a540e1d..8237e99b9 100644
--- a/extensions/nanoarrow_ipc/src/nanoarrow/nanoarrow_ipc_flatcc_generated.h
+++ b/extensions/nanoarrow_ipc/src/nanoarrow/nanoarrow_ipc_flatcc_generated.h
@@ -189,7 +189,7 @@ static inline int N ## _ ## NK ## _is_present(N ## _table_t t__tmp)\
 __## NS ## field_present(ID, t__tmp)\
 static inline T ## _union_t N ## _ ## NK ## _union(N ## _table_t t__tmp)\
 { T ## _union_t u__tmp = { 0, 0 }; u__tmp.type = N ## _ ## NK ## _type_get(t__tmp);\
-  if (u__tmp.type == 0) return u__tmp; u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
+  if (u__tmp.type == 0) { return u__tmp; } u__tmp.value = N ## _ ## NK ## _get(t__tmp); return u__tmp; }\
 static inline NS ## string_t N ## _ ## NK ## _as_string(N ## _table_t t__tmp)\
 { return NS ## string_cast_from_generic(N ## _ ## NK ## _get(t__tmp)); }\
 
@@ -200,7 +200,7 @@ static inline T ## _union_t T ## _union_vec_at(T ## _union_vec_t uv__tmp, size_t
 { T ## _union_t u__tmp = { 0, 0 }; size_t n__tmp = NS ## vec_len(uv__tmp.type);\
 FLATCC_ASSERT(n__tmp > (i__tmp) && "index out of range"); u__tmp.type = uv__tmp.type[i__tmp];\
 /* Unknown type is treated as NONE for schema evolution. */\
-if (u__tmp.type == 0) return u__tmp;\
+if (u__tmp.type == 0) { return u__tmp; }\
 u__tmp.value = NS ## generic_vec_at(uv__tmp.value, i__tmp); return u__tmp; }\
 static inline NS ## string_t T ## _union_vec_at_as_string(T ## _union_vec_t uv__tmp, size_t i__tmp)\
 { return (NS ## string_t) NS ## generic_vec_at_as_string(uv__tmp.value, i__tmp); }\
@@ -810,7 +810,7 @@ static inline N ## _union_vec_ref_t N ## _vec_clone(NS ## builder_t *B, N ##_uni
 _uvref.type = flatcc_builder_refmap_find(B, vec.type); _uvref.value = flatcc_builder_refmap_find(B, vec.value);\
 _len = N ## _union_vec_len(vec); if (_uvref.type == 0) {\
 _uvref.type = flatcc_builder_refmap_insert(B, vec.type, (flatcc_builder_create_type_vector(B, vec.type, _len))); }\
-if (_uvref.type == 0) return _ret; if (_uvref.value == 0) {\
+if (_uvref.type == 0) { return _ret; } if (_uvref.value == 0) {\
 if (flatcc_builder_start_offset_vector(B)) return _ret;\
 for (_i = 0; _i < _len; ++_i) { _uref = N ## _clone(B, N ## _union_vec_at(vec, _i));\
 if (!_uref.value || !(flatcc_builder_offset_vector_push(B, _uref.value))) return _ret; }\
@@ -915,11 +915,11 @@ __flatbuffers_build_offset_vector(NS, NS ## string)
 static inline T *N ## _array_copy(T *p, const T *p2, size_t n)\
 { memcpy(p, p2, n * sizeof(T)); return p; }\
 static inline T *N ## _array_copy_from_pe(T *p, const T *p2, size_t n)\
-{ size_t i; if (NS ## is_native_pe()) memcpy(p, p2, n * sizeof(T)); else\
-  for (i = 0; i < n; ++i) N ## _copy_from_pe(&p[i], &p2[i]); return p; }\
+{ size_t i; if (NS ## is_native_pe()) { memcpy(p, p2, n * sizeof(T)); } else\
+  { for (i = 0; i < n; ++i) { N ## _copy_from_pe(&p[i], &p2[i]); } } return p; }\
 static inline T *N ## _array_copy_to_pe(T *p, const T *p2, size_t n)\
-{ size_t i; if (NS ## is_native_pe()) memcpy(p, p2, n * sizeof(T)); else\
-  for (i = 0; i < n; ++i) N ## _copy_to_pe(&p[i], &p2[i]); return p; }
+{ size_t i; if (NS ## is_native_pe()) { memcpy(p, p2, n * sizeof(T)); } else\
+  { for (i = 0; i < n; ++i) { N ## _copy_to_pe(&p[i], &p2[i]); } } return p; }
 #define __flatbuffers_define_scalar_primitives(NS, N, T)\
 static inline T *N ## _from_pe(T *p) { return __ ## NS ## from_pe(p, N); }\
 static inline T *N ## _to_pe(T *p) { return __ ## NS ## to_pe(p, N); }\
diff --git a/python/tests/test_c_buffer.py b/python/tests/test_c_buffer.py
index b3105e1da..51944723b 100644
--- a/python/tests/test_c_buffer.py
+++ b/python/tests/test_c_buffer.py
@@ -49,10 +49,11 @@ def test_c_buffer_constructor():
 
 def test_c_buffer_unsupported_format():
     empty = CBuffer.empty()
-    with pytest.raises(ValueError, match="Can't convert format '>i' to Arrow type"):
-        if sys.byteorder == "little":
+    if sys.byteorder == "little":
+        with pytest.raises(ValueError, match="Can't convert format '>i' to Arrow type"):
             empty._set_format(">i")
-        else:
+    else:
+        with pytest.raises(ValueError, match="Can't convert format 'capacity_bytes, 5 * 5);
-  EXPECT_EQ(ArrowArrayAppendBytes(&array, {"12345", 5}), NANOARROW_OK);
+  EXPECT_EQ(ArrowArrayAppendBytes(&array, {{"12345"}, 5}), NANOARROW_OK);
   EXPECT_EQ(ArrowArrayAppendNull(&array, 2), NANOARROW_OK);
-  EXPECT_EQ(ArrowArrayAppendBytes(&array, {"67890", 5}), NANOARROW_OK);
+  EXPECT_EQ(ArrowArrayAppendBytes(&array, {{"67890"}, 5}), NANOARROW_OK);
   EXPECT_EQ(ArrowArrayAppendEmpty(&array, 1), NANOARROW_OK);
 
   EXPECT_EQ(ArrowArrayFinishBuildingDefault(&array, nullptr), NANOARROW_OK);
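The loop-counter changes above all target the same class of warning: comparing a signed `int64_t` index against the unsigned `size_t` returned by `std::vector::size()` trips `-Wsign-compare`. A minimal, self-contained sketch of the pattern and the fix (illustrative names only, not code from this patch):

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

int64_t SumFirstBytes(const std::vector<std::vector<char>>& chunks) {
  int64_t total = 0;
  // Before: for (int64_t i = 0; i < chunks.size(); i++) warns under -Wsign-compare,
  // because chunks.size() is an unsigned size_t.
  for (size_t i = 0; i < chunks.size(); i++) {
    if (!chunks[i].empty()) {
      total += chunks[i][0];
    }
  }
  return total;
}
```

Declaring the counter as `size_t` matches the type of `size()` and silences the warning without a cast.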
diff --git a/src/nanoarrow/buffer_test.cc b/src/nanoarrow/buffer_test.cc
index 4cf196b5d..5c14161db 100644
--- a/src/nanoarrow/buffer_test.cc
+++ b/src/nanoarrow/buffer_test.cc
@@ -29,6 +29,7 @@
 static uint8_t* TestAllocatorReallocate(struct ArrowBufferAllocator* allocator,
                                         uint8_t* ptr, int64_t old_size,
                                         int64_t new_size) {
+  NANOARROW_UNUSED(allocator);
   uint8_t* new_ptr = reinterpret_cast<uint8_t*>(malloc(new_size));
   int64_t copy_size = std::min(old_size, new_size);
@@ -45,6 +46,8 @@ static uint8_t* TestAllocatorReallocate(struct ArrowBufferAllocator* allocator,
 
 static void TestAllocatorFree(struct ArrowBufferAllocator* allocator, uint8_t* ptr,
                               int64_t size) {
+  NANOARROW_UNUSED(allocator);
+  NANOARROW_UNUSED(size);
   free(ptr);
 }
 
@@ -188,7 +191,7 @@ TEST(BufferTest, BufferTestAppendHelpers) {
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendUInt8(&buffer, 123), NANOARROW_OK);
-  EXPECT_EQ(reinterpret_cast<uint8_t*>(buffer.data)[0], 123);
+  EXPECT_EQ(reinterpret_cast<uint8_t*>(buffer.data)[0], 123U);
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendInt16(&buffer, 123), NANOARROW_OK);
@@ -196,7 +199,7 @@ TEST(BufferTest, BufferTestAppendHelpers) {
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendUInt16(&buffer, 123), NANOARROW_OK);
-  EXPECT_EQ(reinterpret_cast<uint16_t*>(buffer.data)[0], 123);
+  EXPECT_EQ(reinterpret_cast<uint16_t*>(buffer.data)[0], 123U);
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendInt32(&buffer, 123), NANOARROW_OK);
@@ -204,7 +207,7 @@ TEST(BufferTest, BufferTestAppendHelpers) {
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendUInt32(&buffer, 123), NANOARROW_OK);
-  EXPECT_EQ(reinterpret_cast<uint32_t*>(buffer.data)[0], 123);
+  EXPECT_EQ(reinterpret_cast<uint32_t*>(buffer.data)[0], 123U);
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendInt64(&buffer, 123), NANOARROW_OK);
@@ -212,7 +215,7 @@ TEST(BufferTest, BufferTestAppendHelpers) {
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendUInt64(&buffer, 123), NANOARROW_OK);
-  EXPECT_EQ(reinterpret_cast<uint64_t*>(buffer.data)[0], 123);
+  EXPECT_EQ(reinterpret_cast<uint64_t*>(buffer.data)[0], 123U);
   ArrowBufferReset(&buffer);
 
   EXPECT_EQ(ArrowBufferAppendDouble(&buffer, 123), NANOARROW_OK);
@@ -241,7 +244,7 @@ TEST(BitmapTest, BitmapTestElement) {
   uint8_t bitmap[10];
 
   memset(bitmap, 0xff, sizeof(bitmap));
-  for (int i = 0; i < sizeof(bitmap) * 8; i++) {
+  for (size_t i = 0; i < sizeof(bitmap) * 8; i++) {
     EXPECT_EQ(ArrowBitGet(bitmap, i), 1);
   }
 
@@ -256,7 +259,7 @@ TEST(BitmapTest, BitmapTestElement) {
   EXPECT_EQ(ArrowBitGet(bitmap, 16 + 7), 1);
 
   memset(bitmap, 0x00, sizeof(bitmap));
-  for (int i = 0; i < sizeof(bitmap) * 8; i++) {
+  for (size_t i = 0; i < sizeof(bitmap) * 8; i++) {
     EXPECT_EQ(ArrowBitGet(bitmap, i), 0);
   }
 
@@ -271,7 +274,7 @@ TEST(BitmapTest, BitmapTestElement) {
   EXPECT_EQ(ArrowBitGet(bitmap, 16 + 7), 0);
 }
 
-template
+template
 void TestArrowBitmapUnpackUnsafe(const uint8_t* bitmap, std::vector<int8_t> expected) {
   int8_t out[length];
   int32_t out32[length];
@@ -281,12 +284,12 @@ void TestArrowBitmapUnpackUnsafe(const uint8_t* bitmap, std::vector expe
   ASSERT_EQ(length, expected.size());
 
   ArrowBitsUnpackInt8(bitmap, offset, length, out);
-  for (int i = 0; i < length; i++) {
+  for (size_t i = 0; i < length; i++) {
     EXPECT_EQ(out[i], expected[i]);
   }
 
   ArrowBitsUnpackInt32(bitmap, offset, length, out32);
-  for (int i = 0; i < length; i++) {
+  for (size_t i = 0; i < length; i++) {
     EXPECT_EQ(out32[i], expected[i]);
   }
 }
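The allocator-callback hunks above add `NANOARROW_UNUSED(...)` for parameters that a fixed callback signature requires but the test implementation never reads, which silences `-Wunused-parameter`. A sketch of the idea, assuming the conventional cast-to-void definition for such a macro (the real definition lives in nanoarrow's headers and may differ):

```cpp
#include <cstdint>
#include <cstdlib>

// Hypothetical stand-in for nanoarrow's macro: marking a parameter as
// intentionally unused by casting it to void.
#define EXAMPLE_UNUSED(x) (void)(x)

struct ExampleAllocator;  // the callback signature carries context we don't need here

static void ExampleFree(ExampleAllocator* allocator, uint8_t* ptr, int64_t size) {
  EXAMPLE_UNUSED(allocator);  // required by the signature, not used by this allocator
  EXAMPLE_UNUSED(size);
  free(ptr);
}
```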
diff --git a/src/nanoarrow/integration/c_data_integration.cc b/src/nanoarrow/integration/c_data_integration.cc
index 6ab09ea71..3660af7c3 100644
--- a/src/nanoarrow/integration/c_data_integration.cc
+++ b/src/nanoarrow/integration/c_data_integration.cc
@@ -28,6 +28,7 @@ static int64_t kBytesAllocated = 0;
 
 static uint8_t* IntegrationTestReallocate(ArrowBufferAllocator* allocator, uint8_t* ptr,
                                           int64_t old_size, int64_t new_size) {
+  NANOARROW_UNUSED(allocator);
   ArrowBufferAllocator default_allocator = ArrowBufferAllocatorDefault();
   kBytesAllocated -= old_size;
   uint8_t* out =
@@ -41,6 +42,7 @@ static uint8_t* IntegrationTestReallocate(ArrowBufferAllocator* allocator, uint8
 
 static void IntegrationTestFree(struct ArrowBufferAllocator* allocator, uint8_t* ptr,
                                 int64_t size) {
+  NANOARROW_UNUSED(allocator);
   ArrowBufferAllocator default_allocator = ArrowBufferAllocatorDefault();
   kBytesAllocated -= size;
   default_allocator.free(&default_allocator, ptr, size);
diff --git a/src/nanoarrow/nanoarrow.hpp b/src/nanoarrow/nanoarrow.hpp
index 09a031511..aa8b248ae 100644
--- a/src/nanoarrow/nanoarrow.hpp
+++ b/src/nanoarrow/nanoarrow.hpp
@@ -244,6 +244,8 @@ class Unique {
 template <typename T>
 static inline void DeallocateWrappedBuffer(struct ArrowBufferAllocator* allocator,
                                            uint8_t* ptr, int64_t size) {
+  NANOARROW_UNUSED(ptr);
+  NANOARROW_UNUSED(size);
   auto obj = reinterpret_cast<T*>(allocator->private_data);
   delete obj;
 }
diff --git a/src/nanoarrow/nanoarrow_hpp_test.cc b/src/nanoarrow/nanoarrow_hpp_test.cc
index 4bbb8e9a6..779598593 100644
--- a/src/nanoarrow/nanoarrow_hpp_test.cc
+++ b/src/nanoarrow/nanoarrow_hpp_test.cc
@@ -15,6 +15,8 @@
 // specific language governing permissions and limitations
 // under the License.
 
+#include
+
 #include <gtest/gtest.h>
 
 #include "nanoarrow/nanoarrow.hpp"
diff --git a/src/nanoarrow/nanoarrow_testing.hpp b/src/nanoarrow/nanoarrow_testing.hpp
index 4e39ee09b..a7602b700 100644
--- a/src/nanoarrow/nanoarrow_testing.hpp
+++ b/src/nanoarrow/nanoarrow_testing.hpp
@@ -1105,7 +1105,7 @@ class TestingJSONReader {
     } else if (num_batch == kNumBatchReadAll) {
       batch_ids.resize(batches.size());
       std::iota(batch_ids.begin(), batch_ids.end(), 0);
-    } else if (num_batch >= 0 && num_batch < batches.size()) {
+    } else if (num_batch >= 0 && static_cast<size_t>(num_batch) < batches.size()) {
       batch_ids.push_back(num_batch);
     } else {
       ArrowErrorSet(error, "Expected num_batch between 0 and %d but got %d",
@@ -1887,8 +1887,9 @@ class TestingJSONReader {
     const auto& columns = value["columns"];
     NANOARROW_RETURN_NOT_OK(
         Check(columns.is_array(), error, "RecordBatch columns must be array"));
-    NANOARROW_RETURN_NOT_OK(Check(columns.size() == array_view->n_children, error,
-                                  "RecordBatch children has incorrect size"));
+    NANOARROW_RETURN_NOT_OK(
+        Check(columns.size() == static_cast<size_t>(array_view->n_children), error,
+              "RecordBatch children has incorrect size"));
 
     for (int64_t i = 0; i < array_view->n_children; i++) {
       NANOARROW_RETURN_NOT_OK(SetArrayColumn(columns[i], schema->children[i],
@@ -1987,8 +1988,9 @@ class TestingJSONReader {
     const auto& children = value["children"];
     NANOARROW_RETURN_NOT_OK(
         Check(children.is_array(), error, error_prefix + "children must be array"));
-    NANOARROW_RETURN_NOT_OK(Check(children.size() == array_view->n_children, error,
-                                  error_prefix + "children has incorrect size"));
+    NANOARROW_RETURN_NOT_OK(
+        Check(children.size() == static_cast<size_t>(array_view->n_children), error,
+              error_prefix + "children has incorrect size"));
 
     for (int64_t i = 0; i < array_view->n_children; i++) {
       NANOARROW_RETURN_NOT_OK(SetArrayColumn(children[i], schema->children[i],
@@ -2272,7 +2274,8 @@ class TestingJSONReader {
     // Check offsets against values
     const T* expected_offset = reinterpret_cast<const T*>(offsets->data);
     NANOARROW_RETURN_NOT_OK(Check(
-        offsets->size_bytes == ((value.size() + 1) * sizeof(T)), error,
+        static_cast<size_t>(offsets->size_bytes) == ((value.size() + 1) * sizeof(T)),
+        error,
         "Expected offset buffer with " + std::to_string(value.size()) + " elements"));
     NANOARROW_RETURN_NOT_OK(
         Check(*expected_offset++ == 0, error, "first offset must be zero"));
@@ -2310,7 +2313,8 @@ class TestingJSONReader {
     // Check offsets against values if not fixed size
     const T* expected_offset = reinterpret_cast<const T*>(offsets->data);
     NANOARROW_RETURN_NOT_OK(Check(
-        offsets->size_bytes == ((value.size() + 1) * sizeof(T)), error,
+        static_cast<size_t>(offsets->size_bytes) == ((value.size() + 1) * sizeof(T)),
+        error,
         "Expected offset buffer with " + std::to_string(value.size()) + " elements"));
     NANOARROW_RETURN_NOT_OK(
         Check(*expected_offset++ == 0, error, "first offset must be zero"));
@@ -2355,7 +2359,7 @@ class TestingJSONReader {
           Check(item.is_string(), error, "binary data buffer item must be string"));
       auto item_str = item.get<std::string>();
 
-      int64_t item_size_bytes = item_str.size() / 2;
+      size_t item_size_bytes = item_str.size() / 2;
       NANOARROW_RETURN_NOT_OK(Check((item_size_bytes * 2) == item_str.size(), error,
                                     "binary data buffer item must have even size"));
 
@@ -2502,7 +2506,7 @@ class TestingJSONComparison {
  public:
   /// \brief Returns the number of differences found by the previous call
-  size_t num_differences() const { return differences_.size(); }
+  int64_t num_differences() const { return differences_.size(); }
 
   /// \brief Dump a human-readable summary of differences to out
   void WriteDifferences(std::ostream& out) {
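The test changes that follow replace the `std::function` parameters of `TestWriteJSON` with plain function pointers and let callers pass `nullptr` instead of a do-nothing lambda. A small sketch of that calling convention (hypothetical names, not the library's API):

```cpp
#include <cstdio>

using SetupFn = int (*)(int* value);  // plain function pointer instead of std::function

// Callers may pass nullptr to mean "no setup step".
int RunWithSetup(SetupFn setup, int initial) {
  int value = initial;
  if (setup != nullptr) {
    if (setup(&value) != 0) {
      return -1;  // propagate setup failure
    }
  }
  return value;
}

int DoubleIt(int* value) {
  *value *= 2;
  return 0;
}

int main() {
  // Capture-less lambdas also convert to function pointers, which is why the
  // existing test lambdas keep working after the signature change.
  std::printf("%d %d\n", RunWithSetup(&DoubleIt, 21), RunWithSetup(nullptr, 7));
  return 0;
}
```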
diff --git a/src/nanoarrow/nanoarrow_testing_test.cc b/src/nanoarrow/nanoarrow_testing_test.cc
index ce1e873e3..f7b484442 100644
--- a/src/nanoarrow/nanoarrow_testing_test.cc
+++ b/src/nanoarrow/nanoarrow_testing_test.cc
@@ -15,7 +15,6 @@
 // specific language governing permissions and limitations
 // under the License.
 
-#include <functional>
 #include
 #include
@@ -39,21 +38,24 @@ ArrowErrorCode WriteColumnJSON(std::ostream& out, TestingJSONWriter& writer,
 
 ArrowErrorCode WriteSchemaJSON(std::ostream& out, TestingJSONWriter& writer,
                                const ArrowSchema* schema, ArrowArrayView* array_view) {
+  NANOARROW_UNUSED(array_view);
   return writer.WriteSchema(out, schema);
 }
 
 ArrowErrorCode WriteFieldJSON(std::ostream& out, TestingJSONWriter& writer,
                               const ArrowSchema* schema, ArrowArrayView* array_view) {
+  NANOARROW_UNUSED(array_view);
   return writer.WriteField(out, schema);
 }
 
 ArrowErrorCode WriteTypeJSON(std::ostream& out, TestingJSONWriter& writer,
                              const ArrowSchema* schema, ArrowArrayView* array_view) {
+  NANOARROW_UNUSED(array_view);
   return writer.WriteType(out, schema);
 }
 
-void TestWriteJSON(std::function<ArrowErrorCode(ArrowSchema*)> type_expr,
-                   std::function<ArrowErrorCode(ArrowArray*)> append_expr,
+void TestWriteJSON(ArrowErrorCode (*type_expr)(ArrowSchema*),
+                   ArrowErrorCode (*append_expr)(ArrowArray*),
                    ArrowErrorCode (*test_expr)(std::ostream&, TestingJSONWriter&,
                                                const ArrowSchema*, ArrowArrayView*),
                    const std::string& expected_json,
@@ -61,11 +63,16 @@ void TestWriteJSON(std::function<ArrowErrorCode(ArrowSchema*)> type_expr,
   std::stringstream ss;
 
   nanoarrow::UniqueSchema schema;
-  ASSERT_EQ(type_expr(schema.get()), NANOARROW_OK);
+  if (type_expr != nullptr) {
+    ASSERT_EQ(type_expr(schema.get()), NANOARROW_OK);
+  }
+
   nanoarrow::UniqueArray array;
   ASSERT_EQ(ArrowArrayInitFromSchema(array.get(), schema.get(), nullptr), NANOARROW_OK);
   ASSERT_EQ(ArrowArrayStartAppending(array.get()), NANOARROW_OK);
-  ASSERT_EQ(append_expr(array.get()), NANOARROW_OK);
+  if (append_expr != nullptr) {
+    ASSERT_EQ(append_expr(array.get()), NANOARROW_OK);
+  }
   ASSERT_EQ(ArrowArrayFinishBuildingDefault(array.get(), nullptr), NANOARROW_OK);
 
   nanoarrow::UniqueArrayView array_view;
@@ -87,8 +94,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestColumnNull) {
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_NA);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteColumnJSON,
-      R"({"name": null, "count": 0})");
+      /*append_expr*/ nullptr, &WriteColumnJSON, R"({"name": null, "count": 0})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -96,8 +102,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestColumnNull) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaInitFromType(schema, NANOARROW_TYPE_NA));
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetName(schema, "colname"));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteColumnJSON,
-      R"({"name": "colname", "count": 0})");
+      /*append_expr*/ nullptr, &WriteColumnJSON, R"({"name": "colname", "count": 0})");
 }
 
 TEST(NanoarrowTestingTest, NanoarrowTestingTestColumnInt) {
@@ -105,7 +110,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestColumnInt) {
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_INT32);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteColumnJSON,
+      /*append_expr*/ nullptr, &WriteColumnJSON,
       R"({"name": null, "count": 0, "VALIDITY": [], "DATA": []})");
 
   // Without a null value
@@ -304,7 +309,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestColumnStruct) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetTypeStruct(schema, 0));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteColumnJSON,
+      /*append_expr*/ nullptr, &WriteColumnJSON,
      R"({"name": null, "count": 0, "VALIDITY": [], "children": []})");
 
   // Non-empty struct
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -320,7 +325,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestColumnStruct) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetName(schema->children[1], "col2"));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteColumnJSON,
+      /*append_expr*/ nullptr, &WriteColumnJSON,
       R"({"name": null, "count": 0, "VALIDITY": [], "children": [)"
       R"({"name": "col1", "count": 0}, {"name": "col2", "count": 0}]})");
 }
@@ -334,7 +339,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestColumnDenseUnion) {
             ArrowSchemaSetTypeUnion(schema, NANOARROW_TYPE_DENSE_UNION, 0));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteColumnJSON,
+      /*append_expr*/ nullptr, &WriteColumnJSON,
       R"({"name": null, "count": 0, "TYPE_ID": [], "OFFSET": [], "children": []})");
 }
 
@@ -346,8 +351,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestBatch) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetTypeStruct(schema, 0));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteBatchJSON,
-      R"({"count": 0, "columns": []})");
+      /*append_expr*/ nullptr, &WriteBatchJSON, R"({"count": 0, "columns": []})");
 }
 
 TEST(NanoarrowTestingTest, NanoarrowTestingTestSchema) {
@@ -358,8 +362,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestSchema) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetTypeStruct(schema, 0));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteSchemaJSON,
-      R"({"fields": []})");
+      /*append_expr*/ nullptr, &WriteSchemaJSON, R"({"fields": []})");
 
   // More than zero fields
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -372,7 +375,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestSchema) {
             ArrowSchemaSetType(schema->children[1], NANOARROW_TYPE_STRING));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteSchemaJSON,
+      /*append_expr*/ nullptr, &WriteSchemaJSON,
       R"({"fields": [)"
       R"({"name": null, "nullable": true, "type": {"name": "null"}, "children": []}, )"
       R"({"name": null, "nullable": true, "type": {"name": "utf8"}, "children": []}]})");
 }
 
@@ -384,7 +387,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldBasic) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaInitFromType(schema, NANOARROW_TYPE_NA));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": null, "nullable": true, "type": {"name": "null"}, "children": []})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -393,7 +396,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldBasic) {
         schema->flags = 0;
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": null, "nullable": false, "type": {"name": "null"}, "children": []})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -402,7 +405,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldBasic) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetName(schema, "colname"));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": "colname", "nullable": true, "type": {"name": "null"}, "children": []})");
 }
 
@@ -415,7 +418,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldDict) {
             ArrowSchemaInitFromType(schema->dictionary, NANOARROW_TYPE_STRING));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": null, "nullable": true, "type": {"name": "utf8"}, )"
       R"("dictionary": {"id": 0, "indexType": {"name": "int", "bitWidth": 16, "isSigned": true}, )"
       R"("isOrdered": false}, "children": []})");
 
@@ -428,7 +431,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldMetadata) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaInitFromType(schema, NANOARROW_TYPE_NA));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": null, "nullable": true, "type": {"name": "null"}, "children": []})");
 
   // Non-null but zero-size metadata
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -438,7 +441,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldMetadata) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetMetadata(schema, "\0\0\0\0"));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": null, "nullable": true, "type": {"name": "null"}, "children": [], "metadata": []})");
 
   // Non-zero size metadata
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -456,7 +459,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldMetadata) {
             ArrowSchemaSetMetadata(schema, reinterpret_cast<const char*>(buffer->data)));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": null, "nullable": true, "type": {"name": "null"}, "children": [], )"
       R"("metadata": [{"key": "k1", "value": "v1"}, {"key": "k2", "value": "v2"}]})");
 }
 
@@ -472,7 +475,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestFieldNested) {
             ArrowSchemaSetType(schema->children[1], NANOARROW_TYPE_STRING));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteFieldJSON,
+      /*append_expr*/ nullptr, &WriteFieldJSON,
       R"({"name": null, "nullable": true, "type": {"name": "struct"}, "children": [)"
       R"({"name": null, "nullable": true, "type": {"name": "null"}, "children": []}, )"
      R"({"name": null, "nullable": true, "type": {"name": "utf8"}, "children": []}]})");
 }
 
@@ -483,78 +486,72 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypePrimitive) {
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_NA);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "null"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "null"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_BOOL);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "bool"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "bool"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_INT8);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "int", "bitWidth": 8, "isSigned": true})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_UINT8);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "int", "bitWidth": 8, "isSigned": false})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_HALF_FLOAT);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
      R"({"name": "floatingpoint", "precision": "HALF"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_FLOAT);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "floatingpoint", "precision": "SINGLE"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_DOUBLE);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "floatingpoint", "precision": "DOUBLE"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_STRING);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "utf8"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "utf8"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_LARGE_STRING);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "largeutf8"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "largeutf8"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_BINARY);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "binary"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "binary"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
         return ArrowSchemaInitFromType(schema, NANOARROW_TYPE_LARGE_BINARY);
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "largebinary"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "largebinary"})");
 }
 
 TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
@@ -565,7 +562,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
             ArrowSchemaSetTypeFixedSize(schema, NANOARROW_TYPE_FIXED_SIZE_BINARY, 123));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "fixedsizebinary", "byteWidth": 123})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -575,7 +572,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
             ArrowSchemaSetTypeDecimal(schema, NANOARROW_TYPE_DECIMAL128, 10, 3));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "decimal", "bitWidth": 128, "precision": 10, "scale": 3})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -584,8 +581,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
         NANOARROW_RETURN_NOT_OK(ArrowSchemaSetTypeStruct(schema, 0));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "struct"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "struct"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -595,8 +591,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
             ArrowSchemaSetType(schema->children[0], NANOARROW_TYPE_INT32));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "list"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "list"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -608,8 +603,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
             ArrowSchemaSetType(schema->children[0]->children[1], NANOARROW_TYPE_INT32));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "map", "keysSorted": false})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "map", "keysSorted": false})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -622,8 +616,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
         schema->flags = ARROW_FLAG_MAP_KEYS_SORTED;
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "map", "keysSorted": true})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "map", "keysSorted": true})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -633,8 +626,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
             ArrowSchemaSetType(schema->children[0], NANOARROW_TYPE_INT32));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
-      R"({"name": "largelist"})");
+      /*append_expr*/ nullptr, &WriteTypeJSON, R"({"name": "largelist"})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -645,7 +637,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeParameterized) {
             ArrowSchemaSetType(schema->children[0], NANOARROW_TYPE_INT32));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "fixedsizelist", "listSize": 12})");
 }
 
@@ -657,7 +649,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeUnion) {
             ArrowSchemaSetTypeUnion(schema, NANOARROW_TYPE_SPARSE_UNION, 0));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "union", "mode": "SPARSE", "typeIds": []})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -671,7 +663,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeUnion) {
             ArrowSchemaSetType(schema->children[1], NANOARROW_TYPE_INT32));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "union", "mode": "SPARSE", "typeIds": [0,1]})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -681,7 +673,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeUnion) {
             ArrowSchemaSetTypeUnion(schema, NANOARROW_TYPE_DENSE_UNION, 0));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "union", "mode": "DENSE", "typeIds": []})");
 
   TestWriteJSON(
       [](ArrowSchema* schema) {
@@ -695,7 +687,7 @@ TEST(NanoarrowTestingTest, NanoarrowTestingTestTypeUnion) {
             ArrowSchemaSetType(schema->children[1], NANOARROW_TYPE_INT32));
         return NANOARROW_OK;
       },
-      [](ArrowArray* array) { return NANOARROW_OK; }, &WriteTypeJSON,
+      /*append_expr*/ nullptr, &WriteTypeJSON,
       R"({"name": "union", "mode": "DENSE", "typeIds": [0,1]})");
 }
diff --git a/src/nanoarrow/schema_test.cc b/src/nanoarrow/schema_test.cc
index acafb3a2c..6e521eefa 100644
--- a/src/nanoarrow/schema_test.cc
+++ b/src/nanoarrow/schema_test.cc
@@ -1510,7 +1510,8 @@ TEST(MetadataTest, Metadata) {
   std::string simple_metadata = SimpleMetadata();
 
   EXPECT_EQ(ArrowMetadataSizeOf(nullptr), 0);
-  EXPECT_EQ(ArrowMetadataSizeOf(simple_metadata.data()), simple_metadata.size());
+  EXPECT_EQ(ArrowMetadataSizeOf(simple_metadata.data()),
+            static_cast<int64_t>(simple_metadata.size()));
 
   EXPECT_EQ(ArrowMetadataHasKey(simple_metadata.data(), ArrowCharView("key")), 1);
   EXPECT_EQ(ArrowMetadataHasKey(simple_metadata.data(), ArrowCharView("not_a_key")), 0);
diff --git a/src/nanoarrow/utils_test.cc b/src/nanoarrow/utils_test.cc
index 8b4fe9226..d35ba3f28 100644
--- a/src/nanoarrow/utils_test.cc
+++ b/src/nanoarrow/utils_test.cc
@@ -166,6 +166,8 @@ struct CustomFreeData {
 
 static void CustomFree(struct ArrowBufferAllocator* allocator, uint8_t* ptr,
                        int64_t size) {
+  NANOARROW_UNUSED(ptr);
+  NANOARROW_UNUSED(size);
   auto data = reinterpret_cast<CustomFreeData*>(allocator->private_data);
   ArrowFree(data->pointer_proxy);
   data->pointer_proxy = nullptr;
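Several of the remaining hunks resolve `-Wsign-compare` by casting one operand explicitly rather than changing a variable's type, because the two sides come from APIs with fixed but different signedness (nanoarrow reports sizes as `int64_t`, the standard library as `size_t`). A minimal illustration of the idiom (names are illustrative, not from this patch):

```cpp
#include <cstdint>
#include <string>

// APIs in the ArrowMetadataSizeOf style report sizes as int64_t, while
// std::string reports size_t; casting one side makes the comparison explicit
// and keeps the warning from firing.
bool SizesMatch(int64_t reported_size, const std::string& metadata) {
  return reported_size == static_cast<int64_t>(metadata.size());
}
```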