diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONBinaryData.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONBinaryData.h new file mode 100644 index 00000000000..5b6a92cb444 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONBinaryData.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONObjectId.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONObjectId.h new file mode 100644 index 00000000000..46b0097587a --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONObjectId.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONTimestamp.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONTimestamp.h new file mode 100644 index 00000000000..ee940379f10 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONTimestamp.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRInt32Value.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRInt32Value.h new file mode 100644 index 00000000000..d06f5f34aec --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRInt32Value.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRMaxKey.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMaxKey.h new file mode 100644 index 00000000000..506b7c20660 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMaxKey.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRMinKey.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMinKey.h new file mode 100644 index 00000000000..7a416ba9416 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMinKey.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRRegexValue.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRRegexValue.h new file mode 100644 index 00000000000..2464c55fc20 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRRegexValue.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/Firestore/CHANGELOG.md b/Firestore/CHANGELOG.md index bcb63c64c11..fa8af8f9e0f 100644 --- a/Firestore/CHANGELOG.md +++ b/Firestore/CHANGELOG.md @@ -1,3 +1,7 @@ +# Unreleased +- [feature] Adds support for the following new types: MinKey, MaxKey, RegexValue, Int32Value, BSONObjectId, + BSONTimestamp, and BSONBinaryData. (#14800) + # 11.12.0 - [fixed] Fixed the `null` value handling in `isNotEqualTo` and `notIn` filters. 
diff --git a/Firestore/Example/Firestore.xcodeproj/project.pbxproj b/Firestore/Example/Firestore.xcodeproj/project.pbxproj index 8deefcabab8..711cce62166 100644 --- a/Firestore/Example/Firestore.xcodeproj/project.pbxproj +++ b/Firestore/Example/Firestore.xcodeproj/project.pbxproj @@ -237,6 +237,7 @@ 20A93AC59CD5A7AC41F10412 /* thread_safe_memoizer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1A8141230C7E3986EACEF0B6 /* thread_safe_memoizer_test.cc */; }; 211A60ECA3976D27C0BF59BB /* md5_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 3D050936A2D52257FD17FB6E /* md5_test.cc */; }; 21836C4D9D48F962E7A3A244 /* ordered_code_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB380D03201BC6E400D97691 /* ordered_code_test.cc */; }; + 21966DA1684600B6B9B912FE /* BsonTypesIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */; }; 21A2A881F71CB825299DF06E /* hard_assert_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 444B7AB3F5A2929070CB1363 /* hard_assert_test.cc */; }; 21C17F15579341289AD01051 /* persistence_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9113B6F513D0473AEABBAF1F /* persistence_testing.cc */; }; 21E588CF29C72813D8A7A0A1 /* FSTExceptionCatcher.m in Sources */ = {isa = PBXBuildFile; fileRef = B8BFD9B37D1029D238BDD71E /* FSTExceptionCatcher.m */; }; @@ -275,6 +276,7 @@ 26C4E52128C8E7B5B96BECC4 /* defer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 8ABAC2E0402213D837F73DC3 /* defer_test.cc */; }; 26C577D159CFFD73E24D543C /* memory_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 74FBEFA4FE4B12C435011763 /* memory_mutation_queue_test.cc */; }; 26CB3D7C871BC56456C6021E /* timestamp_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = ABF6506B201131F8005F2C74 /* timestamp_test.cc */; }; + 26E36A01DA79CC72F88E3A21 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* 
index_value_writer_test.cc */; }; 276A563D546698B6AAC20164 /* annotations.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 618BBE9520B89AAC00B5BCE7 /* annotations.pb.cc */; }; 27AF4C4BAFE079892D4F5341 /* Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4B3E4A77493524333133C5DC /* Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json */; }; 27E46C94AAB087C80A97FF7F /* FIRServerTimestampTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5492E06E202154D600B64F25 /* FIRServerTimestampTests.mm */; }; @@ -298,6 +300,7 @@ 2AD8EE91928AE68DF268BEDA /* limbo_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 54DA129E1F315EE100DD57A1 /* limbo_spec_test.json */; }; 2AD98CD29CC6F820A74CDD5E /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4B59C0A7B2A4548496ED4E7D /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json */; }; 2AE3914BBC4EDF91BD852939 /* memory_query_engine_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 8EF6A33BC2D84233C355F1D0 /* memory_query_engine_test.cc */; }; + 2B3C73B6702180419FC5460A /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; 2B4021C3E663DDDDD512E961 /* objc_type_traits_apple_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = 2A0CF41BA5AED6049B0BEB2C /* objc_type_traits_apple_test.mm */; }; 2B4234B962625F9EE68B31AC /* index_manager_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AE4A9E38D65688EE000EE2A1 /* index_manager_test.cc */; }; 2B4D0509577E5CE0B0B8CEDF /* message_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = CE37875365497FFA8687B745 /* message_test.cc */; }; @@ -435,6 +438,7 @@ 44C4244E42FFFB6E9D7F28BA /* byte_stream_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 432C71959255C5DBDF522F52 /* byte_stream_test.cc */; }; 44EAF3E6EAC0CC4EB2147D16 /* 
transform_operation_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 33607A3AE91548BD219EC9C6 /* transform_operation_test.cc */; }; 451EFFB413364E5A420F8B2D /* thread_safe_memoizer_testing_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = EA10515F99A42D71DA2D2841 /* thread_safe_memoizer_testing_test.cc */; }; + 455C31EB671A1EC9EB7A58CC /* BsonTypesIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */; }; 4562CDD90F5FF0491F07C5DA /* leveldb_opener_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 75860CD13AF47EB1EA39EC2F /* leveldb_opener_test.cc */; }; 457171CE2510EEA46F7D8A30 /* FIRFirestoreTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5467FAFF203E56F8009C9584 /* FIRFirestoreTests.mm */; }; 45939AFF906155EA27D281AB /* annotations.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 618BBE9520B89AAC00B5BCE7 /* annotations.pb.cc */; }; @@ -499,6 +503,7 @@ 4DAF501EE4B4DB79ED4239B0 /* secure_random_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54740A531FC913E500713A1A /* secure_random_test.cc */; }; 4DAFC3A3FD5E96910A517320 /* fake_target_metadata_provider.cc in Sources */ = {isa = PBXBuildFile; fileRef = 71140E5D09C6E76F7C71B2FC /* fake_target_metadata_provider.cc */; }; 4DC660A62BC2B6369DA5C563 /* status_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54A0352C20A3B3D7003E0143 /* status_test.cc */; }; + 4DE1DCA66D728E812A72F624 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; 4DF18D15AC926FB7A4888313 /* lru_garbage_collector_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 277EAACC4DD7C21332E8496A /* lru_garbage_collector_test.cc */; }; 4E0777435A9A26B8B2C08A1E /* remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 7EB299CF85034F09CFD6F3FD /* remote_document_cache_test.cc */; }; 4E2E0314F9FDD7BCED60254A /* 
counting_query_engine.cc in Sources */ = {isa = PBXBuildFile; fileRef = 99434327614FEFF7F7DC88EC /* counting_query_engine.cc */; }; @@ -755,6 +760,7 @@ 627253FDEC6BB5549FE77F4E /* tree_sorted_map_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 549CCA4D20A36DBB00BCEB75 /* tree_sorted_map_test.cc */; }; 62B1C1100A8C68D94565916C /* document_overlay_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = FFCA39825D9678A03D1845D0 /* document_overlay_cache_test.cc */; }; 62DA31B79FE97A90EEF28B0B /* delayed_constructor_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D0A6E9136804A41CEC9D55D4 /* delayed_constructor_test.cc */; }; + 62E181B9AB1568F9D332EA7C /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; 62E54B842A9E910B003347C8 /* IndexingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62E54B832A9E910A003347C8 /* IndexingTests.swift */; }; 62E54B852A9E910B003347C8 /* IndexingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62E54B832A9E910A003347C8 /* IndexingTests.swift */; }; 62E54B862A9E910B003347C8 /* IndexingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62E54B832A9E910A003347C8 /* IndexingTests.swift */; }; @@ -888,6 +894,7 @@ 79D86DD18BB54D2D69DC457F /* leveldb_remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 0840319686A223CC4AD3FAB1 /* leveldb_remote_document_cache_test.cc */; }; 7A2D523AEF58B1413CC8D64F /* query_engine_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B8A853940305237AFDA8050B /* query_engine_test.cc */; }; 7A3BE0ED54933C234FDE23D1 /* leveldb_util_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 332485C4DCC6BA0DBB5E31B7 /* leveldb_util_test.cc */; }; + 7A5E96499414E3D3DCFFF52F /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; 7A66A2CB5CF33F0C28202596 /* status_test.cc in Sources */ = {isa = 
PBXBuildFile; fileRef = 54A0352C20A3B3D7003E0143 /* status_test.cc */; }; 7A7DB86955670B85B4514A1F /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4B59C0A7B2A4548496ED4E7D /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json */; }; 7A7EC216A0015D7620B4FF3E /* string_format_apple_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = 9CFD366B783AE27B9E79EE7A /* string_format_apple_test.mm */; }; @@ -907,6 +914,7 @@ 7C1DC1B44729381126D083AE /* leveldb_snappy_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D9D94300B9C02F7069523C00 /* leveldb_snappy_test.cc */; }; 7C5E017689012489AAB7718D /* CodableGeoPointTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5495EB022040E90200EBA509 /* CodableGeoPointTests.swift */; }; 7C7BA1DB0B66EB899A928283 /* hashing_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54511E8D209805F8005BD28F /* hashing_test.cc */; }; + 7CD026FE4246C540F4231E4C /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; 7D25D41B013BB70ADE526055 /* target_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 526D755F65AC676234F57125 /* target_test.cc */; }; 7D320113FD076A1EF9A8B612 /* filter_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = F02F734F272C3C70D1307076 /* filter_test.cc */; }; 7D3207DEE229EFCF16E52693 /* Validation_BloomFilterTest_MD5_500_01_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4BD051DBE754950FEAC7A446 /* Validation_BloomFilterTest_MD5_500_01_bloom_filter_proto.json */; }; @@ -996,6 +1004,7 @@ 8C39F6D4B3AA9074DF00CFB8 /* string_util_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB380CFC201A2EE200D97691 /* string_util_test.cc */; }; 8C602DAD4E8296AB5EFB962A /* firestore.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 544129D421C2DDC800EFB9CC /* firestore.pb.cc */; }; 8C82D4D3F9AB63E79CC52DC8 /* 
Pods_Firestore_IntegrationTests_iOS.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = ECEBABC7E7B693BE808A1052 /* Pods_Firestore_IntegrationTests_iOS.framework */; }; + 8CBCEB837CE378D44135F64A /* TypeTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2CBE52190D157CE1096CD12E /* TypeTest.swift */; }; 8D0EF43F1B7B156550E65C20 /* FSTGoogleTestTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 54764FAE1FAA21B90085E60A /* FSTGoogleTestTests.mm */; }; 8D67BAAD6D2F1913BACA6AC1 /* thread_safe_memoizer_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */; }; 8DBA8DC55722ED9D3A1BB2C9 /* Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 1A7D48A017ECB54FD381D126 /* Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json */; }; @@ -1050,6 +1059,7 @@ 977E0DA564D6EAF975A4A1A0 /* settings_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = DD12BC1DB2480886D2FB0005 /* settings_test.cc */; }; 9783FAEA4CF758E8C4C2D76E /* hashing_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54511E8D209805F8005BD28F /* hashing_test.cc */; }; 978D9EFDC56CC2E1FA468712 /* leveldb_snappy_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D9D94300B9C02F7069523C00 /* leveldb_snappy_test.cc */; }; + 984135015B443110FF60F86F /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; 9860F493EBF43AF5AC0A88BD /* empty_credentials_provider_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 8FA60B08D59FEA0D6751E87F /* empty_credentials_provider_test.cc */; }; 98708140787A9465D883EEC9 /* leveldb_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5C7942B6244F4C416B11B86C /* leveldb_mutation_queue_test.cc */; }; 98FE82875A899A40A98AAC22 /* leveldb_opener_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 75860CD13AF47EB1EA39EC2F /* 
leveldb_opener_test.cc */; }; @@ -1075,6 +1085,7 @@ 9D71628E38D9F64C965DF29E /* FSTAPIHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5492E04E202154AA00B64F25 /* FSTAPIHelpers.mm */; }; 9E1997789F19BF2E9029012E /* FIRCompositeIndexQueryTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 65AF0AB593C3AD81A1F1A57E /* FIRCompositeIndexQueryTests.mm */; }; 9E656F4FE92E8BFB7F625283 /* to_string_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B696858D2214B53900271095 /* to_string_test.cc */; }; + 9ED94C2008F1475A0DC6D3BE /* BsonTypesIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */; }; 9EE1447AA8E68DF98D0590FF /* precondition_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 549CCA5520A36E1F00BCEB75 /* precondition_test.cc */; }; 9EE81B1FB9B7C664B7B0A904 /* resume_token_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 54DA12A41F315EE100DD57A1 /* resume_token_spec_test.json */; }; 9F41D724D9947A89201495AD /* limit_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 54DA129F1F315EE100DD57A1 /* limit_spec_test.json */; }; @@ -1098,6 +1109,7 @@ A25FF76DEF542E01A2DF3B0E /* time_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5497CB76229DECDE000FB92F /* time_testing.cc */; }; A27096F764227BC73526FED3 /* leveldb_remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 0840319686A223CC4AD3FAB1 /* leveldb_remote_document_cache_test.cc */; }; A27908A198E1D2230C1801AC /* bundle_serializer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B5C2A94EE24E60543F62CC35 /* bundle_serializer_test.cc */; }; + A2905C9606C844D7C44B21D7 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; A2E9978E02F7BCB016555F09 /* Validation_BloomFilterTest_MD5_1_1_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 
3369AC938F82A70685C5ED58 /* Validation_BloomFilterTest_MD5_1_1_membership_test_result.json */; }; A3262936317851958C8EABAF /* byte_stream_cpp_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 01D10113ECC5B446DB35E96D /* byte_stream_cpp_test.cc */; }; A4757C171D2407F61332EA38 /* byte_stream_cpp_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 01D10113ECC5B446DB35E96D /* byte_stream_cpp_test.cc */; }; @@ -1128,6 +1140,7 @@ A873EE3C8A97C90BA978B68A /* firebase_app_check_credentials_provider_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = F119BDDF2F06B3C0883B8297 /* firebase_app_check_credentials_provider_test.mm */; }; A8AF92A35DFA30EEF9C27FB7 /* database_info_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB38D92E20235D22000A432D /* database_info_test.cc */; }; A8C9FF6D13E6C83D4AB54EA7 /* secure_random_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54740A531FC913E500713A1A /* secure_random_test.cc */; }; + A8F0E84A4D8F44B4EEE3155C /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; A907244EE37BC32C8D82948E /* FSTSpecTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5492E03020213FFC00B64F25 /* FSTSpecTests.mm */; }; A9206FF8FF8834347E9C7DDB /* leveldb_overlay_migration_manager_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D8A6D52723B1BABE1B7B8D8F /* leveldb_overlay_migration_manager_test.cc */; }; A97ED2BAAEDB0F765BBD5F98 /* local_store_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 307FF03D0297024D59348EBD /* local_store_test.cc */; }; @@ -1216,6 +1229,7 @@ B54BA1E76636C0C93334271B /* settings_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = DD12BC1DB2480886D2FB0005 /* settings_test.cc */; }; B576823475FBCA5EFA583F9C /* leveldb_migrations_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = EF83ACD5E1E9F25845A9ACED /* leveldb_migrations_test.cc */; }; B592DB7DB492B1C1D5E67D01 /* write.pb.cc in Sources */ = {isa = 
PBXBuildFile; fileRef = 544129D921C2DDC800EFB9CC /* write.pb.cc */; }; + B59498DE96F8B6F8D5C0788F /* TypeTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2CBE52190D157CE1096CD12E /* TypeTest.swift */; }; B5AEF7E4EBC29653DEE856A2 /* strerror_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 358C3B5FE573B1D60A4F7592 /* strerror_test.cc */; }; B60BAF9ED610F9D4E245EEB3 /* Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 1A7D48A017ECB54FD381D126 /* Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json */; }; B6152AD7202A53CB000E5744 /* document_key_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B6152AD5202A5385000E5744 /* document_key_test.cc */; }; @@ -1368,6 +1382,7 @@ CE2962775B42BDEEE8108567 /* leveldb_lru_garbage_collector_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B629525F7A1AAC1AB765C74F /* leveldb_lru_garbage_collector_test.cc */; }; CE411D4B70353823DE63C0D5 /* bundle_loader_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = A853C81A6A5A51C9D0389EDA /* bundle_loader_test.cc */; }; CEA91CE103B42533C54DBAD6 /* memory_remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1CA9800A53669EFBFFB824E3 /* memory_remote_document_cache_test.cc */; }; + CEE39EC40FC07EBB02C2E341 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; CF18D52A88F4F6F62C5495EF /* thread_safe_memoizer_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */; }; CF1FB026CCB901F92B4B2C73 /* watch_change_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 2D7472BC70C024D736FF74D9 /* watch_change_test.cc */; }; CF5DE1ED21DD0A9783383A35 /* CodableIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 124C932B22C1642C00CA8C2D /* CodableIntegrationTests.swift */; }; @@ -1450,6 +1465,7 @@ 
DD04F7FE7A1ADE230A247DBC /* byte_stream_apple_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = 7628664347B9C96462D4BF17 /* byte_stream_apple_test.mm */; }; DD0F288108714D5A406D0A9F /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 5C68EE4CB94C0DD6E333F546 /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json */; }; DD213F68A6F79E1D4924BD95 /* Pods_Firestore_Example_macOS.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E42355285B9EF55ABD785792 /* Pods_Firestore_Example_macOS.framework */; }; + DD540A3D4C3FC45FDBD89544 /* TypeTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2CBE52190D157CE1096CD12E /* TypeTest.swift */; }; DD5976A45071455FF3FE74B8 /* string_win_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 79507DF8378D3C42F5B36268 /* string_win_test.cc */; }; DD6C480629B3F87933FAF440 /* filesystem_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = BA02DA2FCD0001CFC6EB08DA /* filesystem_testing.cc */; }; DD935E243A64A4EB688E4C1C /* credentials_provider_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 2F4FA4576525144C5069A7A5 /* credentials_provider_test.cc */; }; @@ -1490,6 +1506,7 @@ E3319DC1804B69F0ED1FFE02 /* memory_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 74FBEFA4FE4B12C435011763 /* memory_mutation_queue_test.cc */; }; E375FBA0632EFB4D14C4E5A9 /* FSTGoogleTestTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 54764FAE1FAA21B90085E60A /* FSTGoogleTestTests.mm */; }; E37C52277CD00C57E5848A0E /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 5C68EE4CB94C0DD6E333F546 /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json */; }; + E3D0FC852ADF4BEE74460FEF /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; E434ACDF63F219F3031F292E /* 
ConditionalConformanceTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E3228F51DCDC2E90D5C58F97 /* ConditionalConformanceTests.swift */; }; E435450184AEB51EE8435F66 /* write.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 544129D921C2DDC800EFB9CC /* write.pb.cc */; }; E441A53D035479C53C74A0E6 /* recovery_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 9C1AFCC9E616EC33D6E169CF /* recovery_spec_test.json */; }; @@ -1640,6 +1657,7 @@ FD6F5B4497D670330E7F89DA /* document_overlay_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = FFCA39825D9678A03D1845D0 /* document_overlay_cache_test.cc */; }; FD8EA96A604E837092ACA51D /* ordered_code_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB380D03201BC6E400D97691 /* ordered_code_test.cc */; }; FE20E696E014CDCE918E91D6 /* md5_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = E2E39422953DE1D3C7B97E77 /* md5_testing.cc */; }; + FE4AC400F8F2D49B3E806420 /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; FE701C2D739A5371BCBD62B9 /* leveldb_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5C7942B6244F4C416B11B86C /* leveldb_mutation_queue_test.cc */; }; FE9131E2D84A560D287B6F90 /* resource.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1C3F7302BF4AE6CBC00ECDD0 /* resource.pb.cc */; }; FF3405218188DFCE586FB26B /* app_testing.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5467FB07203E6A44009C9584 /* app_testing.mm */; }; @@ -1737,12 +1755,14 @@ 214877F52A705012D6720CA0 /* object_value_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = object_value_test.cc; sourceTree = ""; }; 2220F583583EFC28DE792ABE /* Pods_Firestore_IntegrationTests_tvOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_IntegrationTests_tvOS.framework; sourceTree = 
BUILT_PRODUCTS_DIR; }; 2286F308EFB0534B1BDE05B9 /* memory_target_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = memory_target_cache_test.cc; sourceTree = ""; }; + 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; name = index_value_writer_test.cc; path = index/index_value_writer_test.cc; sourceTree = ""; }; 26DDBA115DEB88631B93F203 /* thread_safe_memoizer_testing.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = thread_safe_memoizer_testing.h; sourceTree = ""; }; 277EAACC4DD7C21332E8496A /* lru_garbage_collector_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = lru_garbage_collector_test.cc; sourceTree = ""; }; 28B45B2104E2DAFBBF86DBB7 /* logic_utils_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = logic_utils_test.cc; sourceTree = ""; }; 29D9C76922DAC6F710BC1EF4 /* memory_document_overlay_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = memory_document_overlay_cache_test.cc; sourceTree = ""; }; 2A0CF41BA5AED6049B0BEB2C /* objc_type_traits_apple_test.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; path = objc_type_traits_apple_test.mm; sourceTree = ""; }; 2B50B3A0DF77100EEE887891 /* Pods_Firestore_Tests_iOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_Tests_iOS.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 2CBE52190D157CE1096CD12E /* TypeTest.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; path = TypeTest.swift; sourceTree = ""; }; 2D7472BC70C024D736FF74D9 /* watch_change_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = 
sourcecode.cpp.cpp; path = watch_change_test.cc; sourceTree = ""; }; 2DAA26538D1A93A39F8AC373 /* nanopb_testing.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = nanopb_testing.h; path = nanopb/nanopb_testing.h; sourceTree = ""; }; 2E48431B0EDA400BEA91D4AB /* Pods-Firestore_Tests_tvOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Tests_tvOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Tests_tvOS/Pods-Firestore_Tests_tvOS.debug.xcconfig"; sourceTree = ""; }; @@ -1777,7 +1797,7 @@ 4334F87873015E3763954578 /* status_testing.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = status_testing.h; sourceTree = ""; }; 4375BDCDBCA9938C7F086730 /* Validation_BloomFilterTest_MD5_5000_1_bloom_filter_proto.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_5000_1_bloom_filter_proto.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_5000_1_bloom_filter_proto.json; sourceTree = ""; }; 444B7AB3F5A2929070CB1363 /* hard_assert_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = hard_assert_test.cc; sourceTree = ""; }; - 4564AD9C55EC39C080EB9476 /* globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = globals_cache_test.cc; sourceTree = ""; }; + 4564AD9C55EC39C080EB9476 /* globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = globals_cache_test.cc; sourceTree = ""; }; 478DC75A0DCA6249A616DD30 /* Validation_BloomFilterTest_MD5_500_0001_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_500_0001_membership_test_result.json; path = 
bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_500_0001_membership_test_result.json; sourceTree = ""; }; 48D0915834C3D234E5A875A9 /* grpc_stream_tester.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = grpc_stream_tester.h; sourceTree = ""; }; 4B3E4A77493524333133C5DC /* Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json; sourceTree = ""; }; @@ -1895,7 +1915,7 @@ 5B5414D28802BC76FDADABD6 /* stream_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = stream_test.cc; sourceTree = ""; }; 5B96CC29E9946508F022859C /* Validation_BloomFilterTest_MD5_50000_0001_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_50000_0001_membership_test_result.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_50000_0001_membership_test_result.json; sourceTree = ""; }; 5C68EE4CB94C0DD6E333F546 /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_1_01_membership_test_result.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_1_01_membership_test_result.json; sourceTree = ""; }; - 5C6DEA63FBDE19D841291723 /* memory_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = memory_globals_cache_test.cc; sourceTree = ""; }; + 5C6DEA63FBDE19D841291723 /* memory_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = memory_globals_cache_test.cc; sourceTree = ""; }; 
5C7942B6244F4C416B11B86C /* leveldb_mutation_queue_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = leveldb_mutation_queue_test.cc; sourceTree = ""; }; 5CAE131920FFFED600BE9A4A /* Firestore_Benchmarks_iOS.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = Firestore_Benchmarks_iOS.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 5CAE131D20FFFED600BE9A4A /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; @@ -1945,7 +1965,7 @@ 69E6C311558EC77729A16CF1 /* Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS/Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS.debug.xcconfig"; sourceTree = ""; }; 6A7A30A2DB3367E08939E789 /* bloom_filter.pb.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = bloom_filter.pb.h; sourceTree = ""; }; 6AE927CDFC7A72BF825BE4CB /* Pods-Firestore_Tests_tvOS.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Tests_tvOS.release.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Tests_tvOS/Pods-Firestore_Tests_tvOS.release.xcconfig"; sourceTree = ""; }; - 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = thread_safe_memoizer_testing.cc; sourceTree = ""; }; + 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = thread_safe_memoizer_testing.cc; sourceTree = ""; }; 6E8302DE210222ED003E1EA3 /* FSTFuzzTestFieldPath.h */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FSTFuzzTestFieldPath.h; sourceTree = ""; }; 6E8302DF21022309003E1EA3 /* FSTFuzzTestFieldPath.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = FSTFuzzTestFieldPath.mm; sourceTree = ""; }; 6EA39FDD20FE820E008D461F /* FSTFuzzTestSerializer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FSTFuzzTestSerializer.mm; sourceTree = ""; }; @@ -1981,6 +2001,7 @@ 7C3F995E040E9E9C5E8514BB /* query_listener_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = query_listener_test.cc; sourceTree = ""; }; 7C5C40C7BFBB86032F1DC632 /* FSTExceptionCatcher.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = FSTExceptionCatcher.h; sourceTree = ""; }; 7EB299CF85034F09CFD6F3FD /* remote_document_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = remote_document_cache_test.cc; sourceTree = ""; }; + 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRBsonTypesUnitTests.mm; sourceTree = ""; }; 84076EADF6872C78CDAC7291 /* bundle_builder.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = bundle_builder.h; sourceTree = ""; }; 84434E57CA72951015FC71BC /* Pods-Firestore_FuzzTests_iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_FuzzTests_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_FuzzTests_iOS/Pods-Firestore_FuzzTests_iOS.debug.xcconfig"; sourceTree = ""; }; 872C92ABD71B12784A1C5520 /* async_testing.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = async_testing.cc; sourceTree = ""; }; @@ -2080,6 
+2101,7 @@ C939D1789E38C09F9A0C1157 /* Validation_BloomFilterTest_MD5_1_0001_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_1_0001_membership_test_result.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_1_0001_membership_test_result.json; sourceTree = ""; }; CB7B2D4691C380DE3EB59038 /* lru_garbage_collector_test.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = lru_garbage_collector_test.h; sourceTree = ""; }; CC572A9168BBEF7B83E4BBC5 /* view_snapshot_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = view_snapshot_test.cc; sourceTree = ""; }; + CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; path = BsonTypesIntegrationTests.swift; sourceTree = ""; }; CCC9BD953F121B9E29F9AA42 /* user_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; name = user_test.cc; path = credentials/user_test.cc; sourceTree = ""; }; CD422AF3E4515FB8E9BE67A0 /* equals_tester.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = equals_tester.h; sourceTree = ""; }; CE37875365497FFA8687B745 /* message_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; name = message_test.cc; path = nanopb/message_test.cc; sourceTree = ""; }; @@ -2123,7 +2145,7 @@ E42355285B9EF55ABD785792 /* Pods_Firestore_Example_macOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_Example_macOS.framework; sourceTree = BUILT_PRODUCTS_DIR; }; E592181BFD7C53C305123739 /* Pods-Firestore_Tests_iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = 
"Pods-Firestore_Tests_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Tests_iOS/Pods-Firestore_Tests_iOS.debug.xcconfig"; sourceTree = ""; }; E76F0CDF28E5FA62D21DE648 /* leveldb_target_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = leveldb_target_cache_test.cc; sourceTree = ""; }; - EA10515F99A42D71DA2D2841 /* thread_safe_memoizer_testing_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = thread_safe_memoizer_testing_test.cc; sourceTree = ""; }; + EA10515F99A42D71DA2D2841 /* thread_safe_memoizer_testing_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = thread_safe_memoizer_testing_test.cc; sourceTree = ""; }; ECEBABC7E7B693BE808A1052 /* Pods_Firestore_IntegrationTests_iOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_IntegrationTests_iOS.framework; sourceTree = BUILT_PRODUCTS_DIR; }; EF3A65472C66B9560041EE69 /* FIRVectorValueTests.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRVectorValueTests.mm; sourceTree = ""; }; EF6C285029E462A200A7D4F1 /* FIRAggregateTests.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRAggregateTests.mm; sourceTree = ""; }; @@ -2141,7 +2163,7 @@ F848C41C03A25C42AD5A4BC2 /* target_cache_test.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = target_cache_test.h; sourceTree = ""; }; F869D85E900E5AF6CD02E2FC /* firebase_auth_credentials_provider_test.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; name = firebase_auth_credentials_provider_test.mm; path = credentials/firebase_auth_credentials_provider_test.mm; sourceTree = ""; }; FA2E9952BA2B299C1156C43C /* Pods-Firestore_Benchmarks_iOS.debug.xcconfig */ = {isa = PBXFileReference; 
includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Benchmarks_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Benchmarks_iOS/Pods-Firestore_Benchmarks_iOS.debug.xcconfig"; sourceTree = ""; }; - FC44D934D4A52C790659C8D6 /* leveldb_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = leveldb_globals_cache_test.cc; sourceTree = ""; }; + FC44D934D4A52C790659C8D6 /* leveldb_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = leveldb_globals_cache_test.cc; sourceTree = ""; }; FC738525340E594EBFAB121E /* Pods-Firestore_Example_tvOS.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Example_tvOS.release.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Example_tvOS/Pods-Firestore_Example_tvOS.release.xcconfig"; sourceTree = ""; }; FF73B39D04D1760190E6B84A /* FIRQueryUnitTests.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRQueryUnitTests.mm; sourceTree = ""; }; FFCA39825D9678A03D1845D0 /* document_overlay_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = document_overlay_cache_test.cc; sourceTree = ""; }; @@ -2269,11 +2291,13 @@ children = ( EF6C286C29E6D22200A7D4F1 /* AggregationIntegrationTests.swift */, 062072B62773A055001655D7 /* AsyncAwaitIntegrationTests.swift */, + CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */, 124C932B22C1642C00CA8C2D /* CodableIntegrationTests.swift */, 3355BE9391CC4857AF0BDAE3 /* DatabaseTests.swift */, 62E54B832A9E910A003347C8 /* IndexingTests.swift */, 621D620928F9CE7400D2FA26 /* QueryIntegrationTests.swift */, 4D65F6E69993611D47DC8E7C /* SnapshotListenerSourceTests.swift */, + 2CBE52190D157CE1096CD12E /* TypeTest.swift */, EFF22EA92C5060A4009A369B /* VectorIntegrationTests.swift */, ); 
path = Integration; @@ -2444,6 +2468,7 @@ AB380CF7201937B800D97691 /* core */, 11BB7A1B7F6F482EFDBC5303 /* credentials */, 54EB764B202277970088B8F3 /* immutable */, + F952916DDF5B3977111173CC /* index */, 54995F70205B6E1A004EFFA0 /* local */, AB356EF5200E9D1A0089B766 /* model */, 5C332D7293E6114E491D3662 /* nanopb */, @@ -2963,6 +2988,7 @@ isa = PBXGroup; children = ( 1B9F95EC29FAD3F100EEC075 /* FIRAggregateQueryUnitTests.mm */, + 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */, 5492E045202154AA00B64F25 /* FIRCollectionReferenceTests.mm */, 5492E049202154AA00B64F25 /* FIRDocumentReferenceTests.mm */, 5492E04B202154AA00B64F25 /* FIRDocumentSnapshotTests.mm */, @@ -3089,6 +3115,14 @@ name = bundle; sourceTree = ""; }; + F952916DDF5B3977111173CC /* index */ = { + isa = PBXGroup; + children = ( + 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */, + ); + name = index; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -4157,6 +4191,7 @@ buildActionMask = 2147483647; files = ( 1B9F95F029FAD4D700EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, + E3D0FC852ADF4BEE74460FEF /* FIRBsonTypesUnitTests.mm in Sources */, E11DDA3DD75705F26245E295 /* FIRCollectionReferenceTests.mm in Sources */, 46999832F7D1709B4C29FAA8 /* FIRDocumentReferenceTests.mm in Sources */, 6FD2369F24E884A9D767DD80 /* FIRDocumentSnapshotTests.mm in Sources */, @@ -4254,6 +4289,7 @@ 48BC5801432127A90CFF55E3 /* index.pb.cc in Sources */, 167659CDCA47B450F2441454 /* index_backfiller_test.cc in Sources */, FAD97B82766AEC29B7B5A1B7 /* index_manager_test.cc in Sources */, + 26E36A01DA79CC72F88E3A21 /* index_value_writer_test.cc in Sources */, E084921EFB7CF8CB1E950D6C /* iterator_adaptors_test.cc in Sources */, 49C04B97AB282FFA82FD98CD /* latlng.pb.cc in Sources */, 292BCC76AF1B916752764A8F /* leveldb_bundle_cache_test.cc in Sources */, @@ -4380,6 +4416,7 @@ buildActionMask = 2147483647; files = ( 1B9F95F229FAD4E000EEC075 /* FIRAggregateQueryUnitTests.mm 
in Sources */, + 7CD026FE4246C540F4231E4C /* FIRBsonTypesUnitTests.mm in Sources */, 00B7AFE2A7C158DD685EB5EE /* FIRCollectionReferenceTests.mm in Sources */, 25FE27330996A59F31713A0C /* FIRDocumentReferenceTests.mm in Sources */, 28E4B4A53A739AE2C9CF4159 /* FIRDocumentSnapshotTests.mm in Sources */, @@ -4477,6 +4514,7 @@ 190F9885BAA81587F08CD26C /* index.pb.cc in Sources */, B845B9EDED330D0FDAD891BC /* index_backfiller_test.cc in Sources */, F58A23FEF328EB74F681FE83 /* index_manager_test.cc in Sources */, + CEE39EC40FC07EBB02C2E341 /* index_value_writer_test.cc in Sources */, 0E4C94369FFF7EC0C9229752 /* iterator_adaptors_test.cc in Sources */, 0FBDD5991E8F6CD5F8542474 /* latlng.pb.cc in Sources */, 513D34C9964E8C60C5C2EE1C /* leveldb_bundle_cache_test.cc in Sources */, @@ -4595,6 +4633,7 @@ EF6C286F29E6D22200A7D4F1 /* AggregationIntegrationTests.swift in Sources */, 062072B92773A055001655D7 /* AsyncAwaitIntegrationTests.swift in Sources */, 733AFC467B600967536BD70F /* BasicCompileTests.swift in Sources */, + 21966DA1684600B6B9B912FE /* BsonTypesIntegrationTests.swift in Sources */, 79987AF2DF1FCE799008B846 /* CodableGeoPointTests.swift in Sources */, 1C79AE3FBFC91800E30D092C /* CodableIntegrationTests.swift in Sources */, BA3C0BA8082A6FB2546E47AC /* CodableTimestampTests.swift in Sources */, @@ -4604,6 +4643,7 @@ 1B9F95F329FAD4E100EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, EF6C285329E462A200A7D4F1 /* FIRAggregateTests.mm in Sources */, 95ED06D2B0078D3CDB821B68 /* FIRArrayTransformTests.mm in Sources */, + A8F0E84A4D8F44B4EEE3155C /* FIRBsonTypesUnitTests.mm in Sources */, DB3ADDA51FB93E84142EA90D /* FIRBundlesTests.mm in Sources */, 0500A324CEC854C5B0CF364C /* FIRCollectionReferenceTests.mm in Sources */, CAEA2A42D3120B48C6EE39E8 /* FIRCompositeIndexQueryTests.mm in Sources */, @@ -4651,6 +4691,7 @@ 62E54B862A9E910B003347C8 /* IndexingTests.swift in Sources */, 621D620C28F9CE7400D2FA26 /* QueryIntegrationTests.swift in Sources */, 
1CFBD4563960D8A20C4679A3 /* SnapshotListenerSourceTests.swift in Sources */, + B59498DE96F8B6F8D5C0788F /* TypeTest.swift in Sources */, EFF22EAC2C5060A4009A369B /* VectorIntegrationTests.swift in Sources */, 4D42E5C756229C08560DD731 /* XCTestCase+Await.mm in Sources */, 09BE8C01EC33D1FD82262D5D /* aggregate_query_test.cc in Sources */, @@ -4724,6 +4765,7 @@ 096BA3A3703AC1491F281618 /* index.pb.cc in Sources */, 9236478E01DF2EC7DF58B1FC /* index_backfiller_test.cc in Sources */, 4BFEEB7FDD7CD5A693B5B5C1 /* index_manager_test.cc in Sources */, + 4DE1DCA66D728E812A72F624 /* index_value_writer_test.cc in Sources */, FA334ADC73CFDB703A7C17CD /* iterator_adaptors_test.cc in Sources */, CBC891BEEC525F4D8F40A319 /* latlng.pb.cc in Sources */, 2E76BC76BBCE5FCDDCF5EEBE /* leveldb_bundle_cache_test.cc in Sources */, @@ -4842,6 +4884,7 @@ EF6C286E29E6D22200A7D4F1 /* AggregationIntegrationTests.swift in Sources */, 062072B82773A055001655D7 /* AsyncAwaitIntegrationTests.swift in Sources */, B896E5DE1CC27347FAC009C3 /* BasicCompileTests.swift in Sources */, + 9ED94C2008F1475A0DC6D3BE /* BsonTypesIntegrationTests.swift in Sources */, 722F9A798F39F7D1FE7CF270 /* CodableGeoPointTests.swift in Sources */, CF5DE1ED21DD0A9783383A35 /* CodableIntegrationTests.swift in Sources */, 32B0739404FA588608E1F41A /* CodableTimestampTests.swift in Sources */, @@ -4851,6 +4894,7 @@ 1B9F95F129FAD4D800EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, EF6C285229E462A200A7D4F1 /* FIRAggregateTests.mm in Sources */, 660E99DEDA0A6FC1CCB200F9 /* FIRArrayTransformTests.mm in Sources */, + 62E181B9AB1568F9D332EA7C /* FIRBsonTypesUnitTests.mm in Sources */, AE068EDBC74AF27679CCB6DA /* FIRBundlesTests.mm in Sources */, BA0BB02821F1949783C8AA50 /* FIRCollectionReferenceTests.mm in Sources */, 9E1997789F19BF2E9029012E /* FIRCompositeIndexQueryTests.mm in Sources */, @@ -4898,6 +4942,7 @@ 62E54B852A9E910B003347C8 /* IndexingTests.swift in Sources */, 621D620B28F9CE7400D2FA26 /* QueryIntegrationTests.swift 
in Sources */, A0BC30D482B0ABD1A3A24CDC /* SnapshotListenerSourceTests.swift in Sources */, + 8CBCEB837CE378D44135F64A /* TypeTest.swift in Sources */, EFF22EAB2C5060A4009A369B /* VectorIntegrationTests.swift in Sources */, 736C4E82689F1CA1859C4A3F /* XCTestCase+Await.mm in Sources */, 412BE974741729A6683C386F /* aggregate_query_test.cc in Sources */, @@ -4971,6 +5016,7 @@ 6E8CD8F545C8EDA84918977C /* index.pb.cc in Sources */, E25DCFEF318E003B8B7B9DC8 /* index_backfiller_test.cc in Sources */, 650B31A5EC6F8D2AEA79C350 /* index_manager_test.cc in Sources */, + A2905C9606C844D7C44B21D7 /* index_value_writer_test.cc in Sources */, 86494278BE08F10A8AAF9603 /* iterator_adaptors_test.cc in Sources */, 4173B61CB74EB4CD1D89EE68 /* latlng.pb.cc in Sources */, 1E8F5F37052AB0C087D69DF9 /* leveldb_bundle_cache_test.cc in Sources */, @@ -5107,6 +5153,7 @@ buildActionMask = 2147483647; files = ( 1B9F95EE29FAD4CD00EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, + FE4AC400F8F2D49B3E806420 /* FIRBsonTypesUnitTests.mm in Sources */, 5492E050202154AA00B64F25 /* FIRCollectionReferenceTests.mm in Sources */, 5492E053202154AB00B64F25 /* FIRDocumentReferenceTests.mm in Sources */, 5492E055202154AB00B64F25 /* FIRDocumentSnapshotTests.mm in Sources */, @@ -5204,6 +5251,7 @@ 77D38E78F7CCB8504450A8FB /* index.pb.cc in Sources */, 76FEBDD2793B729BAD2E84C7 /* index_backfiller_test.cc in Sources */, E6357221227031DD77EE5265 /* index_manager_test.cc in Sources */, + 2B3C73B6702180419FC5460A /* index_value_writer_test.cc in Sources */, 54A0353520A3D8CB003E0143 /* iterator_adaptors_test.cc in Sources */, 618BBEAE20B89AAC00B5BCE7 /* latlng.pb.cc in Sources */, 0EDFC8A6593477E1D17CDD8F /* leveldb_bundle_cache_test.cc in Sources */, @@ -5341,6 +5389,7 @@ EF6C286D29E6D22200A7D4F1 /* AggregationIntegrationTests.swift in Sources */, 062072B72773A055001655D7 /* AsyncAwaitIntegrationTests.swift in Sources */, F731A0CCD0220B370BC1BE8B /* BasicCompileTests.swift in Sources */, + 
455C31EB671A1EC9EB7A58CC /* BsonTypesIntegrationTests.swift in Sources */, 7C5E017689012489AAB7718D /* CodableGeoPointTests.swift in Sources */, 54C3242322D3B627000FE6DD /* CodableIntegrationTests.swift in Sources */, 70AB665EB6A473FF6C4CFD31 /* CodableTimestampTests.swift in Sources */, @@ -5350,6 +5399,7 @@ 1B9F95EF29FAD4CF00EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, EF6C285129E462A200A7D4F1 /* FIRAggregateTests.mm in Sources */, 73866AA12082B0A5009BB4FF /* FIRArrayTransformTests.mm in Sources */, + 7A5E96499414E3D3DCFFF52F /* FIRBsonTypesUnitTests.mm in Sources */, 4B54FA587C7107973FD76044 /* FIRBundlesTests.mm in Sources */, 7BCC5973C4F4FCC272150E31 /* FIRCollectionReferenceTests.mm in Sources */, 242BC62992ACC1A5B142CD4A /* FIRCompositeIndexQueryTests.mm in Sources */, @@ -5397,6 +5447,7 @@ 62E54B842A9E910B003347C8 /* IndexingTests.swift in Sources */, 621D620A28F9CE7400D2FA26 /* QueryIntegrationTests.swift in Sources */, B00F8D1819EE20C45B660940 /* SnapshotListenerSourceTests.swift in Sources */, + DD540A3D4C3FC45FDBD89544 /* TypeTest.swift in Sources */, EFF22EAA2C5060A4009A369B /* VectorIntegrationTests.swift in Sources */, 5492E0442021457E00B64F25 /* XCTestCase+Await.mm in Sources */, B04E4FE20930384DF3A402F9 /* aggregate_query_test.cc in Sources */, @@ -5470,6 +5521,7 @@ 78E8DDDBE131F3DA9AF9F8B8 /* index.pb.cc in Sources */, CCE596E8654A4D2EEA75C219 /* index_backfiller_test.cc in Sources */, 2B4234B962625F9EE68B31AC /* index_manager_test.cc in Sources */, + 984135015B443110FF60F86F /* index_value_writer_test.cc in Sources */, 8A79DDB4379A063C30A76329 /* iterator_adaptors_test.cc in Sources */, 23C04A637090E438461E4E70 /* latlng.pb.cc in Sources */, 77C459976DCF7503AEE18F7F /* leveldb_bundle_cache_test.cc in Sources */, diff --git a/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme b/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme index 
7a27b6f0c54..4884126571c 100644 --- a/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme +++ b/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme @@ -7,7 +7,11 @@ buildImplicitDependencies = "YES"> + buildForTesting = "YES" + buildForRunning = "YES" + buildForProfiling = "YES" + buildForArchiving = "YES" + buildForAnalyzing = "YES"> + + + + @@ -39,17 +52,6 @@ - - - - - - - - +#import +#import +#import +#import +#import +#import +#import + +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface FIRBsonTypesUnitTests : XCTestCase +@end + +@implementation FIRBsonTypesUnitTests + +- (void)testMinKeySingleton { + FIRMinKey *minKey1 = [FIRMinKey shared]; + FIRMinKey *minKey2 = [FIRMinKey shared]; + XCTAssertEqual(minKey1, minKey2); + XCTAssertTrue([minKey1 isEqual:minKey2]); +} + +- (void)testMaxKeySingleton { + FIRMaxKey *maxKey1 = [FIRMaxKey shared]; + FIRMaxKey *maxKey2 = [FIRMaxKey shared]; + XCTAssertEqual(maxKey1, maxKey2); + XCTAssertTrue([maxKey1 isEqual:maxKey2]); +} + +- (void)testCreateAndReadAndCompareRegexValue { + FIRRegexValue *regex1 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + FIRRegexValue *regex2 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + FIRRegexValue *regex3 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"x"]; + FIRRegexValue *regex4 = [[FIRRegexValue alloc] initWithPattern:@"^bar" options:@"i"]; + + // Test reading the values back. 
+ XCTAssertEqual(regex1.pattern, @"^foo"); + XCTAssertEqual(regex1.options, @"i"); + + // Test isEqual + XCTAssertTrue([regex1 isEqual:regex2]); + XCTAssertFalse([regex1 isEqual:regex3]); + XCTAssertFalse([regex1 isEqual:regex4]); +} + +- (void)testCreateAndReadAndCompareInt32Value { + FIRInt32Value *val1 = [[FIRInt32Value alloc] initWithValue:5]; + FIRInt32Value *val2 = [[FIRInt32Value alloc] initWithValue:5]; + FIRInt32Value *val3 = [[FIRInt32Value alloc] initWithValue:3]; + + // Test reading the value back + XCTAssertEqual(5, val1.value); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); +} + +- (void)testCreateAndReadAndCompareBsonObjectId { + FIRBSONObjectId *val1 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; + FIRBSONObjectId *val2 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; + FIRBSONObjectId *val3 = [[FIRBSONObjectId alloc] initWithValue:@"efgh"]; + + // Test reading the value back + XCTAssertEqual(@"abcd", val1.value); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); +} + +- (void)testCreateAndReadAndCompareBsonTimestamp { + FIRBSONTimestamp *val1 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBSONTimestamp *val2 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBSONTimestamp *val3 = [[FIRBSONTimestamp alloc] initWithSeconds:4444 increment:100]; + FIRBSONTimestamp *val4 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:444]; + + // Test reading the values back. 
+ XCTAssertEqual(1234U, val1.seconds); + XCTAssertEqual(100U, val1.increment); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); + XCTAssertFalse([val1 isEqual:val4]); +} + +- (void)testCreateAndReadAndCompareBsonBinaryData { + uint8_t byteArray1[] = {0x01, 0x02, 0x03, 0x04, 0x05}; + uint8_t byteArray2[] = {0x01, 0x02, 0x03, 0x04, 0x99}; + NSData *data1 = [NSData dataWithBytes:byteArray1 length:sizeof(byteArray1)]; + NSData *data2 = [NSData dataWithBytes:byteArray1 length:sizeof(byteArray1)]; + NSData *data3 = [NSData dataWithBytes:byteArray2 length:sizeof(byteArray2)]; + + FIRBSONBinaryData *val1 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data1]; + FIRBSONBinaryData *val2 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data2]; + FIRBSONBinaryData *val3 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data3]; + FIRBSONBinaryData *val4 = [[FIRBSONBinaryData alloc] initWithSubtype:1 data:data1]; + + // Test reading the values back. 
+ XCTAssertEqual(128, val1.subtype); + XCTAssertEqual(data1, val1.data); + XCTAssertTrue([val1.data isEqualToData:data1]); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); + XCTAssertFalse([val1 isEqual:val4]); +} + +- (void)testFieldValueMinKey { + FIRMinKey *minKey1 = [FIRMinKey shared]; + FIRMinKey *minKey2 = [FIRMinKey shared]; + XCTAssertEqual(minKey1, minKey2); + XCTAssertTrue([minKey1 isEqual:minKey2]); +} + +- (void)testFieldValueMaxKey { + FIRMaxKey *maxKey1 = [FIRMaxKey shared]; + FIRMaxKey *maxKey2 = [FIRMaxKey shared]; + XCTAssertEqual(maxKey1, maxKey2); + XCTAssertTrue([maxKey1 isEqual:maxKey2]); +} + +- (void)testFieldValueRegex { + FIRRegexValue *regex1 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + FIRRegexValue *regex2 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + XCTAssertTrue([regex1 isEqual:regex2]); + XCTAssertEqual(@"^foo", regex2.pattern); + XCTAssertEqual(@"i", regex2.options); +} + +- (void)testFieldValueInt32 { + FIRInt32Value *val1 = [[FIRInt32Value alloc] initWithValue:5]; + FIRInt32Value *val2 = [[FIRInt32Value alloc] initWithValue:5]; + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertEqual(5, val2.value); +} + +- (void)testFieldValueObjectId { + FIRBSONObjectId *oid1 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; + FIRBSONObjectId *oid2 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; + XCTAssertTrue([oid1 isEqual:oid2]); + XCTAssertEqual(@"abcd", oid2.value); +} + +- (void)testFieldValueBsonTimestamp { + FIRBSONTimestamp *val1 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBSONTimestamp *val2 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertEqual(1234U, val2.seconds); + XCTAssertEqual(100U, val2.increment); +} + +- (void)testFieldValueBsonBinaryData { + uint8_t byteArray[] = {0x01, 0x02, 0x03, 0x04, 0x05}; + NSData *data = [NSData dataWithBytes:byteArray 
length:sizeof(byteArray)]; + FIRBSONBinaryData *val1 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data]; + FIRBSONBinaryData *val2 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data]; + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertEqual(128, val2.subtype); + XCTAssertEqual(data, val2.data); +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Example/Tests/API/FIRFieldValueTests.mm b/Firestore/Example/Tests/API/FIRFieldValueTests.mm index a7a347b0328..a9b288a0e46 100644 --- a/Firestore/Example/Tests/API/FIRFieldValueTests.mm +++ b/Firestore/Example/Tests/API/FIRFieldValueTests.mm @@ -44,4 +44,4 @@ - (void)testEquals { @end -NS_ASSUME_NONNULL_END +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/Firestore/Source/API/FIRBSONBinaryData.mm b/Firestore/Source/API/FIRBSONBinaryData.mm new file mode 100644 index 00000000000..cb53b0ab54f --- /dev/null +++ b/Firestore/Source/API/FIRBSONBinaryData.mm @@ -0,0 +1,56 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRBSONBinaryData + +- (instancetype)initWithSubtype:(uint8_t)subtype data:(NSData *)data { + self = [super init]; + if (self) { + _subtype = subtype; + _data = data; + } + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRBSONBinaryData class]]) { + return NO; + } + + FIRBSONBinaryData *other = (FIRBSONBinaryData *)object; + return self.subtype == other.subtype && [self.data isEqualToData:other.data]; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRBSONBinaryData alloc] initWithSubtype:self.subtype data:self.data]; +} + +- (NSString *)description { + return [NSString stringWithFormat:@"", + (unsigned int)self.subtype, self.data]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRBSONObjectId.mm b/Firestore/Source/API/FIRBSONObjectId.mm new file mode 100644 index 00000000000..470e5f18fce --- /dev/null +++ b/Firestore/Source/API/FIRBSONObjectId.mm @@ -0,0 +1,54 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRBSONObjectId + +- (instancetype)initWithValue:(NSString *)value { + self = [super init]; + if (self) { + _value = value; + } + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRBSONObjectId class]]) { + return NO; + } + + FIRBSONObjectId *other = (FIRBSONObjectId *)object; + return [self.value isEqualToString:other.value]; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRBSONObjectId alloc] initWithValue:self.value]; +} + +- (NSString *)description { + return [NSString stringWithFormat:@"", self.value]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRBSONTimestamp.mm b/Firestore/Source/API/FIRBSONTimestamp.mm new file mode 100644 index 00000000000..9ae6735692d --- /dev/null +++ b/Firestore/Source/API/FIRBSONTimestamp.mm @@ -0,0 +1,56 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h"

NS_ASSUME_NONNULL_BEGIN

@implementation FIRBSONTimestamp

/**
 * Designated initializer.
 * @param seconds The underlying unsigned 32-bit seconds value.
 * @param increment The underlying unsigned 32-bit increment value.
 */
- (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment {
  self = [super init];
  if (self) {
    _seconds = seconds;
    _increment = increment;
  }
  return self;
}

/** Two FIRBSONTimestamp values are equal iff both components match. */
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }

  if (![object isKindOfClass:[FIRBSONTimestamp class]]) {
    return NO;
  }

  FIRBSONTimestamp *other = (FIRBSONTimestamp *)object;
  return self.seconds == other.seconds && self.increment == other.increment;
}

/** The value is immutable, so a "copy" is simply an equal new instance. */
- (id)copyWithZone:(__unused NSZone *_Nullable)zone {
  return [[FIRBSONTimestamp alloc] initWithSeconds:self.seconds increment:self.increment];
}

- (NSString *)description {
  // NOTE(review): the format string was lost in transit (it was empty with
  // dangling arguments); reconstructed to consume both format arguments.
  return [NSString stringWithFormat:@"<FIRBSONTimestamp: (seconds:%u), (increment:%u)>",
                                    self.seconds, self.increment];
}

@end

NS_ASSUME_NONNULL_END
#include "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h"

NS_ASSUME_NONNULL_BEGIN

@implementation FIRInt32Value

/**
 * Designated initializer.
 * @param value The 32-bit integer value to wrap.
 */
- (instancetype)initWithValue:(int)value {
  self = [super init];
  if (self) {
    _value = value;
  }
  return self;
}

/** Two FIRInt32Value instances are equal iff their wrapped integers match. */
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }

  if (![object isKindOfClass:[FIRInt32Value class]]) {
    return NO;
  }

  FIRInt32Value *other = (FIRInt32Value *)object;
  return self.value == other.value;
}

/** The value is immutable, so a "copy" is simply an equal new instance. */
- (id)copyWithZone:(__unused NSZone *_Nullable)zone {
  return [[FIRInt32Value alloc] initWithValue:self.value];
}

- (NSString *)description {
  // NOTE(review): the format string was lost in transit (it was empty with a
  // dangling argument); reconstructed to consume the single argument.
  return [NSString stringWithFormat:@"<FIRInt32Value: (%d)>", self.value];
}

@end

NS_ASSUME_NONNULL_END
#include "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h"

NS_ASSUME_NONNULL_BEGIN

@implementation FIRMaxKey

// Backing storage for the process-wide singleton.
static FIRMaxKey *sharedInstance = nil;
static dispatch_once_t onceToken;

/** Returns the single shared MaxKey instance, created lazily and thread-safely. */
+ (FIRMaxKey *)shared {
  dispatch_once(&onceToken, ^{
    sharedInstance = [[self alloc] init];
  });
  return sharedInstance;
}

- (instancetype)init {
  self = [super init];
  return self;
}

/** MaxKey is an immutable singleton, so copying returns the receiver itself. */
- (id)copyWithZone:(__unused NSZone *_Nullable)zone {
  return self;
}

/** All FIRMaxKey instances compare equal; only the class identity matters. */
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }
  if (object == nil || [self class] != [object class]) {
    return NO;
  }
  return YES;
}

- (NSString *)description {
  // The value carries no state, so a constant literal suffices; the original
  // used stringWithFormat: with an empty (transit-mangled) format string.
  return @"<FIRMaxKey: (maxKey)>";
}

@end

NS_ASSUME_NONNULL_END
#include "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h"

NS_ASSUME_NONNULL_BEGIN

@implementation FIRMinKey

// Backing storage for the process-wide singleton.
static FIRMinKey *sharedInstance = nil;
static dispatch_once_t onceToken;

/** Returns the single shared MinKey instance, created lazily and thread-safely. */
+ (FIRMinKey *)shared {
  dispatch_once(&onceToken, ^{
    sharedInstance = [[self alloc] init];
  });
  return sharedInstance;
}

- (instancetype)init {
  self = [super init];
  return self;
}

/** MinKey is an immutable singleton, so copying returns the receiver itself. */
- (id)copyWithZone:(__unused NSZone *_Nullable)zone {
  return self;
}

/** All FIRMinKey instances compare equal; only the class identity matters. */
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }
  if (object == nil || [self class] != [object class]) {
    return NO;
  }
  return YES;
}

- (NSString *)description {
  // The value carries no state, so a constant literal suffices; the original
  // used stringWithFormat: with an empty (transit-mangled) format string.
  return @"<FIRMinKey: (minKey)>";
}

@end

NS_ASSUME_NONNULL_END
#include "Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h"

NS_ASSUME_NONNULL_BEGIN

@implementation FIRRegexValue

/**
 * Designated initializer.
 * @param pattern The regular expression pattern.
 * @param options The regular expression options.
 */
- (instancetype)initWithPattern:(NSString *)pattern options:(NSString *)options {
  self = [super init];
  if (self) {
    // Both public properties are declared `copy`; snapshot the strings so
    // caller-supplied NSMutableStrings cannot mutate this value afterwards.
    _pattern = [pattern copy];
    _options = [options copy];
  }
  return self;
}

/** Two FIRRegexValue instances are equal iff both pattern and options match. */
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }

  if (![object isKindOfClass:[FIRRegexValue class]]) {
    return NO;
  }

  FIRRegexValue *other = (FIRRegexValue *)object;
  return
      [self.pattern isEqualToString:other.pattern] && [self.options isEqualToString:other.options];
}

/** The value is immutable, so a "copy" is simply an equal new instance. */
- (id)copyWithZone:(__unused NSZone *_Nullable)zone {
  return [[FIRRegexValue alloc] initWithPattern:self.pattern options:self.options];
}

- (NSString *)description {
  // NOTE(review): the format string was lost in transit (it was empty with
  // dangling arguments); reconstructed to consume both format arguments.
  return [NSString
      stringWithFormat:@"<FIRRegexValue: (pattern:%@), (options:%@)>", self.pattern, self.options];
}

@end

NS_ASSUME_NONNULL_END
/**
 * Encodes a MinKey as its sentinel wire representation:
 * a single-entry map { "__min__": null }.
 */
- (Message<google_firestore_v1_Value>)parseMinKey {
  __block Message<google_firestore_v1_Value> result;
  result->which_value_type = google_firestore_v1_Value_map_value_tag;
  result->map_value = {};
  result->map_value.fields_count = 1;
  result->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(1);
  result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kMinKeyTypeFieldValue);
  result->map_value.fields[0].value = *DeepClone(NullValue()).release();

  return std::move(result);
}

/**
 * Encodes a MaxKey as its sentinel wire representation:
 * a single-entry map { "__max__": null }.
 */
- (Message<google_firestore_v1_Value>)parseMaxKey {
  __block Message<google_firestore_v1_Value> result;
  result->which_value_type = google_firestore_v1_Value_map_value_tag;
  result->map_value = {};
  result->map_value.fields_count = 1;
  result->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(1);
  result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kMaxKeyTypeFieldValue);
  result->map_value.fields[0].value = *DeepClone(NullValue()).release();

  return std::move(result);
}

/**
 * Encodes a FIRRegexValue as a sentinel map wrapping an inner map holding the
 * pattern and options strings.
 */
- (Message<google_firestore_v1_Value>)parseRegexValue:(FIRRegexValue *)regexValue
                                              context:(ParseContext &&)context {
  NSString *pattern = regexValue.pattern;
  NSString *options = regexValue.options;

  // Inner map: { pattern-field: <string>, options-field: <string> }.
  __block Message<google_firestore_v1_Value> regexMessage;
  regexMessage->which_value_type = google_firestore_v1_Value_map_value_tag;
  regexMessage->map_value = {};
  regexMessage->map_value.fields_count = 2;
  regexMessage->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(2);
  regexMessage->map_value.fields[0].key =
      nanopb::CopyBytesArray(model::kRegexTypePatternFieldValue);
  regexMessage->map_value.fields[0].value = *[self encodeStringValue:MakeString(pattern)].release();
  regexMessage->map_value.fields[1].key =
      nanopb::CopyBytesArray(model::kRegexTypeOptionsFieldValue);
  regexMessage->map_value.fields[1].value = *[self encodeStringValue:MakeString(options)].release();

  // Outer map tags the inner map with the regex sentinel key.
  __block Message<google_firestore_v1_Value> result;
  result->which_value_type = google_firestore_v1_Value_map_value_tag;
  result->map_value = {};
  result->map_value.fields_count = 1;
  result->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(1);
  result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kRegexTypeFieldValue);
  result->map_value.fields[0].value = *regexMessage.release();

  return std::move(result);
}

/** Encodes a FIRInt32Value as a sentinel map holding a single integer field. */
- (Message<google_firestore_v1_Value>)parseInt32Value:(FIRInt32Value *)int32
                                              context:(ParseContext &&)context {
  __block Message<google_firestore_v1_Value> result;
  result->which_value_type = google_firestore_v1_Value_map_value_tag;
  result->map_value = {};
  result->map_value.fields_count = 1;
  result->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(1);
  result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kInt32TypeFieldValue);
  // The 32-bit integer value is encoded as a 64-bit long in the proto.
  result->map_value.fields[0].value =
      *[self encodeInteger:static_cast<int64_t>(int32.value)].release();

  return std::move(result);
}

/** Encodes a FIRBSONObjectId as a sentinel map holding its hex-string value. */
- (Message<google_firestore_v1_Value>)parseBsonObjectId:(FIRBSONObjectId *)oid
                                                context:(ParseContext &&)context {
  __block Message<google_firestore_v1_Value> result;
  result->which_value_type = google_firestore_v1_Value_map_value_tag;
  result->map_value = {};
  result->map_value.fields_count = 1;
  result->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(1);
  result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kBsonObjectIdTypeFieldValue);
  result->map_value.fields[0].value = *[self encodeStringValue:MakeString(oid.value)].release();

  return std::move(result);
}

/**
 * Encodes a FIRBSONTimestamp as a sentinel map wrapping an inner map holding
 * the seconds and increment components.
 */
- (Message<google_firestore_v1_Value>)parseBsonTimestamp:(FIRBSONTimestamp *)timestamp
                                                 context:(ParseContext &&)context {
  uint32_t seconds = timestamp.seconds;
  uint32_t increment = timestamp.increment;

  __block Message<google_firestore_v1_Value> timestampMessage;
  timestampMessage->which_value_type = google_firestore_v1_Value_map_value_tag;
  timestampMessage->map_value = {};
  timestampMessage->map_value.fields_count = 2;
  timestampMessage->map_value.fields =
      nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(2);

  timestampMessage->map_value.fields[0].key =
      nanopb::CopyBytesArray(model::kBsonTimestampTypeSecondsFieldValue);
  // The 32-bit unsigned integer value is encoded as a 64-bit long in the proto.
  timestampMessage->map_value.fields[0].value =
      *[self encodeInteger:static_cast<int64_t>(seconds)].release();

  timestampMessage->map_value.fields[1].key =
      nanopb::CopyBytesArray(model::kBsonTimestampTypeIncrementFieldValue);
  // The 32-bit unsigned integer value is encoded as a 64-bit long in the proto.
  timestampMessage->map_value.fields[1].value =
      *[self encodeInteger:static_cast<int64_t>(increment)].release();

  __block Message<google_firestore_v1_Value> result;
  result->which_value_type = google_firestore_v1_Value_map_value_tag;
  result->map_value = {};
  result->map_value.fields_count = 1;
  result->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(1);
  result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kBsonTimestampTypeFieldValue);
  result->map_value.fields[0].value = *timestampMessage.release();

  return std::move(result);
}

/**
 * Encodes a FIRBSONBinaryData as a sentinel map holding a bytes field whose
 * first byte is the subtype followed by the payload.
 */
- (Message<google_firestore_v1_Value>)parseBsonBinaryData:(FIRBSONBinaryData *)binaryData
                                                  context:(ParseContext &&)context {
  uint8_t subtypeByte = binaryData.subtype;
  NSData *data = binaryData.data;

  // We need to prepend the data with one byte representation of the subtype.
  NSMutableData *concatData = [NSMutableData data];
  [concatData appendBytes:&subtypeByte length:1];
  [concatData appendData:data];

  __block Message<google_firestore_v1_Value> result;
  result->which_value_type = google_firestore_v1_Value_map_value_tag;
  result->map_value = {};
  result->map_value.fields_count = 1;
  result->map_value.fields = nanopb::MakeArray<google_firestore_v1_MapValue_FieldsEntry>(1);
  result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kBsonBinaryDataTypeFieldValue);
  result->map_value.fields[0].value =
      *[self encodeBlob:(nanopb::MakeByteString(concatData))].release();

  return std::move(result);
}
parseRegexValue:regex context:std::move(context)]; + } else if ([input isKindOfClass:[FIRInt32Value class]]) { + FIRInt32Value *value = input; + return [self parseInt32Value:value context:std::move(context)]; + } else if ([input isKindOfClass:[FIRBSONObjectId class]]) { + FIRBSONObjectId *oid = input; + return [self parseBsonObjectId:oid context:std::move(context)]; + } else if ([input isKindOfClass:[FIRBSONTimestamp class]]) { + FIRBSONTimestamp *timestamp = input; + return [self parseBsonTimestamp:timestamp context:std::move(context)]; + } else if ([input isKindOfClass:[FIRBSONBinaryData class]]) { + FIRBSONBinaryData *binaryData = input; + return [self parseBsonBinaryData:binaryData context:std::move(context)]; } else { ThrowInvalidArgument("Unsupported type: %s%s", NSStringFromClass([input class]), context.FieldDescription()); diff --git a/Firestore/Source/API/FSTUserDataWriter.mm b/Firestore/Source/API/FSTUserDataWriter.mm index 1e170531782..5cc6eaf1bc7 100644 --- a/Firestore/Source/API/FSTUserDataWriter.mm +++ b/Firestore/Source/API/FSTUserDataWriter.mm @@ -23,6 +23,13 @@ #include "Firestore/Source/API/FIRDocumentReference+Internal.h" #include "Firestore/Source/API/FIRFieldValue+Internal.h" #include "Firestore/Source/API/converters.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h" #include "Firestore/core/include/firebase/firestore/geo_point.h" #include "Firestore/core/include/firebase/firestore/timestamp.h" #include "Firestore/core/src/api/firestore.h" @@ -49,6 +56,11 @@ using 
firebase::firestore::google_firestore_v1_MapValue; using firebase::firestore::google_firestore_v1_Value; using firebase::firestore::google_protobuf_Timestamp; +using firebase::firestore::model::kRawBsonTimestampTypeIncrementFieldValue; +using firebase::firestore::model::kRawBsonTimestampTypeSecondsFieldValue; +using firebase::firestore::model::kRawRegexTypeOptionsFieldValue; +using firebase::firestore::model::kRawRegexTypePatternFieldValue; +using firebase::firestore::model::kRawVectorValueFieldKey; using firebase::firestore::util::MakeNSString; using model::DatabaseId; using model::DocumentKey; @@ -96,6 +108,9 @@ - (id)convertedValue:(const google_firestore_v1_Value &)value { case TypeOrder::kBoolean: return value.boolean_value ? @YES : @NO; case TypeOrder::kNumber: + if (value.which_value_type == google_firestore_v1_Value_map_value_tag) { + return [self convertedInt32:value.map_value]; + } return value.which_value_type == google_firestore_v1_Value_integer_value_tag ? @(value.integer_value) : @(value.double_value); @@ -106,10 +121,22 @@ - (id)convertedValue:(const google_firestore_v1_Value &)value { case TypeOrder::kGeoPoint: return MakeFIRGeoPoint( GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude)); + case TypeOrder::kMinKey: + return [FIRMinKey shared]; + case TypeOrder::kMaxKey: + return [FIRMaxKey shared]; + case TypeOrder::kRegex: + return [self convertedRegex:value.map_value]; + case TypeOrder::kBsonObjectId: + return [self convertedBsonObjectId:value.map_value]; + case TypeOrder::kBsonTimestamp: + return [self convertedBsonTimestamp:value.map_value]; + case TypeOrder::kBsonBinaryData: + return [self convertedBsonBinaryData:value.map_value]; case TypeOrder::kVector: return [self convertedVector:value.map_value]; - case TypeOrder::kMaxValue: - // It is not possible for users to construct a kMaxValue manually. + case TypeOrder::kInternalMaxValue: + // It is not possible for users to construct a kInternalMaxValue manually. 
/**
 * Decodes the sentinel regex map produced by the server into a FIRRegexValue.
 * Falls back to empty pattern/options if the expected shape is not present.
 */
- (FIRRegexValue *)convertedRegex:(const google_firestore_v1_MapValue &)mapValue {
  NSString *pattern = @"";
  NSString *options = @"";
  if (mapValue.fields_count == 1) {
    const google_firestore_v1_Value &innerValue = mapValue.fields[0].value;
    if (innerValue.which_value_type == google_firestore_v1_Value_map_value_tag) {
      for (pb_size_t i = 0; i < innerValue.map_value.fields_count; ++i) {
        absl::string_view key = MakeStringView(innerValue.map_value.fields[i].key);
        const google_firestore_v1_Value &value = innerValue.map_value.fields[i].value;
        if ((0 == key.compare(absl::string_view(kRawRegexTypePatternFieldValue))) &&
            value.which_value_type == google_firestore_v1_Value_string_value_tag) {
          pattern = MakeNSString(MakeStringView(value.string_value));
        }
        if ((0 == key.compare(absl::string_view(kRawRegexTypeOptionsFieldValue))) &&
            value.which_value_type == google_firestore_v1_Value_string_value_tag) {
          options = MakeNSString(MakeStringView(value.string_value));
        }
      }
    }
  }

  return [[FIRRegexValue alloc] initWithPattern:pattern options:options];
}

/**
 * Decodes the sentinel 32-bit-integer map into a FIRInt32Value. The wire value
 * is a 64-bit integer that is narrowed back to 32 bits.
 */
- (FIRInt32Value *)convertedInt32:(const google_firestore_v1_MapValue &)mapValue {
  int32_t value = 0;
  if (mapValue.fields_count == 1) {
    value = static_cast<int32_t>(mapValue.fields[0].value.integer_value);
  }

  return [[FIRInt32Value alloc] initWithValue:value];
}

/**
 * Decodes the sentinel ObjectId map into a FIRBSONObjectId. Falls back to an
 * empty string if the expected string field is not present.
 */
- (FIRBSONObjectId *)convertedBsonObjectId:(const google_firestore_v1_MapValue &)mapValue {
  NSString *oid = @"";
  if (mapValue.fields_count == 1) {
    const google_firestore_v1_Value &oidValue = mapValue.fields[0].value;
    if (oidValue.which_value_type == google_firestore_v1_Value_string_value_tag) {
      oid = MakeNSString(MakeStringView(oidValue.string_value));
    }
  }

  return [[FIRBSONObjectId alloc] initWithValue:oid];
}

/**
 * Decodes the sentinel BSON-timestamp map (inner map with seconds/increment
 * fields) into a FIRBSONTimestamp. Missing fields default to zero.
 */
- (FIRBSONTimestamp *)convertedBsonTimestamp:(const google_firestore_v1_MapValue &)mapValue {
  uint32_t seconds = 0;
  uint32_t increment = 0;
  if (mapValue.fields_count == 1) {
    const google_firestore_v1_Value &innerValue = mapValue.fields[0].value;
    if (innerValue.which_value_type == google_firestore_v1_Value_map_value_tag) {
      for (pb_size_t i = 0; i < innerValue.map_value.fields_count; ++i) {
        absl::string_view key = MakeStringView(innerValue.map_value.fields[i].key);
        const google_firestore_v1_Value &value = innerValue.map_value.fields[i].value;
        if ((0 == key.compare(absl::string_view(kRawBsonTimestampTypeSecondsFieldValue))) &&
            value.which_value_type == google_firestore_v1_Value_integer_value_tag) {
          // The value from the server is guaranteed to fit in a 32-bit unsigned integer.
          seconds = static_cast<uint32_t>(value.integer_value);
        }
        if ((0 == key.compare(absl::string_view(kRawBsonTimestampTypeIncrementFieldValue))) &&
            value.which_value_type == google_firestore_v1_Value_integer_value_tag) {
          // The value from the server is guaranteed to fit in a 32-bit unsigned integer.
          increment = static_cast<uint32_t>(value.integer_value);
        }
      }
    }
  }

  return [[FIRBSONTimestamp alloc] initWithSeconds:seconds increment:increment];
}

/**
 * Decodes the sentinel binary-data map into a FIRBSONBinaryData. The wire
 * bytes value holds the one-byte subtype followed by the payload.
 */
- (FIRBSONBinaryData *)convertedBsonBinaryData:(const google_firestore_v1_MapValue &)mapValue {
  uint8_t subtype = 0;
  NSData *data = [[NSData alloc] init];

  if (mapValue.fields_count == 1) {
    const google_firestore_v1_Value &dataValue = mapValue.fields[0].value;
    if (dataValue.which_value_type == google_firestore_v1_Value_bytes_value_tag) {
      NSData *concatData = MakeNSData(dataValue.bytes_value);
      if (concatData.length > 0) {
        uint8_t buffer[1];
        [concatData getBytes:buffer length:1];
        subtype = buffer[0];
      }
      if (concatData.length > 1) {
        data = [concatData subdataWithRange:NSMakeRange(1, concatData.length - 1)];
      }
    }
  }

  return [[FIRBSONBinaryData alloc] initWithSubtype:subtype data:data];
}
#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/**
 * Represents a BSON Binary Data type in Firestore documents.
 */
NS_SWIFT_SENDABLE
NS_SWIFT_NAME(BSONBinaryData)
__attribute__((objc_subclassing_restricted))
// NSCopying conformance is grounded by the copyWithZone: implementation in
// FIRBSONBinaryData.mm.
@interface FIRBSONBinaryData : NSObject <NSCopying>

/** An 8-bit unsigned integer denoting the subtype of the data. */
@property(nonatomic, readonly) uint8_t subtype;

/** The binary data. */
@property(nonatomic, copy, readonly) NSData *data;

/** :nodoc: */
- (instancetype)init NS_UNAVAILABLE;

/**
 * Creates a `BSONBinaryData` constructed with the given subtype and data.
 * @param subtype An 8-bit unsigned integer denoting the subtype of the data.
 * @param data The binary data.
 */
- (instancetype)initWithSubtype:(uint8_t)subtype data:(nonnull NSData *)data;

/** Returns true if the given object is equal to this, and false otherwise. */
- (BOOL)isEqual:(id)object;

@end

NS_ASSUME_NONNULL_END
NS_SWIFT_SENDABLE
NS_SWIFT_NAME(BSONObjectId)
__attribute__((objc_subclassing_restricted))
// NSCopying conformance is grounded by the copyWithZone: implementation in
// FIRBSONObjectId.mm.
@interface FIRBSONObjectId : NSObject <NSCopying>

/** The 24-character hex string representation of the ObjectId. */
@property(nonatomic, copy, readonly) NSString *value;

/** :nodoc: */
- (instancetype)init NS_UNAVAILABLE;

/**
 * Creates a `BSONObjectId` constructed with the given value.
 * @param value The 24-character hex string representation of the ObjectId.
 */
- (instancetype)initWithValue:(nonnull NSString *)value NS_SWIFT_NAME(init(_:));

/** Returns true if the given object is equal to this, and false otherwise. */
- (BOOL)isEqual:(id)object;

@end

NS_ASSUME_NONNULL_END
NS_SWIFT_SENDABLE
NS_SWIFT_NAME(BSONTimestamp)
__attribute__((objc_subclassing_restricted))
// NSCopying conformance is grounded by the copyWithZone: implementation in
// FIRBSONTimestamp.mm.
@interface FIRBSONTimestamp : NSObject <NSCopying>

/** The underlying unsigned 32-bit integer for seconds. */
@property(nonatomic, readonly) uint32_t seconds;

/** The underlying unsigned 32-bit integer for increment. */
@property(nonatomic, readonly) uint32_t increment;

/** :nodoc: */
- (instancetype)init NS_UNAVAILABLE;

/**
 * Creates a `BSONTimestamp` with the given seconds and increment values.
 * @param seconds The underlying unsigned 32-bit integer for seconds.
 * @param increment The underlying unsigned 32-bit integer for increment.
 */
- (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment;

/** Returns true if the given object is equal to this, and false otherwise. */
- (BOOL)isEqual:(id)object;

@end

NS_ASSUME_NONNULL_END
NS_SWIFT_SENDABLE
NS_SWIFT_NAME(Int32Value)
__attribute__((objc_subclassing_restricted))
// NSCopying conformance is grounded by the copyWithZone: implementation in
// FIRInt32Value.mm.
@interface FIRInt32Value : NSObject <NSCopying>

/** The 32-bit integer value. */
@property(nonatomic, readonly) int32_t value;

/** :nodoc: */
- (instancetype)init NS_UNAVAILABLE;

/**
 * Creates an `Int32Value` constructed with the given value.
 * @param value The 32-bit integer value to be stored.
 */
- (instancetype)initWithValue:(int)value NS_SWIFT_NAME(init(_:));

/** Returns true if the given object is equal to this, and false otherwise. */
- (BOOL)isEqual:(id)object;

@end

NS_ASSUME_NONNULL_END

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/**
 * Represents a "Max Key" type in Firestore documents.
 */
NS_SWIFT_SENDABLE
NS_SWIFT_NAME(MaxKey)
__attribute__((objc_subclassing_restricted))
// NSCopying conformance is grounded by the copyWithZone: implementation in
// FIRMaxKey.mm (which returns the singleton itself).
@interface FIRMaxKey : NSObject <NSCopying>

/** Returns the only instance of MaxKey. */
@property(class, readonly) FIRMaxKey *shared;

/** Returns true if the given object is equal to this, and false otherwise. */
- (BOOL)isEqual:(id)object;

@end

NS_ASSUME_NONNULL_END

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

/**
 * Represents a "Min Key" type in Firestore documents.
 */
NS_SWIFT_SENDABLE
NS_SWIFT_NAME(MinKey)
__attribute__((objc_subclassing_restricted))
// NSCopying conformance is grounded by the copyWithZone: implementation in
// FIRMinKey.mm (which returns the singleton itself).
@interface FIRMinKey : NSObject <NSCopying>

/** The only instance of MinKey. */
@property(class, readonly) FIRMinKey *shared;

/** Returns true if the given object is equal to this, and false otherwise. */
- (BOOL)isEqual:(id)object;

@end

NS_ASSUME_NONNULL_END
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a regular expression type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(RegexValue) +__attribute__((objc_subclassing_restricted)) +@interface FIRRegexValue : NSObject + +/** The regular expression pattern */ +@property(nonatomic, copy, readonly) NSString *pattern; + +/** The regular expression options */ +@property(nonatomic, copy, readonly) NSString *options; + +/** :nodoc: */ +- (instancetype)init NS_UNAVAILABLE; + +/** + * Creates a `RegexValue` constructed with the given pattern and options. + * @param pattern The regular expression pattern. + * @param options The regular expression options. + */ +- (instancetype)initWithPattern:(nonnull NSString *)pattern options:(nonnull NSString *)options; + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Swift/Source/Codable/BSONBinaryData+Codable.swift b/Firestore/Swift/Source/Codable/BSONBinaryData+Codable.swift new file mode 100644 index 00000000000..4a61ca4f812 --- /dev/null +++ b/Firestore/Swift/Source/Codable/BSONBinaryData+Codable.swift @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an BSONBinaryData. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the BSONBinaryData class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableBSONBinaryData: Codable { + var subtype: UInt8 { get } + var data: Data { get } + + init(subtype: UInt8, data: Data) +} + +/** The keys in an BSONBinaryData. Must match the properties of CodableBSONBinaryData. */ +private enum BSONBinaryDataKeys: String, CodingKey { + case subtype + case data +} + +/** + * An extension of BSONBinaryData that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. 
+ */ +extension CodableBSONBinaryData { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: BSONBinaryDataKeys.self) + let subtype = try container.decode(UInt8.self, forKey: .subtype) + let data = try container.decode(Data.self, forKey: .data) + self.init(subtype: subtype, data: data) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: BSONBinaryDataKeys.self) + try container.encode(subtype, forKey: .subtype) + try container.encode(data, forKey: .data) + } +} + +/** Extends BSONBinaryData to conform to Codable. */ +extension FirebaseFirestore.BSONBinaryData: FirebaseFirestore.CodableBSONBinaryData {} diff --git a/Firestore/Swift/Source/Codable/BSONObjectId+Codable.swift b/Firestore/Swift/Source/Codable/BSONObjectId+Codable.swift new file mode 100644 index 00000000000..2f7761bb5c0 --- /dev/null +++ b/Firestore/Swift/Source/Codable/BSONObjectId+Codable.swift @@ -0,0 +1,62 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an BSONObjectId. 
+ * + * Note: this protocol exists as a workaround for the Swift compiler: if the BSONObjectId class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableBSONObjectId: Codable { + var value: String { get } + + init(_ value: String) +} + +/** The keys in an BSONObjectId. Must match the properties of CodableBSONObjectId. */ +private enum BSONObjectIdKeys: String, CodingKey { + case value +} + +/** + * An extension of BSONObjectId that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableBSONObjectId { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: BSONObjectIdKeys.self) + let value = try container.decode(String.self, forKey: .value) + self.init(value) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: BSONObjectIdKeys.self) + try container.encode(value, forKey: .value) + } +} + +/** Extends BSONObjectId to conform to Codable. */ +extension FirebaseFirestore.BSONObjectId: FirebaseFirestore.CodableBSONObjectId {} diff --git a/Firestore/Swift/Source/Codable/BSONTimestamp+Codable.swift b/Firestore/Swift/Source/Codable/BSONTimestamp+Codable.swift new file mode 100644 index 00000000000..a3ab054a5fc --- /dev/null +++ b/Firestore/Swift/Source/Codable/BSONTimestamp+Codable.swift @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an BSONTimestamp. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the BSONTimestamp class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableBSONTimestamp: Codable { + var seconds: UInt32 { get } + var increment: UInt32 { get } + + init(seconds: UInt32, increment: UInt32) +} + +/** The keys in an BSONTimestamp. Must match the properties of CodableBSONTimestamp. */ +private enum BSONTimestampKeys: String, CodingKey { + case seconds + case increment +} + +/** + * An extension of BSONTimestamp that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. 
+ */ +extension CodableBSONTimestamp { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: BSONTimestampKeys.self) + let seconds = try container.decode(UInt32.self, forKey: .seconds) + let increment = try container.decode(UInt32.self, forKey: .increment) + self.init(seconds: seconds, increment: increment) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: BSONTimestampKeys.self) + try container.encode(seconds, forKey: .seconds) + try container.encode(increment, forKey: .increment) + } +} + +/** Extends BSONTimestamp to conform to Codable. */ +extension FirebaseFirestore.BSONTimestamp: FirebaseFirestore.CodableBSONTimestamp {} diff --git a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift index 20f5a6e8b2b..65b19af0517 100644 --- a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift +++ b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift @@ -32,6 +32,13 @@ struct FirestorePassthroughTypes: StructureCodingPassthroughTypeResolver { t is Timestamp || t is FieldValue || t is DocumentReference || - t is VectorValue + t is VectorValue || + t is MinKey || + t is MaxKey || + t is RegexValue || + t is Int32Value || + t is BSONObjectId || + t is BSONTimestamp || + t is BSONBinaryData } } diff --git a/Firestore/Swift/Source/Codable/Int32Value+Codable.swift b/Firestore/Swift/Source/Codable/Int32Value+Codable.swift new file mode 100644 index 00000000000..798cdcb9efb --- /dev/null +++ b/Firestore/Swift/Source/Codable/Int32Value+Codable.swift @@ -0,0 +1,62 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an Int32Value. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the Int32Value class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableInt32Value: Codable { + var value: Int32 { get } + + init(_ value: Int32) +} + +/** The keys in an Int32Value. Must match the properties of CodableInt32Value. */ +private enum Int32ValueKeys: String, CodingKey { + case value +} + +/** + * An extension of Int32Value that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableInt32Value { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: Int32ValueKeys.self) + let value = try container.decode(Int32.self, forKey: .value) + self.init(value) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: Int32ValueKeys.self) + try container.encode(value, forKey: .value) + } +} + +/** Extends Int32Value to conform to Codable. 
*/ +extension FirebaseFirestore.Int32Value: FirebaseFirestore.CodableInt32Value {} diff --git a/Firestore/Swift/Source/Codable/MaxKey+Codable.swift b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift new file mode 100644 index 00000000000..cf3e840903c --- /dev/null +++ b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift @@ -0,0 +1,65 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of a MaxKey. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the MaxKey class + * was extended directly to conform to Codable, the methods implementing the protocol would need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableMaxKey: Codable { + init() +} + +/** The keys in a MaxKey. */ +private enum MaxKeyKeys: String, CodingKey { + // We'll use a simple CodingKeys enum with a single case + // to represent the presence of the singleton. + case isFirestoreMaxKey +} + +/** + * An extension of MaxKey that implements the behavior of the Codable protocol. 
+ * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableMaxKey { + public init(from decoder: Decoder) throws { + // The presence of the `isFirestoreMaxKey` key is enough to know that we + // should return the singleton. + let container = try decoder.container(keyedBy: MaxKeyKeys.self) + _ = try container.decodeIfPresent(Bool.self, forKey: .isFirestoreMaxKey) + self.init() + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: MaxKeyKeys.self) + // Encode a value of `true` to indicate the presence of MaxKey + try container.encode(true, forKey: .isFirestoreMaxKey) + } +} + +/** Extends MaxKey to conform to Codable. */ +extension FirebaseFirestore.MaxKey: FirebaseFirestore.CodableMaxKey {} diff --git a/Firestore/Swift/Source/Codable/MinKey+Codable.swift b/Firestore/Swift/Source/Codable/MinKey+Codable.swift new file mode 100644 index 00000000000..9f388eceadb --- /dev/null +++ b/Firestore/Swift/Source/Codable/MinKey+Codable.swift @@ -0,0 +1,65 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of a MinKey. 
+ * + * Note: this protocol exists as a workaround for the Swift compiler: if the MinKey class + * was extended directly to conform to Codable, the methods implementing the protocol would need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableMinKey: Codable { + init() +} + +/** The keys in a MinKey. */ +private enum MinKeyKeys: String, CodingKey { + // We'll use a simple CodingKeys enum with a single case + // to represent the presence of the singleton. + case isFirestoreMinKey +} + +/** + * An extension of MinKey that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableMinKey { + public init(from decoder: Decoder) throws { + // The presence of the `isFirestoreMinKey` key is enough to know that we + // should return the singleton. + let container = try decoder.container(keyedBy: MinKeyKeys.self) + _ = try container.decodeIfPresent(Bool.self, forKey: .isFirestoreMinKey) + self.init() + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: MinKeyKeys.self) + // Encode a value of `true` to indicate the presence of MinKey + try container.encode(true, forKey: .isFirestoreMinKey) + } +} + +/** Extends MinKey to conform to Codable. 
*/ +extension FirebaseFirestore.MinKey: FirebaseFirestore.CodableMinKey {} diff --git a/Firestore/Swift/Source/Codable/RegexValue+Codable.swift b/Firestore/Swift/Source/Codable/RegexValue+Codable.swift new file mode 100644 index 00000000000..fc750f49495 --- /dev/null +++ b/Firestore/Swift/Source/Codable/RegexValue+Codable.swift @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of a RegexValue. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the RegexValue class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableRegexValue: Codable { + var pattern: String { get } + var options: String { get } + + init(pattern: String, options: String) +} + +/** The keys in a RegexValue. Must match the properties of CodableRegexValue. */ +private enum RegexValueKeys: String, CodingKey { + case pattern + case options +} + +/** + * An extension of RegexValue that implements the behavior of the Codable protocol. 
+ * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableRegexValue { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: RegexValueKeys.self) + let pattern = try container.decode(String.self, forKey: .pattern) + let options = try container.decode(String.self, forKey: .options) + self.init(pattern: pattern, options: options) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: RegexValueKeys.self) + try container.encode(pattern, forKey: .pattern) + try container.encode(options, forKey: .options) + } +} + +/** Extends RegexValue to conform to Codable. */ +extension FirebaseFirestore.RegexValue: FirebaseFirestore.CodableRegexValue {} diff --git a/Firestore/Swift/Source/Codable/VectorValue+Codable.swift b/Firestore/Swift/Source/Codable/VectorValue+Codable.swift index 45f3176a74a..0c69db6eee9 100644 --- a/Firestore/Swift/Source/Codable/VectorValue+Codable.swift +++ b/Firestore/Swift/Source/Codable/VectorValue+Codable.swift @@ -29,7 +29,7 @@ private protocol CodableVectorValue: Codable { init(__array: [NSNumber]) } -/** The keys in a Timestamp. Must match the properties of CodableTimestamp. */ +/** The keys in a VectorValue. Must match the properties of CodableVectorValue. */ private enum VectorValueKeys: String, CodingKey { case array } diff --git a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift new file mode 100644 index 00000000000..5354b856e88 --- /dev/null +++ b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift @@ -0,0 +1,720 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import Combine +import FirebaseFirestore +import Foundation + +// iOS 15 required for test implementation, not BSON types +@available(iOS 15, tvOS 15, macOS 12.0, macCatalyst 13, watchOS 7, *) +class BsonTypesIntegrationTests: FSTIntegrationTestCase { + func toDataArray(_ snapshot: QuerySnapshot) -> [[String: Any]] { + return snapshot.documents.map { document in + document.data() + } + } + + func toDocIdArray(_ snapshot: QuerySnapshot) -> [String] { + return snapshot.documents.map { document in + document.documentID + } + } + + func setDocumentData( + _ documentDataMap: [String: [String: Any]], + toCollection: CollectionReference + ) async { + for (documentName, documentData) in documentDataMap { + do { + try await toCollection.document(documentName).setData(documentData) + } catch { + print("Failed to write documents to collection.") + } + } + } + + func verifySnapshot(snapshot: QuerySnapshot, + allData: [String: [String: Any]], + expectedDocIds: [String], + description: String) throws { + XCTAssertEqual(snapshot.count, expectedDocIds.count) + + XCTAssertTrue(expectedDocIds == toDocIdArray(snapshot), + "Did not get the same documents in query result set for '\(description)'. Expected Doc IDs: \(expectedDocIds), Actual Doc IDs: \(toDocIdArray(snapshot))") + + for i in 0 ..< expectedDocIds.count { + let expectedDocId = expectedDocIds[i] + let expectedDocData = allData[expectedDocId] ?? 
[:] + let actualDocData = snapshot.documents[i].data() + + // We don't need to compare expectedDocId and actualDocId because + // it's already been checked above. We only compare the data below. + let nsExpected = NSDictionary(dictionary: expectedDocData) + let nsActual = NSDictionary(dictionary: actualDocData) + XCTAssertTrue( + nsExpected.isEqual(nsActual), + "Did not get the same document content. Expected Doc Data: \(nsExpected), Actual Doc Data:\(nsActual)" + ) + } + } + + // Asserts that the given query produces the expected result for all of the + // following scenarios: + // 1. Using a snapshot listener to get the first snapshot for the query. + // 2. Performing the given query using source=server. + // 3. Performing the given query using source=cache. + func assertSdkQueryResultsConsistentWithBackend(_ documentDataMap: [String: [String: Any]], + collection: CollectionReference, + query: Query, + expectedResult: [String]) async throws { + let watchSnapshot = try await Future() { promise in + query.addSnapshotListener { snapshot, error in + if let error { + promise(Result.failure(error)) + } + if let snapshot { + promise(Result.success(snapshot)) + } + } + }.value + + try verifySnapshot( + snapshot: watchSnapshot, + allData: documentDataMap, + expectedDocIds: expectedResult, + description: "snapshot listener" + ) + + checkOnlineAndOfflineCollection(collection, query: query, matchesResult: expectedResult) + } + + func testCanWriteAndReadBsonTypes() async throws { + let collection = collectionRef() + let ref = try await collection.addDocument(data: [ + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BSONObjectId("507f191e810c19729de860ea"), + "int32": Int32Value(1), + "min": MinKey.shared, + "max": MaxKey.shared, + "regex": RegexValue(pattern: "^foo", options: "i"), + ]) + + try await ref.updateData([ + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "timestamp": BSONTimestamp(seconds: 1, increment: 2), + "int32": 
Int32Value(2), + ]) + + let snapshot = try await ref.getDocument() + XCTAssertEqual( + snapshot.get("objectId") as? BSONObjectId, + BSONObjectId("507f191e810c19729de860ea") + ) + XCTAssertEqual( + snapshot.get("int32") as? Int32Value, + Int32Value(2) + ) + XCTAssertEqual( + snapshot.get("min") as? MinKey, + MinKey.shared + ) + XCTAssertEqual( + snapshot.get("max") as? MaxKey, + MaxKey.shared + ) + XCTAssertEqual( + snapshot.get("binary") as? BSONBinaryData, + BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) + ) + XCTAssertEqual( + snapshot.get("timestamp") as? BSONTimestamp, + BSONTimestamp(seconds: 1, increment: 2) + ) + XCTAssertEqual( + snapshot.get("regex") as? RegexValue, + RegexValue(pattern: "^foo", options: "i") + ) + } + + func testCanWriteAndReadBsonTypesOffline() throws { + let collection = collectionRef() + disableNetwork() + + let ref = collection.document("doc") + + // Adding docs to cache, do not wait for promise to resolve. + ref.setData([ + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BSONObjectId("507f191e810c19729de860ea"), + "int32": Int32Value(1), + "min": MinKey.shared, + "max": MaxKey.shared, + "regex": RegexValue(pattern: "^foo", options: "i"), + ]) + ref.updateData([ + "binary": BSONBinaryData(subtype: 128, data: Data([1, 2, 3])), + "timestamp": BSONTimestamp(seconds: 1, increment: 2), + "int32": Int32Value(2), + ]) + + let snapshot = readDocument(forRef: ref, source: FirestoreSource.cache) + XCTAssertEqual( + snapshot.get("objectId") as? BSONObjectId, + BSONObjectId("507f191e810c19729de860ea") + ) + XCTAssertEqual( + snapshot.get("int32") as? Int32Value, + Int32Value(2) + ) + XCTAssertEqual( + snapshot.get("min") as? MinKey, + MinKey.shared + ) + XCTAssertEqual( + snapshot.get("max") as? MaxKey, + MaxKey.shared + ) + XCTAssertEqual( + snapshot.get("binary") as? BSONBinaryData, + BSONBinaryData(subtype: 128, data: Data([1, 2, 3])) + ) + XCTAssertEqual( + snapshot.get("timestamp") as? 
BSONTimestamp, + BSONTimestamp(seconds: 1, increment: 2) + ) + XCTAssertEqual( + snapshot.get("regex") as? RegexValue, + RegexValue(pattern: "^foo", options: "i") + ) + } + + func testCanFilterAndOrderObjectIds() async throws { + let testDocs = [ + "a": ["key": BSONObjectId("507f191e810c19729de860ea")], + "b": ["key": BSONObjectId("507f191e810c19729de860eb")], + "c": ["key": BSONObjectId("507f191e810c19729de860ec")], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isGreaterThan: BSONObjectId("507f191e810c19729de860ea")) + .order(by: "key", descending: true) + + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField("key", in: + [ + BSONObjectId("507f191e810c19729de860ea"), + BSONObjectId("507f191e810c19729de860eb"), + ]) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["b", "a"] + ) + } + + func testCanFilterAndOrderInt32Values() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": Int32Value(-1)], + "b": ["key": Int32Value(1)], + "c": ["key": Int32Value(2)], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isGreaterThanOrEqualTo: Int32Value(1)) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField("key", notIn: [Int32Value(1)]) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "a"] + ) + } + + func 
testCanFilterAndOrderTimestampValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": BSONTimestamp(seconds: 1, increment: 1)], + "b": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "c": ["key": BSONTimestamp(seconds: 2, increment: 1)], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isGreaterThan: BSONTimestamp(seconds: 1, increment: 1)) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField("key", isNotEqualTo: BSONTimestamp(seconds: 1, increment: 1)) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "b"] + ) + } + + func testCanFilterAndOrderBinaryValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "b": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 4]))], + "c": ["key": BSONBinaryData(subtype: 2, data: Data([1, 2, 3]))], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField( + "key", + isGreaterThan: BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) + ) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField( + "key", + isGreaterThanOrEqualTo: BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) + ) + .whereField( + "key", + isLessThan: BSONBinaryData(subtype: 2, data: Data([1, 2, 3])) + ) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, 
+ query: query, + expectedResult: ["b", "a"] + ) + } + + func testCanFilterAndOrderRegexValues() async throws { + let testDocs = [ + "a": ["key": RegexValue(pattern: "^bar", options: "i")], + "b": ["key": RegexValue(pattern: "^bar", options: "x")], + "c": ["key": RegexValue(pattern: "^baz", options: "i")], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = + collection.whereFilter( + Filter.orFilter([ + Filter.whereField("key", isGreaterThan: RegexValue(pattern: "^bar", options: "x")), + Filter.whereField("key", isNotEqualTo: RegexValue(pattern: "^bar", options: "x")), + ]) + ).order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "a"] + ) + } + + func testCanFilterAndOrderMinKeyValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": MinKey.shared], + "b": ["key": MinKey.shared], + "c": ["key": NSNull()], + "d": ["key": 1], + "e": ["key": MaxKey.shared], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isEqualTo: MinKey.shared) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["b", "a"] + ) + + // TODO(b/410032145): This currently fails, and is fixed by + // PR #14704. Uncomment this when moving to the main branch. 
+ // var query2 = collection + // .whereField("key", isNotEqualTo: MinKey.shared)) + // .order(by: "key") + // try await assertSdkQueryResultsConsistentWithBackend( + // testDocs, + // collection: collection, + // query: query2, + // expectedResult: ["d", "e"] + // ) + + query = collection + .whereField("key", isGreaterThanOrEqualTo: MinKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["a", "b"] + ) + + query = collection + .whereField("key", isLessThanOrEqualTo: MinKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["a", "b"] + ) + + query = collection + .whereField("key", isGreaterThan: MinKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isLessThan: MinKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isLessThan: 1) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: [] + ) + } + + func testCanFilterAndOrderMaxKeyValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": MinKey.shared], + "b": ["key": 1], + "c": ["key": MaxKey.shared], + "d": ["key": MaxKey.shared], + "e": ["key": NSNull()], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isEqualTo: MaxKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: 
["c", "d"] + ) + + // TODO(b/410032145): This currently fails, and is fixed by + // PR #14704. Uncomment this when moving to the main branch. + // query = collection + // .whereField("key", isNotEqualTo: MaxKey.shared)) + // .order(by: "key") + // try await assertSdkQueryResultsConsistentWithBackend( + // testDocs, + // collection: collection, + // query: query, + // expectedResult: ["a", "b"] + // ) + + query = collection + .whereField("key", isGreaterThanOrEqualTo: MaxKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "d"] + ) + + query = collection + .whereField("key", isLessThanOrEqualTo: MaxKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["c", "d"] + ) + + query = collection + .whereField("key", isGreaterThan: MaxKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isLessThan: MaxKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isGreaterThan: 1) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: [] + ) + } + + func testCanHandleNullWithBsonValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": MinKey.shared], + "b": ["key": NSNull()], + "c": ["key": NSNull()], + "d": ["key": 1], + "e": ["key": MaxKey.shared], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isEqualTo: NSNull()) + .order(by: "key") + try 
await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["b", "c"] + ) + + query = collection + .whereField("key", isNotEqualTo: NSNull()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["a", "d", "e"] + ) + } + + func testCanOrderBsonValues() async throws { + // This test includes several BSON values of different types and ensures + // correct inter-type and intra-type order for BSON values. + let testDocs: [String: [String: Any]] = [ + "bsonObjectId1": ["key": BSONObjectId("507f191e810c19729de860ea")], + "bsonObjectId2": ["key": BSONObjectId("507f191e810c19729de860eb")], + "bsonObjectId3": ["key": BSONObjectId("407f191e810c19729de860ea")], + "regex1": ["key": RegexValue(pattern: "^bar", options: "m")], + "regex2": ["key": RegexValue(pattern: "^bar", options: "i")], + "regex3": ["key": RegexValue(pattern: "^baz", options: "i")], + "bsonTimestamp1": ["key": BSONTimestamp(seconds: 2, increment: 0)], + "bsonTimestamp2": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "bsonTimestamp3": ["key": BSONTimestamp(seconds: 1, increment: 1)], + "bsonBinary1": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "bsonBinary2": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 4]))], + "bsonBinary3": ["key": BSONBinaryData(subtype: 2, data: Data([1, 2, 2]))], + "int32Value1": ["key": Int32Value(-1)], + "int32Value2": ["key": Int32Value(1)], + "int32Value3": ["key": Int32Value(0)], + "minKey1": ["key": MinKey.shared], + "minKey2": ["key": MinKey.shared], + "maxKey1": ["key": MaxKey.shared], + "maxKey2": ["key": MaxKey.shared], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = collection.order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend(testDocs, + collection: collection, + query: query, 
expectedResult: [ + "maxKey2", + "maxKey1", + "regex3", + "regex1", + "regex2", + "bsonObjectId2", + "bsonObjectId1", + "bsonObjectId3", + "bsonBinary3", + "bsonBinary2", + "bsonBinary1", + "bsonTimestamp1", + "bsonTimestamp2", + "bsonTimestamp3", + "int32Value2", + "int32Value3", + "int32Value1", + "minKey2", + "minKey1", + ]) + } + + func testCanOrderValuesOfDifferentTypes() async throws { + // This test has only 1 value of each type, and ensures correct order + // across all types. + let collection = collectionRef() + let testDocs: [String: [String: Any]] = [ + "nullValue": ["key": NSNull()], + "minValue": ["key": MinKey.shared], + "booleanValue": ["key": true], + "nanValue": ["key": Double.nan], + "int32Value": ["key": Int32Value(1)], + "doubleValue": ["key": 2.0], + "integerValue": ["key": 3], + "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], + "bsonTimestampValue": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "stringValue": ["key": "string"], + "bytesValue": ["key": Data([0, 1, 255])], + "bsonBinaryValue": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "referenceValue": ["key": collection.document("doc")], + "objectIdValue": ["key": BSONObjectId("507f191e810c19729de860ea")], + "geoPointValue": ["key": GeoPoint(latitude: 0, longitude: 0)], + "regexValue": ["key": RegexValue(pattern: "^foo", options: "i")], + "arrayValue": ["key": [1, 2]], + "vectorValue": ["key": VectorValue([1.0, 2.0])], + "objectValue": ["key": ["a": 1]], + "maxValue": ["key": MaxKey.shared], + ] + + for (docId, data) in testDocs { + try await collection.document(docId).setData(data as [String: Any]) + } + + let orderedQuery = collection.order(by: "key") + + let expectedOrder = [ + "nullValue", + "minValue", + "booleanValue", + "nanValue", + "int32Value", + "doubleValue", + "integerValue", + "timestampValue", + "bsonTimestampValue", + "stringValue", + "bytesValue", + "bsonBinaryValue", + "referenceValue", + "objectIdValue", + 
"geoPointValue", + "regexValue", + "arrayValue", + "vectorValue", + "objectValue", + "maxValue", + ] + + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: orderedQuery, + expectedResult: expectedOrder + ) + } + + func testCanRunTransactionsOnDocumentsWithBsonTypes() async throws { + let testDocs = [ + "a": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "b": ["key": "placeholder"], + "c": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + try await runTransaction(collection.firestore, block: { transaction, errorPointer -> Any? in + transaction.setData( + ["key": RegexValue(pattern: "^foo", options: "i")], + forDocument: collection.document("b") + ) + transaction.deleteDocument(collection.document("c")) + return true + }) + + let snapshot = try await collection.getDocuments() + print("snapshot.size=") + print(snapshot.documents.count) + print(toDataArray(snapshot)) + XCTAssertEqual( + toDataArray(snapshot) as? [[String: RegexValue]], + [ + ["key": BSONTimestamp(seconds: 1, increment: 2)], + ["key": RegexValue(pattern: "^foo", options: "i")], + ] as? 
[[String: RegexValue]] + ) + } +} diff --git a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift index 406fb823a2a..c060dfb109f 100644 --- a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift @@ -76,6 +76,33 @@ class CodableIntegrationTests: FSTIntegrationTestCase { awaitExpectations() } + private struct ModelWithTestField: Codable { + var name: String + var testField: T + } + + private func assertCanWriteAndReadCodableValueWithAllFlavors(value: T) throws { + let model = ModelWithTestField( + name: "name", + testField: value + ) + + let docToWrite = documentRef() + + for flavor in allFlavors { + try setData(from: model, forDocument: docToWrite, withFlavor: flavor) + + let data = try readDocument(forRef: docToWrite).data(as: ModelWithTestField.self) + + XCTAssertEqual( + data.testField, + value, + "Failed with flavor \(flavor)" + ) + } + } + func testCodableRoundTrip() throws { struct Model: Codable, Equatable { var name: String @@ -84,6 +111,13 @@ class CodableIntegrationTests: FSTIntegrationTestCase { var geoPoint: GeoPoint var docRef: DocumentReference var vector: VectorValue + var regex: RegexValue + var int32: Int32Value + var minKey: MinKey + var maxKey: MaxKey + var bsonOjectId: BSONObjectId + var bsonTimestamp: BSONTimestamp + var bsonBinaryData: BSONBinaryData } let docToWrite = documentRef() let model = Model(name: "test", @@ -91,7 +125,14 @@ class CodableIntegrationTests: FSTIntegrationTestCase { ts: Timestamp(seconds: 987_654_321, nanoseconds: 0), geoPoint: GeoPoint(latitude: 45, longitude: 54), docRef: docToWrite, - vector: FieldValue.vector([0.7, 0.6])) + vector: FieldValue.vector([0.7, 0.6]), + regex: RegexValue(pattern: "^foo", options: "i"), + int32: Int32Value(1), + minKey: MinKey.shared, + maxKey: MaxKey.shared, + bsonOjectId: BSONObjectId("507f191e810c19729de860ec"), + bsonTimestamp: 
BSONTimestamp(seconds: 123, increment: 456), + bsonBinaryData: BSONBinaryData(subtype: 128, data: Data([1, 2]))) for flavor in allFlavors { try setData(from: model, forDocument: docToWrite, withFlavor: flavor) @@ -188,28 +229,44 @@ class CodableIntegrationTests: FSTIntegrationTestCase { } func testVectorValue() throws { - struct Model: Codable { - var name: String - var embedding: VectorValue - } - let model = Model( - name: "name", - embedding: VectorValue([0.1, 0.3, 0.4]) - ) + try assertCanWriteAndReadCodableValueWithAllFlavors(value: VectorValue([0.1, 0.3, 0.4])) + } - let docToWrite = documentRef() + func testMinKey() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: MinKey.shared) + } - for flavor in allFlavors { - try setData(from: model, forDocument: docToWrite, withFlavor: flavor) + func testMaxKey() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: MaxKey.shared) + } - let data = try readDocument(forRef: docToWrite).data(as: Model.self) + func testRegexValue() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: RegexValue( + pattern: "^foo", + options: "i" + )) + } - XCTAssertEqual( - data.embedding, - VectorValue([0.1, 0.3, 0.4]), - "Failed with flavor \(flavor)" - ) - } + func testInt32Value() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: Int32Value(123)) + } + + func testBsonObjectId() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors( + value: BSONObjectId("507f191e810c19729de860ec") + ) + } + + func testBsonTimestamp() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors( + value: BSONTimestamp(seconds: 123, increment: 456) + ) + } + + func testBsonBinaryData() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors( + value: BSONBinaryData(subtype: 128, data: Data([1, 2, 3])) + ) } func testDataBlob() throws { diff --git a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift 
b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift index 61b4da23530..260772b5270 100644 --- a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift +++ b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift @@ -753,4 +753,87 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { eventAccumulator.assertNoAdditionalEvents() registration.remove() } + + func testCanListenToDocumentsWithBsonTypes() throws { + let collection = collectionRef() + let testData = [ + "a": ["key": MaxKey.shared], + "b": ["key": MinKey.shared], + "c": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "d": ["key": BSONObjectId("507f191e810c19729de860ea")], + "e": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "f": ["key": RegexValue(pattern: "^foo", options: "i")], + ] + + let query = collection.order(by: "key", descending: false) + let registration = query.addSnapshotListener(eventAccumulator.valueEventHandler) + + var querySnap = eventAccumulator.awaitEvent(withName: "snapshot") as! QuerySnapshot + XCTAssertEqual(querySnap.isEmpty, true) + + writeAllDocuments(testData, toCollection: collection) + + querySnap = eventAccumulator.awaitEvent(withName: "snapshot") as! QuerySnapshot + XCTAssertEqual(querySnap.isEmpty, false) + XCTAssertEqual( + querySnap.documents[0].data()["key"] as! MinKey, + testData["b"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[1].data()["key"] as! BSONTimestamp, + testData["c"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[2].data()["key"] as! BSONBinaryData, + testData["e"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[3].data()["key"] as! BSONObjectId, + testData["d"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[4].data()["key"] as! RegexValue, + testData["f"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[5].data()["key"] as! 
MaxKey, + testData["a"]!["key"] + ) + + let newData = ["key": Int32Value(2)] + collection.document("g").setData(newData) + + querySnap = eventAccumulator.awaitEvent(withName: "snapshot") as! QuerySnapshot + XCTAssertEqual(querySnap.isEmpty, false) + XCTAssertEqual( + querySnap.documents[0].data()["key"] as! MinKey, + testData["b"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[1].data()["key"] as! Int32Value, + newData["key"]! + ) + XCTAssertEqual( + querySnap.documents[2].data()["key"] as! BSONTimestamp, + testData["c"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[3].data()["key"] as! BSONBinaryData, + testData["e"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[4].data()["key"] as! BSONObjectId, + testData["d"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[5].data()["key"] as! RegexValue, + testData["f"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[6].data()["key"] as! MaxKey, + testData["a"]!["key"] + ) + + registration.remove() + } } diff --git a/Firestore/Swift/Tests/Integration/TypeTest.swift b/Firestore/Swift/Tests/Integration/TypeTest.swift new file mode 100644 index 00000000000..a7ef1804e16 --- /dev/null +++ b/Firestore/Swift/Tests/Integration/TypeTest.swift @@ -0,0 +1,375 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import Combine +import FirebaseFirestore +import Foundation + +// iOS 15 required for test implementation, not BSON types +@available(iOS 13, tvOS 13, macOS 10.15, macCatalyst 13, watchOS 7, *) +class TypeTest: FSTIntegrationTestCase { + // Note: Type tests are missing from our Swift Integration tests. + // Below we're adding new tests for BSON types. + // TODO(b/403333631): Port other (non-BSON) tests to Swift. + + func expectRoundtrip(coll: CollectionReference, + data: [String: Any], + validateSnapshots: Bool = true, + expectedData: [String: Any]? = nil) async throws -> DocumentSnapshot { + let expectedData = expectedData ?? data + let docRef = coll.document("a") + + try await docRef.setData(data) + var docSnapshot = try await docRef.getDocument() + XCTAssertEqual(docSnapshot.data() as NSDictionary?, expectedData as NSDictionary?) + + try await docRef.updateData(data) + docSnapshot = try await docRef.getDocument() + XCTAssertEqual(docSnapshot.data() as NSDictionary?, expectedData as NSDictionary?) + + // Validate that the transaction API returns the same types + _ = try await db.runTransaction { transaction, errorPointer in + do { + let transactionSnapshot = try transaction.getDocument(docRef) + XCTAssertEqual( + transactionSnapshot.data() as NSDictionary?, + expectedData as NSDictionary? + ) + return nil // Transaction doesn't need to modify data in this test + } catch { + errorPointer?.pointee = error as NSError + return nil + } + } + + if validateSnapshots { + let querySnapshot = try await coll.getDocuments() + if let firstDoc = querySnapshot.documents.first { + docSnapshot = firstDoc + XCTAssertEqual(docSnapshot.data() as NSDictionary?, expectedData as NSDictionary?) + } else { + XCTFail("No documents found in collection snapshot") + } + + let expectation = XCTestExpectation(description: "Snapshot listener received data") + var listener: ListenerRegistration? 
+ listener = coll.addSnapshotListener { snapshot, error in + guard let snapshot = snapshot, let firstDoc = snapshot.documents.first, + error == nil else { + XCTFail( + "Error fetching snapshot: \(error?.localizedDescription ?? "Unknown error")" + ) + expectation.fulfill() + return + } + XCTAssertEqual(firstDoc.data() as NSDictionary?, expectedData as NSDictionary?) + expectation.fulfill() + + // Stop listening after receiving the first snapshot + listener?.remove() + } + + // Wait for the listener to fire + await fulfillment(of: [expectation], timeout: 5.0) + } + + return docSnapshot + } + + /* + * A Note on Equality Tests: + * + * Since `isEqual` is a public Obj-c API, we should test that the + * `==` and `!=` operator in Swift is comparing objects correctly. + */ + + func testMinKeyEquality() { + let k1 = MinKey.shared + let k2 = MinKey.shared + XCTAssertTrue(k1 == k2) + XCTAssertFalse(k1 != k2) + } + + func testMaxKeyEquality() { + let k1 = MaxKey.shared + let k2 = MaxKey.shared + XCTAssertTrue(k1 == k2) + XCTAssertFalse(k1 != k2) + } + + func testRegexValueEquality() { + let v1 = RegexValue(pattern: "foo", options: "bar") + let v2 = RegexValue(pattern: "foo", options: "bar") + let v3 = RegexValue(pattern: "foo_3", options: "bar") + let v4 = RegexValue(pattern: "foo", options: "bar_4") + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + XCTAssertFalse(v1 == v4) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + XCTAssertTrue(v1 != v4) + } + + func testInt32ValueEquality() { + let v1 = Int32Value(1) + let v2 = Int32Value(1) + let v3 = Int32Value(-1) + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + } + + func testBsonTimestampEquality() { + let v1 = BSONTimestamp(seconds: 1, increment: 1) + let v2 = BSONTimestamp(seconds: 1, increment: 1) + let v3 = BSONTimestamp(seconds: 1, increment: 2) + let v4 = BSONTimestamp(seconds: 2, increment: 1) + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == 
v3) + XCTAssertFalse(v1 == v4) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + XCTAssertTrue(v1 != v4) + } + + func testBsonObjectIdEquality() { + let v1 = BSONObjectId("foo") + let v2 = BSONObjectId("foo") + let v3 = BSONObjectId("bar") + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + } + + func testBsonBinaryDataEquality() { + let v1 = BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) + let v2 = BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) + let v3 = BSONBinaryData(subtype: 128, data: Data([1, 2, 3])) + let v4 = BSONBinaryData(subtype: 1, data: Data([1, 2, 3, 4])) + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + XCTAssertFalse(v1 == v4) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + XCTAssertTrue(v1 != v4) + } + + func testCanReadAndWriteMinKeyFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["min": MinKey.shared] + ) + } + + func testCanReadAndWriteMaxKeyFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["max": MaxKey.shared] + ) + } + + func testCanReadAndWriteRegexFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["regex": RegexValue(pattern: "^foo", options: "i")] + ) + } + + func testCanReadAndWriteInt32Fields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["int32": Int32Value(1)] + ) + } + + func testCanReadAndWriteBsonTimestampFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonTimestamp": BSONTimestamp(seconds: 1, increment: 2)] + ) + } + + func testCanReadAndWriteBsonObjectIdFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonObjectId": BSONObjectId("507f191e810c19729de860ea")] + ) + } + + func testCanReadAndWriteBsonBinaryDataFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: 
["bsonBinaryData": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))] + ) + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonBinaryData": BSONBinaryData(subtype: 128, data: Data([1, 2, 3]))] + ) + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonBinaryData": BSONBinaryData(subtype: 255, data: Data([]))] + ) + } + + func testCanReadAndWriteBsonFieldsInAnArray() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["array": [ + BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + BSONObjectId("507f191e810c19729de860ea"), + BSONTimestamp(seconds: 123, increment: 456), + Int32Value(1), + MinKey.shared, + MaxKey.shared, + RegexValue(pattern: "^foo", options: "i"), + ]] + ) + } + + func testCanReadAndWriteBsonFieldsInAnObject() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["array": [ + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BSONObjectId("507f191e810c19729de860ea"), + "bsonTimestamp": BSONTimestamp(seconds: 123, increment: 456), + "int32": Int32Value(1), + "min": MinKey.shared, + "max": MaxKey.shared, + "regex": RegexValue(pattern: "^foo", options: "i"), + ]] + ) + } + + func testInvalidRegexValueGetsRejected() async throws { + let docRef = collectionRef().document("test-doc") + var errorMessage: String? + + do { + // Using an invalid regex option "a" + try await docRef.setData(["key": RegexValue(pattern: "foo", options: "a")]) + XCTFail("Expected error for invalid regex option") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue( + errorMessage! + .contains("Invalid regex option 'a'. Supported options are 'i', 'm', 's', 'u', and 'x'."), + "Unexpected error message: \(errorMessage ?? 
"nil")" + ) + } + } + + func testInvalidBsonObjectIdValueGetsRejected() async throws { + let docRef = collectionRef().document("test-doc") + var errorMessage: String? + + do { + // BSONObjectId with string length not equal to 24 + try await docRef.setData(["key": BSONObjectId("foo")]) + XCTFail("Expected error for invalid BSON Object ID string length") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue( + errorMessage!.contains("Object ID hex string has incorrect length."), + "Unexpected error message: \(errorMessage ?? "nil")" + ) + } + } + + func testCanOrderValuesOfDifferentTypeOrderTogether() async throws { + let collection = collectionRef() + let testDocs: [String: [String: Any?]] = [ + "nullValue": ["key": NSNull()], + "minValue": ["key": MinKey.shared], + "booleanValue": ["key": true], + "nanValue": ["key": Double.nan], + "int32Value": ["key": Int32Value(1)], + "doubleValue": ["key": 2.0], + "integerValue": ["key": 3], + "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], + "bsonTimestampValue": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "stringValue": ["key": "string"], + "bytesValue": ["key": Data([0, 1, 255])], + "bsonBinaryValue": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "referenceValue": ["key": collection.document("doc")], + "objectIdValue": ["key": BSONObjectId("507f191e810c19729de860ea")], + "geoPointValue": ["key": GeoPoint(latitude: 0, longitude: 0)], + "regexValue": ["key": RegexValue(pattern: "^foo", options: "i")], + "arrayValue": ["key": [1, 2]], + "vectorValue": ["key": VectorValue([1.0, 2.0])], + "objectValue": ["key": ["a": 1]], + "maxValue": ["key": MaxKey.shared], + ] + + for (docId, data) in testDocs { + try await collection.document(docId).setData(data as [String: Any]) + } + + let orderedQuery = collection.order(by: "key") + let snapshot = try await orderedQuery.getDocuments() + + let 
expectedOrder = [ + "nullValue", + "minValue", + "booleanValue", + "nanValue", + "int32Value", + "doubleValue", + "integerValue", + "timestampValue", + "bsonTimestampValue", + "stringValue", + "bytesValue", + "bsonBinaryValue", + "referenceValue", + "objectIdValue", + "geoPointValue", + "regexValue", + "arrayValue", + "vectorValue", + "objectValue", + "maxValue", + ] + + XCTAssertEqual(snapshot.documents.count, testDocs.count) + + for i in 0 ..< snapshot.documents.count { + let actualDocSnapshot = snapshot.documents[i] + let actualKeyValue = actualDocSnapshot.data()["key"] + let expectedDocId = expectedOrder[i] + let expectedKeyValue = testDocs[expectedDocId]!["key"] + + XCTAssertEqual(actualDocSnapshot.documentID, expectedDocId) + + // Since we have a 'nullValue' case, we should use `as?`. + XCTAssert(actualKeyValue as? NSObject == expectedKeyValue as? NSObject) + } + } +} diff --git a/Firestore/core/CMakeLists.txt b/Firestore/core/CMakeLists.txt index 65238165549..49600ce193e 100644 --- a/Firestore/core/CMakeLists.txt +++ b/Firestore/core/CMakeLists.txt @@ -323,6 +323,7 @@ add_subdirectory(test/unit/bundle) add_subdirectory(test/unit/credentials) add_subdirectory(test/unit/core) add_subdirectory(test/unit/immutable) +add_subdirectory(test/unit/index) add_subdirectory(test/unit/local) add_subdirectory(test/unit/model) add_subdirectory(test/unit/objc) diff --git a/Firestore/core/src/core/target.cc b/Firestore/core/src/core/target.cc index 3002a955da1..20423be2c93 100644 --- a/Firestore/core/src/core/target.cc +++ b/Firestore/core/src/core/target.cc @@ -207,13 +207,13 @@ IndexBoundValues Target::GetUpperBound( Target::IndexBoundValue Target::GetAscendingBound( const Segment& segment, const absl::optional& bound) const { - google_firestore_v1_Value segment_value = model::MinValue(); + google_firestore_v1_Value segment_value = model::InternalMinValue(); bool segment_inclusive = true; // Process all filters to find a value for the current field segment for (const auto& 
field_filter : GetFieldFiltersForPath(segment.field_path())) { - google_firestore_v1_Value filter_value = model::MinValue(); + google_firestore_v1_Value filter_value = model::InternalMinValue(); bool filter_inclusive = true; switch (field_filter.op()) { @@ -232,7 +232,7 @@ Target::IndexBoundValue Target::GetAscendingBound( break; case FieldFilter::Operator::NotEqual: case FieldFilter::Operator::NotIn: - filter_value = model::MinValue(); + filter_value = model::MinKeyValue(); break; default: // Remaining filters cannot be used as bound. @@ -271,13 +271,13 @@ Target::IndexBoundValue Target::GetAscendingBound( Target::IndexBoundValue Target::GetDescendingBound( const Segment& segment, const absl::optional& bound) const { - google_firestore_v1_Value segment_value = model::MaxValue(); + google_firestore_v1_Value segment_value = model::InternalMaxValue(); bool segment_inclusive = true; // Process all filters to find a value for the current field segment for (const auto& field_filter : GetFieldFiltersForPath(segment.field_path())) { - google_firestore_v1_Value filter_value = model::MaxValue(); + google_firestore_v1_Value filter_value = model::InternalMaxValue(); bool filter_inclusive = true; switch (field_filter.op()) { @@ -297,7 +297,7 @@ Target::IndexBoundValue Target::GetDescendingBound( break; case FieldFilter::Operator::NotIn: case FieldFilter::Operator::NotEqual: - filter_value = model::MaxValue(); + filter_value = model::MaxKeyValue(); break; default: // Remaining filters cannot be used as bound. diff --git a/Firestore/core/src/core/target.h b/Firestore/core/src/core/target.h index e22543a5b3e..645907f5e46 100644 --- a/Firestore/core/src/core/target.h +++ b/Firestore/core/src/core/target.h @@ -137,7 +137,7 @@ class Target { * Returns a lower bound of field values that can be used as a starting point * to scan the index defined by `field_index`. * - * Returns `model::MinValue()` if no lower bound exists. 
+ * Returns `model::InternalMinValue()` if no lower bound exists. */ IndexBoundValues GetLowerBound(const model::FieldIndex& field_index) const; @@ -145,7 +145,7 @@ class Target { * Returns an upper bound of field values that can be used as an ending point * when scanning the index defined by `field_index`. * - * Returns `model::MaxValue()` if no upper bound exists. + * Returns `model::InternalMaxValue()` if no upper bound exists. */ IndexBoundValues GetUpperBound(const model::FieldIndex& field_index) const; diff --git a/Firestore/core/src/index/firestore_index_value_writer.cc b/Firestore/core/src/index/firestore_index_value_writer.cc index 4587844b930..bfeaa4dfbef 100644 --- a/Firestore/core/src/index/firestore_index_value_writer.cc +++ b/Firestore/core/src/index/firestore_index_value_writer.cc @@ -19,42 +19,26 @@ #include #include #include +#include #include "Firestore/core/src/model/resource_path.h" #include "Firestore/core/src/model/value_util.h" #include "Firestore/core/src/nanopb/nanopb_util.h" +#include "absl/strings/str_split.h" + namespace firebase { namespace firestore { namespace index { namespace { -// Note: This code is copied from the backend. Code that is not used by -// Firestore was removed. +// Note: This file is copied from the backend. Code that is not used by +// Firestore was removed. Code that has different behavior was modified. // The client SDK only supports references to documents from the same database. // We can skip the first five segments. constexpr int DocumentNameOffset = 5; -enum IndexType { - kNull = 5, - kBoolean = 10, - kNan = 13, - kNumber = 15, - kTimestamp = 20, - kString = 25, - kBlob = 30, - kReference = 37, - kGeopoint = 45, - kArray = 50, - kVector = 53, - kMap = 55, - kReferenceSegment = 60, - // A terminator that indicates that a truncatable value was not truncated. - // This must be smaller than all other type labels. 
- kNotTruncated = 2 -}; - void WriteValueTypeLabel(DirectionalIndexByteEncoder* encoder, int type_order) { encoder->WriteLong(type_order); } @@ -86,11 +70,16 @@ void WriteIndexEntityRef(pb_bytes_array_t* reference_value, DirectionalIndexByteEncoder* encoder) { WriteValueTypeLabel(encoder, IndexType::kReference); - auto path = model::ResourcePath::FromStringView( - nanopb::MakeStringView(reference_value)); - auto num_segments = path.size(); + // We must allow empty strings. We could be dealing with a reference_value + // with empty segmenets. The reference value has the following format: + // projects//databases//documents// + // So we may have something like: + // projects//databases//documents/coll_1/doc_1 + std::vector segments = absl::StrSplit( + nanopb::MakeStringView(reference_value), '/', absl::AllowEmpty()); + auto num_segments = segments.size(); for (size_t index = DocumentNameOffset; index < num_segments; ++index) { - const std::string& segment = path[index]; + const std::string& segment = segments[index]; WriteValueTypeLabel(encoder, IndexType::kReferenceSegment); WriteUnlabeledIndexString(segment, encoder); } @@ -141,6 +130,78 @@ void WriteIndexMap(google_firestore_v1_MapValue map_index_value, } } +void WriteIndexBsonBinaryData( + const google_firestore_v1_MapValue& map_index_value, + DirectionalIndexByteEncoder* encoder) { + WriteValueTypeLabel(encoder, IndexType::kBsonBinaryData); + encoder->WriteBytes(map_index_value.fields[0].value.bytes_value); + WriteTruncationMarker(encoder); +} + +void WriteIndexBsonObjectId(const google_firestore_v1_MapValue& map_index_value, + DirectionalIndexByteEncoder* encoder) { + WriteValueTypeLabel(encoder, IndexType::kBsonObjectId); + encoder->WriteBytes(map_index_value.fields[0].value.string_value); +} + +void WriteIndexBsonTimestamp( + const google_firestore_v1_MapValue& map_index_value, + DirectionalIndexByteEncoder* encoder) { + WriteValueTypeLabel(encoder, IndexType::kBsonTimestamp); + + // Figure out the seconds 
and increment value. + const google_firestore_v1_MapValue& inner_map = + map_index_value.fields[0].value.map_value; + absl::optional seconds_index = model::IndexOfKey( + inner_map, model::kRawBsonTimestampTypeSecondsFieldValue, + model::kBsonTimestampTypeSecondsFieldValue); + absl::optional increment_index = model::IndexOfKey( + inner_map, model::kRawBsonTimestampTypeIncrementFieldValue, + model::kBsonTimestampTypeIncrementFieldValue); + const int64_t seconds = + inner_map.fields[seconds_index.value()].value.integer_value; + const int64_t increment = + inner_map.fields[increment_index.value()].value.integer_value; + + // BsonTimestamp is encoded as a 64-bit long. + int64_t value_to_encode = (seconds << 32) | (increment & 0xFFFFFFFFL); + encoder->WriteLong(value_to_encode); +} + +void WriteIndexRegexValue(const google_firestore_v1_MapValue& map_index_value, + DirectionalIndexByteEncoder* encoder) { + WriteValueTypeLabel(encoder, IndexType::kRegex); + + // Figure out the pattern and options. + const google_firestore_v1_MapValue& inner_map = + map_index_value.fields[0].value.map_value; + absl::optional pattern_index = + model::IndexOfKey(inner_map, model::kRawRegexTypePatternFieldValue, + model::kRegexTypePatternFieldValue); + absl::optional options_index = + model::IndexOfKey(inner_map, model::kRawRegexTypeOptionsFieldValue, + model::kRegexTypeOptionsFieldValue); + const auto& pattern = + inner_map.fields[pattern_index.value()].value.string_value; + const auto& options = + inner_map.fields[options_index.value()].value.string_value; + + // Write pattern and then options. + WriteUnlabeledIndexString(pattern, encoder); + WriteUnlabeledIndexString(options, encoder); + + // Also needs truncation marker. 
+ WriteTruncationMarker(encoder); +} + +void WriteIndexInt32Value(const google_firestore_v1_MapValue& map_index_value, + DirectionalIndexByteEncoder* encoder) { + WriteValueTypeLabel(encoder, IndexType::kNumber); + // Similar to 64-bit integers (see integer_value below), we write 32-bit + // integers as double so that 0 and 0.0 are considered the same. + encoder->WriteDouble(map_index_value.fields[0].value.integer_value); +} + void WriteIndexValueAux(const google_firestore_v1_Value& index_value, DirectionalIndexByteEncoder* encoder) { switch (index_value.which_value_type) { @@ -205,15 +266,38 @@ void WriteIndexValueAux(const google_firestore_v1_Value& index_value, break; } case google_firestore_v1_Value_map_value_tag: - // model::MaxValue() is sentinel map value (see the comment there). - // In that case, we encode the max int value instead. - if (model::IsMaxValue(index_value)) { + // model::InternalMaxValue() is a sentinel map value (see the comment + // there). In that case, we encode the max int value instead. 
+ if (model::IsInternalMaxValue(index_value)) { WriteValueTypeLabel(encoder, std::numeric_limits::max()); break; } else if (model::IsVectorValue(index_value)) { WriteIndexVector(index_value.map_value, encoder); break; + } else if (model::IsMaxKeyValue(index_value)) { + WriteValueTypeLabel(encoder, IndexType::kMaxKey); + break; + } else if (model::IsMinKeyValue(index_value)) { + WriteValueTypeLabel(encoder, IndexType::kMinKey); + break; + } else if (model::IsBsonBinaryData(index_value)) { + WriteIndexBsonBinaryData(index_value.map_value, encoder); + break; + } else if (model::IsRegexValue(index_value)) { + WriteIndexRegexValue(index_value.map_value, encoder); + break; + } else if (model::IsBsonTimestamp(index_value)) { + WriteIndexBsonTimestamp(index_value.map_value, encoder); + break; + } else if (model::IsBsonObjectId(index_value)) { + WriteIndexBsonObjectId(index_value.map_value, encoder); + break; + } else if (model::IsInt32Value(index_value)) { + WriteIndexInt32Value(index_value.map_value, encoder); + break; } + + // For regular maps: WriteIndexMap(index_value.map_value, encoder); WriteTruncationMarker(encoder); break; diff --git a/Firestore/core/src/index/firestore_index_value_writer.h b/Firestore/core/src/index/firestore_index_value_writer.h index 54ac559f2d5..6963b95b6a0 100644 --- a/Firestore/core/src/index/firestore_index_value_writer.h +++ b/Firestore/core/src/index/firestore_index_value_writer.h @@ -24,6 +24,31 @@ namespace firebase { namespace firestore { namespace index { +enum IndexType { + kNull = 5, + kMinKey = 7, + kBoolean = 10, + kNan = 13, + kNumber = 15, + kTimestamp = 20, + kBsonTimestamp = 22, + kString = 25, + kBlob = 30, + kBsonBinaryData = 31, + kReference = 37, + kBsonObjectId = 43, + kGeopoint = 45, + kRegex = 47, + kArray = 50, + kVector = 53, + kMap = 55, + kReferenceSegment = 60, + kMaxKey = 999, + // A terminator that indicates that a truncatable value was not truncated. + // This must be smaller than all other type labels. 
+ kNotTruncated = 2 +}; + /** * Writes an index value using the given encoder. The encoder writes the encoded * bytes into a buffer maintained by `IndexEncodingBuffer` who owns the diff --git a/Firestore/core/src/model/object_value.cc b/Firestore/core/src/model/object_value.cc index 3b812fe535d..b5a74d0f427 100644 --- a/Firestore/core/src/model/object_value.cc +++ b/Firestore/core/src/model/object_value.cc @@ -270,7 +270,8 @@ FieldMask ObjectValue::ExtractFieldMask( const google_firestore_v1_MapValue_FieldsEntry& entry = value.fields[i]; FieldPath current_path{MakeString(entry.key)}; - if (!IsMap(entry.value)) { + // BSON types do not need to extract reserved keys such as '__regex__', etc. + if (!IsMap(entry.value) || IsBsonType(entry.value)) { fields.insert(std::move(current_path)); continue; } diff --git a/Firestore/core/src/model/value_util.cc b/Firestore/core/src/model/value_util.cc index f363d2d7090..e96cfb8707b 100644 --- a/Firestore/core/src/model/value_util.cc +++ b/Firestore/core/src/model/value_util.cc @@ -52,9 +52,9 @@ pb_bytes_array_s* kTypeValueFieldKey = nanopb::MakeBytesArray(kRawTypeValueFieldKey); /** The field value of a maximum proto value. */ -const char* kRawMaxValueFieldValue = "__max__"; -pb_bytes_array_s* kMaxValueFieldValue = - nanopb::MakeBytesArray(kRawMaxValueFieldValue); +const char* kRawInternalMaxValueFieldValue = "__max__"; +pb_bytes_array_s* kInternalMaxValueFieldValue = + nanopb::MakeBytesArray(kRawInternalMaxValueFieldValue); /** The type of a VectorValue proto. */ const char* kRawVectorTypeFieldValue = "__vector__"; @@ -66,6 +66,101 @@ const char* kRawVectorValueFieldKey = "value"; pb_bytes_array_s* kVectorValueFieldKey = nanopb::MakeBytesArray(kRawVectorValueFieldKey); +/** The key of a MinKey in a map proto. */ +const char* kRawMinKeyTypeFieldValue = "__min__"; +pb_bytes_array_s* kMinKeyTypeFieldValue = + nanopb::MakeBytesArray(kRawMinKeyTypeFieldValue); + +/** The key of a MaxKey in a map proto. 
*/ +const char* kRawMaxKeyTypeFieldValue = "__max__"; +pb_bytes_array_s* kMaxKeyTypeFieldValue = + nanopb::MakeBytesArray(kRawMaxKeyTypeFieldValue); + +/** The key of a regex in a map proto. */ +const char* kRawRegexTypeFieldValue = "__regex__"; +pb_bytes_array_s* kRegexTypeFieldValue = + nanopb::MakeBytesArray(kRawRegexTypeFieldValue); + +/** The regex pattern key. */ +const char* kRawRegexTypePatternFieldValue = "pattern"; +pb_bytes_array_s* kRegexTypePatternFieldValue = + nanopb::MakeBytesArray(kRawRegexTypePatternFieldValue); + +/** The regex options key. */ +const char* kRawRegexTypeOptionsFieldValue = "options"; +pb_bytes_array_s* kRegexTypeOptionsFieldValue = + nanopb::MakeBytesArray(kRawRegexTypeOptionsFieldValue); + +/** The key of an int32 in a map proto. */ +const char* kRawInt32TypeFieldValue = "__int__"; +pb_bytes_array_s* kInt32TypeFieldValue = + nanopb::MakeBytesArray(kRawInt32TypeFieldValue); + +/** The key of a BSON ObjectId in a map proto. */ +const char* kRawBsonObjectIdTypeFieldValue = "__oid__"; +pb_bytes_array_s* kBsonObjectIdTypeFieldValue = + nanopb::MakeBytesArray(kRawBsonObjectIdTypeFieldValue); + +/** The key of a BSON Timestamp in a map proto. */ +const char* kRawBsonTimestampTypeFieldValue = "__request_timestamp__"; +pb_bytes_array_s* kBsonTimestampTypeFieldValue = + nanopb::MakeBytesArray(kRawBsonTimestampTypeFieldValue); + +/** The key of a BSON Timestamp seconds in a map proto. */ +const char* kRawBsonTimestampTypeSecondsFieldValue = "seconds"; +pb_bytes_array_s* kBsonTimestampTypeSecondsFieldValue = + nanopb::MakeBytesArray(kRawBsonTimestampTypeSecondsFieldValue); + +/** The key of a BSON Timestamp increment in a map proto. */ +const char* kRawBsonTimestampTypeIncrementFieldValue = "increment"; +pb_bytes_array_s* kBsonTimestampTypeIncrementFieldValue = + nanopb::MakeBytesArray(kRawBsonTimestampTypeIncrementFieldValue); + +/** The key of a BSON Binary Data in a map proto. 
*/ +const char* kRawBsonBinaryDataTypeFieldValue = "__binary__"; +pb_bytes_array_s* kBsonBinaryDataTypeFieldValue = + nanopb::MakeBytesArray(kRawBsonBinaryDataTypeFieldValue); + +MapType DetectMapType(const google_firestore_v1_Value& value) { + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count == 0) { + return MapType::kNormal; + } + + // Check for type-based mappings + if (IsServerTimestamp(value)) { + return MapType::kServerTimestamp; + } else if (IsInternalMaxValue(value)) { + return MapType::kInternalMaxValue; + } else if (IsVectorValue(value)) { + return MapType::kVector; + } + + // Check for BSON-related mappings + if (value.map_value.fields_count != 1) { + // All BSON types have 1 key in the map. To improve performance, we can + // return early if the map is empty or has more than 1 key. + return MapType::kNormal; + } else if (IsMinKeyValue(value)) { + return MapType::kMinKey; + } + if (IsMaxKeyValue(value)) { + return MapType::kMaxKey; + } else if (IsRegexValue(value)) { + return MapType::kRegex; + } else if (IsInt32Value(value)) { + return MapType::kInt32; + } else if (IsBsonObjectId(value)) { + return MapType::kBsonObjectId; + } else if (IsBsonTimestamp(value)) { + return MapType::kBsonTimestamp; + } else if (IsBsonBinaryData(value)) { + return MapType::kBsonBinaryData; + } + + return MapType::kNormal; +} + TypeOrder GetTypeOrder(const google_firestore_v1_Value& value) { switch (value.which_value_type) { case google_firestore_v1_Value_null_value_tag: @@ -97,14 +192,31 @@ TypeOrder GetTypeOrder(const google_firestore_v1_Value& value) { return TypeOrder::kArray; case google_firestore_v1_Value_map_value_tag: { - if (IsServerTimestamp(value)) { - return TypeOrder::kServerTimestamp; - } else if (IsMaxValue(value)) { - return TypeOrder::kMaxValue; - } else if (IsVectorValue(value)) { - return TypeOrder::kVector; + switch (DetectMapType(value)) { + case MapType::kServerTimestamp: + return 
TypeOrder::kServerTimestamp; + case MapType::kInternalMaxValue: + return TypeOrder::kInternalMaxValue; + case MapType::kVector: + return TypeOrder::kVector; + case MapType::kMinKey: + return TypeOrder::kMinKey; + case MapType::kMaxKey: + return TypeOrder::kMaxKey; + case MapType::kRegex: + return TypeOrder::kRegex; + case MapType::kInt32: + return TypeOrder::kNumber; + case MapType::kBsonObjectId: + return TypeOrder::kBsonObjectId; + case MapType::kBsonTimestamp: + return TypeOrder::kBsonTimestamp; + case MapType::kBsonBinaryData: + return TypeOrder::kBsonBinaryData; + case MapType::kNormal: + default: + return TypeOrder::kMap; } - return TypeOrder::kMap; } default: @@ -145,13 +257,21 @@ ComparisonResult CompareNumbers(const google_firestore_v1_Value& left, double left_double = left.double_value; if (right.which_value_type == google_firestore_v1_Value_double_value_tag) { return util::Compare(left_double, right.double_value); + } else if (IsInt32Value(right)) { + return util::CompareMixedNumber( + left_double, right.map_value.fields[0].value.integer_value); } else { return util::CompareMixedNumber(left_double, right.integer_value); } } else { - int64_t left_long = left.integer_value; + int64_t left_long = IsInt32Value(left) + ? 
left.map_value.fields[0].value.integer_value + : left.integer_value; if (right.which_value_type == google_firestore_v1_Value_integer_value_tag) { return util::Compare(left_long, right.integer_value); + } else if (IsInt32Value(right)) { + return util::Compare(left_long, + right.map_value.fields[0].value.integer_value); } else { return util::ReverseOrder( util::CompareMixedNumber(right.double_value, left_long)); @@ -299,6 +419,124 @@ ComparisonResult CompareVectors(const google_firestore_v1_Value& left, return CompareArrays(leftArray, rightArray); } +ComparisonResult CompareRegexValues(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT(IsRegexValue(left) && IsRegexValue(right), + "Cannot compare non-regex values as regex values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. + const google_firestore_v1_MapValue& left_inner_map_value = + left.map_value.fields[0].value.map_value; + const google_firestore_v1_MapValue& right_inner_map_value = + right.map_value.fields[0].value.map_value; + + // Find the left and right patterns. + absl::optional left_pattern_index = + IndexOfKey(left_inner_map_value, kRawRegexTypePatternFieldValue, + kRegexTypePatternFieldValue); + const auto& left_pattern_value = + left_inner_map_value.fields[left_pattern_index.value()].value; + absl::optional right_pattern_index = + IndexOfKey(right_inner_map_value, kRawRegexTypePatternFieldValue, + kRegexTypePatternFieldValue); + const auto& right_pattern_value = + right_inner_map_value.fields[right_pattern_index.value()].value; + + // First compare patterns. + const auto compare_patterns = + CompareStrings(left_pattern_value, right_pattern_value); + if (compare_patterns != ComparisonResult::Same) { + return compare_patterns; + } + + // Find the left and right options. 
+ absl::optional left_options_index = + IndexOfKey(left_inner_map_value, kRawRegexTypeOptionsFieldValue, + kRegexTypeOptionsFieldValue); + const auto& left_options_value = + left_inner_map_value.fields[left_options_index.value()].value; + absl::optional right_options_index = + IndexOfKey(right_inner_map_value, kRawRegexTypeOptionsFieldValue, + kRegexTypeOptionsFieldValue); + const auto& right_options_value = + right_inner_map_value.fields[right_options_index.value()].value; + + // If patterns are equal, compare the options. + return CompareStrings(left_options_value, right_options_value); +} + +ComparisonResult CompareBsonObjectId(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT(IsBsonObjectId(left) && IsBsonObjectId(right), + "Cannot compare non-BsonObjectId values as BsonObjectId values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. + return CompareStrings(left.map_value.fields[0].value, + right.map_value.fields[0].value); +} + +ComparisonResult CompareBsonTimestamp(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT( + IsBsonTimestamp(left) && IsBsonTimestamp(right), + "Cannot compare non-BsonTimestamp values as BsonTimestamp values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. + const google_firestore_v1_MapValue& left_inner_map_value = + left.map_value.fields[0].value.map_value; + const google_firestore_v1_MapValue& right_inner_map_value = + right.map_value.fields[0].value.map_value; + + // Find the left and right 'seconds'. 
+ absl::optional left_seconds_index = + IndexOfKey(left_inner_map_value, kRawBsonTimestampTypeSecondsFieldValue, + kBsonTimestampTypeSecondsFieldValue); + const auto& left_seconds_value = + left_inner_map_value.fields[left_seconds_index.value()].value; + absl::optional right_seconds_index = + IndexOfKey(right_inner_map_value, kRawBsonTimestampTypeSecondsFieldValue, + kBsonTimestampTypeSecondsFieldValue); + const auto& right_seconds_value = + right_inner_map_value.fields[right_seconds_index.value()].value; + + // First compare 'seconds'. + const auto compare_seconds = + CompareNumbers(left_seconds_value, right_seconds_value); + if (compare_seconds != ComparisonResult::Same) { + return compare_seconds; + } + + // Find the left and right 'increment'. + absl::optional left_increment_index = + IndexOfKey(left_inner_map_value, kRawBsonTimestampTypeIncrementFieldValue, + kBsonTimestampTypeIncrementFieldValue); + const auto& left_increment_value = + left_inner_map_value.fields[left_increment_index.value()].value; + absl::optional right_increment_index = IndexOfKey( + right_inner_map_value, kRawBsonTimestampTypeIncrementFieldValue, + kBsonTimestampTypeIncrementFieldValue); + const auto& right_increment_value = + right_inner_map_value.fields[right_increment_index.value()].value; + + // If 'seconds' are equal, compare the 'increment'. + return CompareNumbers(left_increment_value, right_increment_value); +} + +ComparisonResult CompareBsonBinaryData(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT( + IsBsonBinaryData(left) && IsBsonBinaryData(right), + "Cannot compare non-BsonBinaryData values as BsonBinaryData values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. 
+ return CompareBlobs(left.map_value.fields[0].value, + right.map_value.fields[0].value); +} + ComparisonResult Compare(const google_firestore_v1_Value& left, const google_firestore_v1_Value& right) { TypeOrder left_type = GetTypeOrder(left); @@ -310,6 +548,11 @@ ComparisonResult Compare(const google_firestore_v1_Value& left, switch (left_type) { case TypeOrder::kNull: + case TypeOrder::kInternalMaxValue: + // All MinKeys are equal. + case TypeOrder::kMinKey: + // All MaxKeys are equal. + case TypeOrder::kMaxKey: return ComparisonResult::Same; case TypeOrder::kBoolean: @@ -337,6 +580,18 @@ ComparisonResult Compare(const google_firestore_v1_Value& left, case TypeOrder::kGeoPoint: return CompareGeoPoints(left, right); + case TypeOrder::kRegex: + return CompareRegexValues(left, right); + + case TypeOrder::kBsonObjectId: + return CompareBsonObjectId(left, right); + + case TypeOrder::kBsonTimestamp: + return CompareBsonTimestamp(left, right); + + case TypeOrder::kBsonBinaryData: + return CompareBsonBinaryData(left, right); + case TypeOrder::kArray: return CompareArrays(left, right); @@ -346,9 +601,6 @@ ComparisonResult Compare(const google_firestore_v1_Value& left, case TypeOrder::kVector: return CompareVectors(left, right); - case TypeOrder::kMaxValue: - return util::ComparisonResult::Same; - default: HARD_FAIL("Invalid type value: %s", left_type); } @@ -400,6 +652,9 @@ bool NumberEquals(const google_firestore_v1_Value& left, right.which_value_type == google_firestore_v1_Value_double_value_tag) { return util::DoubleBitwiseEquals(left.double_value, right.double_value); + } else if (IsInt32Value(left) && IsInt32Value(right)) { + return left.map_value.fields[0].value.integer_value == + right.map_value.fields[0].value.integer_value; } return false; } @@ -437,6 +692,8 @@ bool Equals(const google_firestore_v1_Value& lhs, switch (left_type) { case TypeOrder::kNull: + case TypeOrder::kMinKey: + case TypeOrder::kMaxKey: return true; case TypeOrder::kBoolean: @@ -474,11 +731,21 
@@ bool Equals(const google_firestore_v1_Value& lhs, case TypeOrder::kArray: return ArrayEquals(lhs.array_value, rhs.array_value); + case TypeOrder::kRegex: + return CompareRegexValues(lhs, rhs) == ComparisonResult::Same; + + case TypeOrder::kBsonObjectId: + return CompareBsonObjectId(lhs, rhs) == ComparisonResult::Same; + + case TypeOrder::kBsonTimestamp: + return CompareBsonTimestamp(lhs, rhs) == ComparisonResult::Same; + + case TypeOrder::kBsonBinaryData: + return CompareBsonBinaryData(lhs, rhs) == ComparisonResult::Same; + case TypeOrder::kVector: case TypeOrder::kMap: - return MapValueEquals(lhs.map_value, rhs.map_value); - - case TypeOrder::kMaxValue: + case TypeOrder::kInternalMaxValue: return MapValueEquals(lhs.map_value, rhs.map_value); default: @@ -631,6 +898,21 @@ google_firestore_v1_Value GetLowerBound( case google_firestore_v1_Value_map_value_tag: { if (IsVectorValue(value)) { return MinVector(); + } else if (IsBsonObjectId(value)) { + return MinBsonObjectId(); + } else if (IsBsonTimestamp(value)) { + return MinBsonTimestamp(); + } else if (IsBsonBinaryData(value)) { + return MinBsonBinaryData(); + } else if (IsRegexValue(value)) { + return MinRegex(); + } else if (IsInt32Value(value)) { + // int32Value is treated the same as integerValue and doubleValue. 
+ return MinNumber(); + } else if (IsMinKeyValue(value)) { + return MinKeyValue(); + } else if (IsMaxKeyValue(value)) { + return MaxKeyValue(); } return MinMap(); @@ -645,29 +927,47 @@ google_firestore_v1_Value GetUpperBound( const google_firestore_v1_Value& value) { switch (value.which_value_type) { case google_firestore_v1_Value_null_value_tag: - return MinBoolean(); + return MinKeyValue(); case google_firestore_v1_Value_boolean_value_tag: return MinNumber(); case google_firestore_v1_Value_integer_value_tag: case google_firestore_v1_Value_double_value_tag: return MinTimestamp(); case google_firestore_v1_Value_timestamp_value_tag: - return MinString(); + return MinBsonTimestamp(); case google_firestore_v1_Value_string_value_tag: return MinBytes(); case google_firestore_v1_Value_bytes_value_tag: - return MinReference(); + return MinBsonBinaryData(); case google_firestore_v1_Value_reference_value_tag: - return MinGeoPoint(); + return MinBsonObjectId(); case google_firestore_v1_Value_geo_point_value_tag: - return MinArray(); + return MinRegex(); case google_firestore_v1_Value_array_value_tag: return MinVector(); case google_firestore_v1_Value_map_value_tag: if (IsVectorValue(value)) { return MinMap(); + } else if (IsMinKeyValue(value)) { + return MinBoolean(); + } else if (IsInt32Value(value)) { + // int32Value is treated the same as integerValue and doubleValue. + return MinTimestamp(); + } else if (IsBsonTimestamp(value)) { + return MinString(); + } else if (IsBsonBinaryData(value)) { + return MinReference(); + } else if (IsBsonObjectId(value)) { + return MinGeoPoint(); + } else if (IsRegexValue(value)) { + return MinArray(); + } else if (IsMaxKeyValue(value)) { + // The upper bound for MaxKey is the internal max value. + return InternalMaxValue(); } - return MaxValue(); + + // For normal maps, the upper bound is MaxKey. 
+ return MaxKeyValue(); default: HARD_FAIL("Invalid type value: %s", value.which_value_type); } @@ -694,14 +994,14 @@ bool IsNullValue(const google_firestore_v1_Value& value) { return value.which_value_type == google_firestore_v1_Value_null_value_tag; } -google_firestore_v1_Value MinValue() { +google_firestore_v1_Value InternalMinValue() { google_firestore_v1_Value null_value; null_value.which_value_type = google_firestore_v1_Value_null_value_tag; null_value.null_value = {}; return null_value; } -bool IsMinValue(const google_firestore_v1_Value& value) { +bool IsInternalMinValue(const google_firestore_v1_Value& value) { return IsNullValue(value); } @@ -710,10 +1010,10 @@ bool IsMinValue(const google_firestore_v1_Value& value) { * values. Underlying it is a map value with a special map field that SDK user * cannot possibly construct. */ -google_firestore_v1_Value MaxValue() { +google_firestore_v1_Value InternalMaxValue() { google_firestore_v1_Value value; value.which_value_type = google_firestore_v1_Value_string_value_tag; - value.string_value = kMaxValueFieldValue; + value.string_value = kInternalMaxValueFieldValue; // Make `field_entry` static so that it has a memory address that outlives // this function's scope; otherwise, using its address in the `map_value` @@ -738,7 +1038,7 @@ google_firestore_v1_Value MaxValue() { return max_value; } -bool IsMaxValue(const google_firestore_v1_Value& value) { +bool IsInternalMaxValue(const google_firestore_v1_Value& value) { if (value.which_value_type != google_firestore_v1_Value_map_value_tag) { return false; } @@ -762,9 +1062,10 @@ bool IsMaxValue(const google_firestore_v1_Value& value) { // Comparing the pointer address, then actual content if addresses are // different. 
- return value.map_value.fields[0].value.string_value == kMaxValueFieldValue || + return value.map_value.fields[0].value.string_value == + kInternalMaxValueFieldValue || nanopb::MakeStringView(value.map_value.fields[0].value.string_value) == - kRawMaxValueFieldValue; + kRawInternalMaxValueFieldValue; } absl::optional IndexOfKey( @@ -826,6 +1127,260 @@ bool IsVectorValue(const google_firestore_v1_Value& value) { return true; } +bool IsMinKeyValue(const google_firestore_v1_Value& value) { + // A MinKey is expected to be a map as follows: { "__min__": null } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a '__min__' key. + absl::optional min_key_field_index = IndexOfKey( + value.map_value, kRawMinKeyTypeFieldValue, kMinKeyTypeFieldValue); + if (!min_key_field_index.has_value()) { + return false; + } + + // The inner value should be null. + if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_null_value_tag) { + return false; + } + + return true; +} + +bool IsMaxKeyValue(const google_firestore_v1_Value& value) { + // A MaxKey is expected to be a map as follows: { "__max__": null } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a '__max__' key. + absl::optional max_key_field_index = IndexOfKey( + value.map_value, kRawMaxKeyTypeFieldValue, kMaxKeyTypeFieldValue); + if (!max_key_field_index.has_value()) { + return false; + } + + // The inner value should be null. 
+ if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_null_value_tag) { + return false; + } + + return true; +} + +bool IsRegexValue(const google_firestore_v1_Value& value) { + // A regex is expected to be a map as follows: + // { + // "__regex__": { + // "pattern": "...", + // "options": "..." + // } + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__regex__" key. + absl::optional regex_field_index = IndexOfKey( + value.map_value, kRawRegexTypeFieldValue, kRegexTypeFieldValue); + if (!regex_field_index.has_value()) { + return false; + } + + // The inner value should be a map with 2 fields. + google_firestore_v1_Value& inner_value = value.map_value.fields[0].value; + if (inner_value.which_value_type != google_firestore_v1_Value_map_value_tag || + inner_value.map_value.fields_count != 2) { + return false; + } + + // Must have a string 'pattern'. + absl::optional pattern_field_index = + IndexOfKey(inner_value.map_value, kRawRegexTypePatternFieldValue, + kRegexTypePatternFieldValue); + if (!pattern_field_index.has_value() || + inner_value.map_value.fields[pattern_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_string_value_tag) { + return false; + } + + // Must have a string 'options'. + absl::optional options_field_index = + IndexOfKey(inner_value.map_value, kRawRegexTypeOptionsFieldValue, + kRegexTypeOptionsFieldValue); + if (!options_field_index.has_value() || + inner_value.map_value.fields[options_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_string_value_tag) { + return false; + } + + return true; +} + +bool IsBsonObjectId(const google_firestore_v1_Value& value) { + // A BsonObjectId is expected to be a map as follows: + // { + // "__oid__": "..." + // } + + // Must be a map with 1 field. 
+ if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__oid__" key. + absl::optional field_index = + IndexOfKey(value.map_value, kRawBsonObjectIdTypeFieldValue, + kBsonObjectIdTypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // Must have a string value. + google_firestore_v1_Value& oid = value.map_value.fields[0].value; + if (oid.which_value_type != google_firestore_v1_Value_string_value_tag) { + return false; + } + + return true; +} + +bool IsBsonTimestamp(const google_firestore_v1_Value& value) { + // A BsonTimestamp is expected to be a map as follows: + // { + // "__request_timestamp__": { + // "seconds": "...", + // "increment": "..." + // } + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__request_timestamp__" key. + absl::optional field_index = + IndexOfKey(value.map_value, kRawBsonTimestampTypeFieldValue, + kBsonTimestampTypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // The inner value should be a map with 2 fields. + google_firestore_v1_Value& innerValue = value.map_value.fields[0].value; + if (innerValue.which_value_type != google_firestore_v1_Value_map_value_tag || + innerValue.map_value.fields_count != 2) { + return false; + } + + // Must have an integer 'seconds' field. + absl::optional seconds_field_index = + IndexOfKey(innerValue.map_value, kRawBsonTimestampTypeSecondsFieldValue, + kBsonTimestampTypeSecondsFieldValue); + if (!seconds_field_index.has_value() || + innerValue.map_value.fields[seconds_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_integer_value_tag) { + return false; + } + + // Must have an integer 'increment'. 
+ absl::optional increment_field_index = + IndexOfKey(innerValue.map_value, kRawBsonTimestampTypeIncrementFieldValue, + kBsonTimestampTypeIncrementFieldValue); + if (!increment_field_index.has_value() || + innerValue.map_value.fields[increment_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_integer_value_tag) { + return false; + } + + return true; +} + +bool IsBsonBinaryData(const google_firestore_v1_Value& value) { + // A BsonTimestamp is expected to be a map as follows: + // { + // "__binary__": <> + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__binary__" key. + absl::optional field_index = + IndexOfKey(value.map_value, kRawBsonBinaryDataTypeFieldValue, + kBsonBinaryDataTypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // Must have a 'bytes' value. + if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_bytes_value_tag) { + return false; + } + + return true; +} + +bool IsInt32Value(const google_firestore_v1_Value& value) { + // An Int32Value is expected to be a map as follows: + // { + // "__int__": 12345 + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__int__" key. + absl::optional field_index = IndexOfKey( + value.map_value, kRawInt32TypeFieldValue, kInt32TypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // Must have an integer value. 
+ if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_integer_value_tag) { + return false; + } + + return true; +} + +bool IsBsonType(const google_firestore_v1_Value& value) { + MapType mapType = DetectMapType(value); + return mapType == MapType::kMinKey || mapType == MapType::kMaxKey || + mapType == MapType::kRegex || mapType == MapType::kInt32 || + mapType == MapType::kBsonObjectId || + mapType == MapType::kBsonTimestamp || + mapType == MapType::kBsonBinaryData; +} + google_firestore_v1_Value NaNValue() { google_firestore_v1_Value nan_value; nan_value.which_value_type = google_firestore_v1_Value_double_value_tag; @@ -833,6 +1388,13 @@ google_firestore_v1_Value NaNValue() { return nan_value; } +google_firestore_v1_Value ZeroIntegerValue() { + google_firestore_v1_Value zero_value; + zero_value.which_value_type = google_firestore_v1_Value_integer_value_tag; + zero_value.integer_value = 0; + return zero_value; +} + bool IsNaNValue(const google_firestore_v1_Value& value) { return value.which_value_type == google_firestore_v1_Value_double_value_tag && std::isnan(value.double_value); @@ -922,6 +1484,128 @@ google_firestore_v1_Value MinVector() { return lowerBound; } +google_firestore_v1_Value MinRegex() { + google_firestore_v1_MapValue_FieldsEntry* inner_field_entries = + nanopb::MakeArray(2); + inner_field_entries[0].key = kRegexTypePatternFieldValue; + inner_field_entries[0].value = MinString(); + inner_field_entries[1].key = kRegexTypeOptionsFieldValue; + inner_field_entries[1].value = MinString(); + google_firestore_v1_MapValue inner_map_value; + inner_map_value.fields_count = 2; + inner_map_value.fields = inner_field_entries; + google_firestore_v1_Value inner_value; + inner_value.which_value_type = google_firestore_v1_Value_map_value_tag; + inner_value.map_value = inner_map_value; + + google_firestore_v1_MapValue_FieldsEntry* outer_field_entries = + nanopb::MakeArray(1); + outer_field_entries[0].key = kRegexTypeFieldValue; + 
outer_field_entries[0].value = inner_value; + google_firestore_v1_MapValue outer_map_value; + outer_map_value.fields_count = 1; + outer_map_value.fields = outer_field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = outer_map_value; + + return lower_bound; +} + +google_firestore_v1_Value MinBsonObjectId() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kBsonObjectIdTypeFieldValue; + field_entries[0].value = MinString(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + +google_firestore_v1_Value MinBsonTimestamp() { + google_firestore_v1_MapValue_FieldsEntry* inner_field_entries = + nanopb::MakeArray(2); + inner_field_entries[0].key = kBsonTimestampTypeSecondsFieldValue; + inner_field_entries[0].value = ZeroIntegerValue(); + inner_field_entries[1].key = kBsonTimestampTypeIncrementFieldValue; + inner_field_entries[1].value = ZeroIntegerValue(); + google_firestore_v1_MapValue inner_map_value; + inner_map_value.fields_count = 2; + inner_map_value.fields = inner_field_entries; + google_firestore_v1_Value inner_value; + inner_value.which_value_type = google_firestore_v1_Value_map_value_tag; + inner_value.map_value = inner_map_value; + + google_firestore_v1_MapValue_FieldsEntry* outer_field_entries = + nanopb::MakeArray(1); + outer_field_entries[0].key = kBsonTimestampTypeFieldValue; + outer_field_entries[0].value = inner_value; + google_firestore_v1_MapValue outer_map_value; + outer_map_value.fields_count = 1; + outer_map_value.fields = outer_field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = 
google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = outer_map_value; + + return lower_bound; +} + +google_firestore_v1_Value MinBsonBinaryData() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kBsonBinaryDataTypeFieldValue; + field_entries[0].value = MinBytes(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + +google_firestore_v1_Value MinKeyValue() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kMinKeyTypeFieldValue; + field_entries[0].value = NullValue(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + +google_firestore_v1_Value MaxKeyValue() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kMaxKeyTypeFieldValue; + field_entries[0].value = NullValue(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + google_firestore_v1_Value MinMap() { google_firestore_v1_Value lowerBound; lowerBound.which_value_type = google_firestore_v1_Value_map_value_tag; diff --git a/Firestore/core/src/model/value_util.h b/Firestore/core/src/model/value_util.h index 708b71ccd16..fbea964a8ec 100644 --- a/Firestore/core/src/model/value_util.h +++ 
b/Firestore/core/src/model/value_util.h @@ -46,8 +46,8 @@ extern const char* kRawTypeValueFieldKey; extern pb_bytes_array_s* kTypeValueFieldKey; /** The field value of a maximum proto value. */ -extern const char* kRawMaxValueFieldValue; -extern pb_bytes_array_s* kMaxValueFieldValue; +extern const char* kRawInternalMaxValueFieldValue; +extern pb_bytes_array_s* kInternalMaxValueFieldValue; /** The type of a VectorValue proto. */ extern const char* kRawVectorTypeFieldValue; @@ -57,26 +57,102 @@ extern pb_bytes_array_s* kVectorTypeFieldValue; extern const char* kRawVectorValueFieldKey; extern pb_bytes_array_s* kVectorValueFieldKey; +/** The key of a MinKey in a map proto. */ +extern const char* kRawMinKeyTypeFieldValue; +extern pb_bytes_array_s* kMinKeyTypeFieldValue; + +/** The key of a MaxKey in a map proto. */ +extern const char* kRawMaxKeyTypeFieldValue; +extern pb_bytes_array_s* kMaxKeyTypeFieldValue; + +/** The key of a regex in a map proto. */ +extern const char* kRawRegexTypeFieldValue; +extern pb_bytes_array_s* kRegexTypeFieldValue; + +/** The regex pattern key. */ +extern const char* kRawRegexTypePatternFieldValue; +extern pb_bytes_array_s* kRegexTypePatternFieldValue; + +/** The regex options key. */ +extern const char* kRawRegexTypeOptionsFieldValue; +extern pb_bytes_array_s* kRegexTypeOptionsFieldValue; + +/** The key of an int32 in a map proto. */ +extern const char* kRawInt32TypeFieldValue; +extern pb_bytes_array_s* kInt32TypeFieldValue; + +/** The key of a BSON ObjectId in a map proto. */ +extern const char* kRawBsonObjectIdTypeFieldValue; +extern pb_bytes_array_s* kBsonObjectIdTypeFieldValue; + +/** The key of a BSON Timestamp in a map proto. */ +extern const char* kRawBsonTimestampTypeFieldValue; +extern pb_bytes_array_s* kBsonTimestampTypeFieldValue; + +/** The key of a BSON Timestamp seconds in a map proto. 
*/ +extern const char* kRawBsonTimestampTypeSecondsFieldValue; +extern pb_bytes_array_s* kBsonTimestampTypeSecondsFieldValue; + +/** The key of a BSON Timestamp increment in a map proto. */ +extern const char* kRawBsonTimestampTypeIncrementFieldValue; +extern pb_bytes_array_s* kBsonTimestampTypeIncrementFieldValue; + +/** The key of a BSON Binary Data in a map proto. */ +extern const char* kRawBsonBinaryDataTypeFieldValue; +extern pb_bytes_array_s* kBsonBinaryDataTypeFieldValue; + /** * The order of types in Firestore. This order is based on the backend's - * ordering, but modified to support server timestamps. + * ordering, but modified to support server timestamps and `MAX_VALUE` inside + * the SDK. */ enum class TypeOrder { kNull = 0, - kBoolean = 1, - kNumber = 2, - kTimestamp = 3, - kServerTimestamp = 4, - kString = 5, - kBlob = 6, - kReference = 7, - kGeoPoint = 8, - kArray = 9, - kVector = 10, - kMap = 11, - kMaxValue = 12 + kMinKey = 1, + kBoolean = 2, + // Note: all numbers (32-bit int, 64-bit int, 64-bit double, 128-bit decimal, + // etc.) are sorted together numerically. The `CompareNumbers` function + // distinguishes between different number types and compares them accordingly. + kNumber = 3, + kTimestamp = 4, + kBsonTimestamp = 5, + kServerTimestamp = 6, + kString = 7, + kBlob = 8, + kBsonBinaryData = 9, + kReference = 10, + kBsonObjectId = 11, + kGeoPoint = 12, + kRegex = 13, + kArray = 14, + kVector = 15, + kMap = 16, + kMaxKey = 17, + kInternalMaxValue = 18 }; +/** + * The type that a Map is used to represent. + * Most Maps are NORMAL maps, however, some maps are used to identify more + * complex types. + */ +enum class MapType { + kNormal = 0, + kServerTimestamp = 1, + kInternalMaxValue = 2, + kVector = 3, + kMinKey = 4, + kMaxKey = 5, + kRegex = 6, + kInt32 = 7, + kBsonObjectId = 8, + kBsonTimestamp = 9, + kBsonBinaryData = 10 +}; + +/** Returns the Map type for the given value. 
*/ +MapType DetectMapType(const google_firestore_v1_Value& value); + /** Returns the backend's type order of the given Value type. */ TypeOrder GetTypeOrder(const google_firestore_v1_Value& value); @@ -153,34 +229,75 @@ bool IsNullValue(const google_firestore_v1_Value& value); * The returned value might point to heap allocated memory that is owned by * this function. To take ownership of this memory, call `DeepClone`. */ -google_firestore_v1_Value MinValue(); +google_firestore_v1_Value InternalMinValue(); -/** Returns `true` if `value` is MinValue() in its Protobuf representation. */ -bool IsMinValue(const google_firestore_v1_Value& value); +/** + * Returns `true` if `value` is InternalMinValue() in its Protobuf + * representation. + */ +bool IsInternalMinValue(const google_firestore_v1_Value& value); /** * Returns a Protobuf value that is larger than any legitimate value SDK * users can create. * * Under the hood, it is a sentinel Protobuf Map with special fields that - * Firestore comparison logic always return true for `MaxValue() > v`, for any - * v users can create, regardless `v`'s type and value. + * Firestore comparison logic always return true for `InternalMaxValue() > v`, + * for any `v` users can create, regardless `v`'s type and value. * * The returned value might point to heap allocated memory that is owned by * this function. To take ownership of this memory, call `DeepClone`. */ -google_firestore_v1_Value MaxValue(); +google_firestore_v1_Value InternalMaxValue(); /** - * Returns `true` if `value` is equal to `MaxValue()`. + * Returns `true` if `value` is equal to `InternalMaxValue()`. */ -bool IsMaxValue(const google_firestore_v1_Value& value); +bool IsInternalMaxValue(const google_firestore_v1_Value& value); /** - * Returns `true` if `value` represents a VectorValue.. + * Returns `true` if `value` represents a VectorValue. */ bool IsVectorValue(const google_firestore_v1_Value& value); +/** + * Returns `true` if `value` represents a MinKey. 
+ */ +bool IsMinKeyValue(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a MaxKey. + */ +bool IsMaxKeyValue(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a RegexValue. + */ +bool IsRegexValue(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents an Int32Value. + */ +bool IsInt32Value(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a BsonObjectId. + */ +bool IsBsonObjectId(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a BsonTimestamp. + */ +bool IsBsonTimestamp(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a BsonBinaryData. + */ +bool IsBsonBinaryData(const google_firestore_v1_Value& value); + +/** Returns true if `value` is a BSON Type. */ +bool IsBsonType(const google_firestore_v1_Value& value); + /** * Returns the index of the specified key (`kRawTypeValueFieldKey`) in the * map (`mapValue`). 
`kTypeValueFieldKey` is an alternative representation @@ -217,6 +334,18 @@ google_firestore_v1_Value MinReference(); google_firestore_v1_Value MinGeoPoint(); +google_firestore_v1_Value MinKeyValue(); + +google_firestore_v1_Value MaxKeyValue(); + +google_firestore_v1_Value MinBsonBinaryData(); + +google_firestore_v1_Value MinBsonObjectId(); + +google_firestore_v1_Value MinBsonTimestamp(); + +google_firestore_v1_Value MinRegex(); + google_firestore_v1_Value MinArray(); google_firestore_v1_Value MinVector(); diff --git a/Firestore/core/test/unit/bundle/bundle_serializer_test.cc b/Firestore/core/test/unit/bundle/bundle_serializer_test.cc index 72d788c7832..8d384de40ea 100644 --- a/Firestore/core/test/unit/bundle/bundle_serializer_test.cc +++ b/Firestore/core/test/unit/bundle/bundle_serializer_test.cc @@ -50,6 +50,7 @@ using ProtoDocument = ::google::firestore::v1::Document; using ProtoMaybeDocument = ::firestore::client::MaybeDocument; using ProtoNamedQuery = ::firestore::NamedQuery; using ProtoValue = ::google::firestore::v1::Value; +using MapValue = ::google::firestore::v1::MapValue; using core::Query; using core::Target; using local::LocalSerializer; @@ -622,6 +623,93 @@ TEST_F(BundleSerializerTest, DecodesArrayValues) { VerifyFieldValueRoundtrip(value); } +TEST_F(BundleSerializerTest, DecodesMinKey) { + ProtoValue null_value; + null_value.set_null_value(google::protobuf::NULL_VALUE); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawMinKeyTypeFieldValue, null_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesMaxKey) { + ProtoValue null_value; + null_value.set_null_value(google::protobuf::NULL_VALUE); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawMaxKeyTypeFieldValue, null_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesInt32Value) { + ProtoValue int_value; + int_value.set_integer_value(1234L); + 
ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawInt32TypeFieldValue, int_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesRegexValue) { + ProtoValue pattern_value; + ProtoValue options_value; + ProtoValue inner_map_value; + ProtoValue value; + + pattern_value.set_string_value("^foo"); + options_value.set_string_value("i"); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawRegexTypePatternFieldValue, pattern_value}); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawRegexTypeOptionsFieldValue, options_value}); + value.mutable_map_value()->mutable_fields()->insert( + {model::kRawRegexTypeFieldValue, inner_map_value}); + + VerifyFieldValueRoundtrip(value); +} + +TEST_F(BundleSerializerTest, DecodesBsonObjectId) { + ProtoValue oid_value; + oid_value.set_string_value("foo"); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonObjectIdTypeFieldValue, oid_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesBsonTimestamp) { + ProtoValue seconds_value; + ProtoValue increment_value; + ProtoValue inner_map_value; + ProtoValue value; + + seconds_value.set_integer_value(1234L); + increment_value.set_integer_value(5678L); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonTimestampTypeSecondsFieldValue, seconds_value}); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonTimestampTypeIncrementFieldValue, increment_value}); + value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonTimestampTypeFieldValue, inner_map_value}); + + VerifyFieldValueRoundtrip(value); +} + +TEST_F(BundleSerializerTest, DecodesBsonBinaryData) { + ProtoValue binary_value; + uint8_t array[]{0, 1, 2, 3}; + binary_value.set_bytes_value(array, 4); + ProtoValue value; + 
value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonBinaryDataTypeFieldValue, binary_value}); + + VerifyFieldValueRoundtrip(value); +} + TEST_F(BundleSerializerTest, DecodesNestedObjectValues) { ProtoValue b; b.set_boolean_value(true); diff --git a/Firestore/core/test/unit/core/target_test.cc b/Firestore/core/test/unit/core/target_test.cc index 18de92c15f4..e48db89a13b 100644 --- a/Firestore/core/test/unit/core/target_test.cc +++ b/Firestore/core/test/unit/core/target_test.cc @@ -186,11 +186,11 @@ TEST(TargetTest, OrderByQueryBound) { Target target = Query("c").AddingOrderBy(OrderBy("foo")).ToTarget(); FieldIndex index = MakeFieldIndex("c", "foo", Segment::Kind::kAscending); auto lower_bound = target.GetLowerBound(index); - EXPECT_EQ(lower_bound.values[0], model::MinValue()); + EXPECT_EQ(lower_bound.values[0], model::InternalMinValue()); EXPECT_TRUE(lower_bound.inclusive); auto upper_bound = target.GetUpperBound(index); - EXPECT_EQ(upper_bound.values[0], model::MaxValue()); + EXPECT_EQ(upper_bound.values[0], model::InternalMaxValue()); EXPECT_TRUE(upper_bound.inclusive); } @@ -219,7 +219,7 @@ TEST(TargetTest, StartingAtQueryBound) { VerifyBound(lower_bound, true, {*Value("bar")}); auto upper_bound = target.GetUpperBound(index); - EXPECT_EQ(upper_bound.values[0], model::MaxValue()); + EXPECT_EQ(upper_bound.values[0], model::InternalMaxValue()); EXPECT_TRUE(upper_bound.inclusive); } @@ -287,7 +287,7 @@ TEST(TargetTest, EndingAtQueryBound) { FieldIndex index = MakeFieldIndex("c", "foo", Segment::Kind::kAscending); auto lower_bound = target.GetLowerBound(index); - ASSERT_EQ(lower_bound.values[0], model::MinValue()); + ASSERT_EQ(lower_bound.values[0], model::InternalMinValue()); ASSERT_TRUE(lower_bound.inclusive); auto upper_bound = target.GetUpperBound(index); diff --git a/Firestore/core/test/unit/index/CMakeLists.txt b/Firestore/core/test/unit/index/CMakeLists.txt new file mode 100644 index 00000000000..648359c5c24 --- /dev/null +++ 
b/Firestore/core/test/unit/index/CMakeLists.txt @@ -0,0 +1,27 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +if(NOT FIREBASE_IOS_BUILD_TESTS) + return() +endif() + +file(GLOB sources *.cc) +firebase_ios_add_test(firestore_index_test ${sources}) + +target_link_libraries( + firestore_index_test PRIVATE + GMock::GMock + firestore_core + firestore_testutil +) diff --git a/Firestore/core/test/unit/index/index_value_writer_test.cc b/Firestore/core/test/unit/index/index_value_writer_test.cc new file mode 100644 index 00000000000..b66bd182a54 --- /dev/null +++ b/Firestore/core/test/unit/index/index_value_writer_test.cc @@ -0,0 +1,363 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "Firestore/core/src/index/firestore_index_value_writer.h" +#include "Firestore/core/src/index/index_byte_encoder.h" +#include "Firestore/core/src/nanopb/nanopb_util.h" +#include "Firestore/core/test/unit/testutil/testutil.h" +#include "gtest/gtest.h" + +namespace firebase { +namespace firestore { +namespace index { + +namespace { + +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; +using testutil::Int32; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; +using testutil::VectorType; + +TEST(IndexValueWriterTest, writeIndexValueSupportsVector) { + // Value + auto vector = VectorType(1, 2, 3); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*vector, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kVector); // Vector type + index_byte_encoder->WriteLong(IndexType::kNumber); // Number type + index_byte_encoder->WriteLong(3); // Vector Length + index_byte_encoder->WriteLong(IndexType::kString); + index_byte_encoder->WriteString("value"); + index_byte_encoder->WriteLong(IndexType::kArray); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(1); // position 0 + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(2); // position 1 + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(3); // position 2 + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsEmptyVector) { + // Value - Create an empty vector + auto 
vector = VectorType(); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*vector, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + + index_byte_encoder->WriteLong(IndexType::kVector); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteLong(0); // vector length + index_byte_encoder->WriteLong(IndexType::kString); + index_byte_encoder->WriteString("value"); + index_byte_encoder->WriteLong(IndexType::kArray); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonObjectId) { + // Value + auto value = BsonObjectId("507f191e810c19729de860ea"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonObjectId); + index_byte_encoder->WriteBytes( + nanopb::MakeBytesArray("507f191e810c19729de860ea")); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonBinaryData) { + // Value + auto value = BsonBinaryData(1, {1, 2, 3}); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer 
expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonBinaryData); + // Expected bytes: subtype (1) + data {1, 2, 3} + const uint8_t binary_payload[] = {1, 1, 2, 3}; + index_byte_encoder->WriteBytes( + nanopb::MakeBytesArray(binary_payload, sizeof(binary_payload))); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonBinaryWithEmptyData) { + // Value + auto value = BsonBinaryData(1, {}); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonBinaryData); + // Expected bytes: subtype (1) only + const uint8_t binary_payload[] = {1}; + index_byte_encoder->WriteBytes( + nanopb::MakeBytesArray(binary_payload, sizeof(binary_payload))); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonTimestamp) { + // Value + auto value = BsonTimestamp(1, 2); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + 
index_byte_encoder->WriteLong(IndexType::kBsonTimestamp); + uint64_t timestamp_encoded = (1ULL << 32) | (2); + index_byte_encoder->WriteLong(timestamp_encoded); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsLargestBsonTimestamp) { + // Value + auto value = BsonTimestamp(4294967295ULL, 4294967295ULL); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonTimestamp); + uint64_t timestamp_encoded = (4294967295ULL << 32) | (4294967295ULL); + index_byte_encoder->WriteLong(timestamp_encoded); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsSmallestBsonTimestamp) { + // Value + auto value = BsonTimestamp(0, 0); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonTimestamp); + index_byte_encoder->WriteLong(0); // (0 << 32 | 0) + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsRegex) { + // Value + auto value = Regex("^foo", "i"); + + // Actual + 
IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kRegex); + index_byte_encoder->WriteString("^foo"); + index_byte_encoder->WriteString("i"); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsInt32) { + // Value + auto value = Int32(1); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(1.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsLargestInt32) { + // Value + auto value = Int32(2147483647); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(2147483647.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = 
expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsSmallestInt32) { + // Value + auto value = Int32(-2147483648); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(-2147483648.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsMinKey) { + // Value + auto value = MinKey(); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kMinKey); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsMaxKey) { + // Value + auto value = MaxKey(); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kMaxKey); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = 
expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +} // namespace +} // namespace index +} // namespace firestore +} // namespace firebase diff --git a/Firestore/core/test/unit/local/leveldb_index_manager_test.cc b/Firestore/core/test/unit/local/leveldb_index_manager_test.cc index 3cbc7667dd8..895d8acb2e0 100644 --- a/Firestore/core/test/unit/local/leveldb_index_manager_test.cc +++ b/Firestore/core/test/unit/local/leveldb_index_manager_test.cc @@ -15,6 +15,7 @@ */ #include "Firestore/core/src/local/leveldb_index_manager.h" +#include "Firestore/core/include/firebase/firestore/geo_point.h" #include "Firestore/core/src/core/bound.h" #include "Firestore/core/src/local/leveldb_persistence.h" #include "Firestore/core/src/model/field_index.h" @@ -39,16 +40,26 @@ using model::ResourcePath; using model::Segment; using testutil::AndFilters; using testutil::Array; +using testutil::BlobValue; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::CollectionGroupQuery; using testutil::DeletedDoc; using testutil::Doc; using testutil::Filter; +using testutil::Int32; using testutil::Key; using testutil::MakeFieldIndex; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; using testutil::OrderBy; using testutil::OrFilters; using testutil::Query; +using testutil::Ref; +using testutil::Regex; +using testutil::Value; using testutil::VectorType; using testutil::Version; @@ -976,6 +987,525 @@ TEST_F(LevelDbIndexManagerTest, IndexVectorValueFields) { }); } +TEST_F(LevelDbIndexManagerTest, IndexBsonObjectIdFields) { + persistence_->Run("TestIndexBsonObjectIdFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/doc2", Map("key", BsonObjectId("507f191e810c19729de860eb"))); + AddDoc("coll/doc3", Map("key", 
BsonObjectId("507f191e810c19729de860ec"))); + + auto query = Query("coll").AddingOrderBy(OrderBy("key")); + { + SCOPED_TRACE("no filter"); + VerifyResults(query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", "==", BsonObjectId("507f191e810c19729de860ea"))); + { + SCOPED_TRACE("Query BsonObjectId with EqualTo filter"); + VerifyResults(query, {"coll/doc1"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", "!=", BsonObjectId("507f191e810c19729de860ea"))); + { + SCOPED_TRACE("Query BsonObjectId with NotEqualTo filter"); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", ">=", BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with GreaterThanOrEqualTo filter"); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", "<=", BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with LessThanOrEqualTo filter"); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", ">", + BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with GreaterThan filter"); + VerifyResults(query, {"coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", "<", + BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with LessThan filter"); + VerifyResults(query, {"coll/doc1"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", ">", + BsonObjectId("507f191e810c19729de860ec"))); + { + SCOPED_TRACE( + "Query BsonObjectId with GreaterThan filter and empty result set"); + 
VerifyResults(query, {}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", "<", + BsonObjectId("507f191e810c19729de860ea"))); + { + SCOPED_TRACE( + "Query BsonObjectId with LessThan filter and empty result set"); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexBsonBinaryDataFields) { + persistence_->Run("TestIndexBsonBinaryDataFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/doc2", Map("key", BsonBinaryData(1, {1, 2, 4}))); + AddDoc("coll/doc3", Map("key", BsonBinaryData(1, {2, 1, 2}))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with EqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", "==", BsonBinaryData(1, {1, 2, 3}))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with NotEqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", "!=", BsonBinaryData(1, {1, 2, 3}))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", ">=", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with LessThanOrEqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", "<=", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with GreaterThan filter"); + auto query = base_query.AddingFilter( + Filter("key", ">", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc3"}); + } + { + 
SCOPED_TRACE("Query BsonBinaryData with LessThan filter"); + auto query = base_query.AddingFilter( + Filter("key", "<", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE( + "Query BsonBinaryData with GreaterThan filter and empty result set"); + auto query = base_query.AddingFilter( + Filter("key", ">", BsonBinaryData(1, {2, 1, 2}))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE( + "Query BsonBinaryData with LessThan filter and empty result set"); + auto query = base_query.AddingFilter( + Filter("key", "<", BsonBinaryData(1, {1, 2, 3}))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexBsonTimestampFields) { + persistence_->Run("TestIndexBsonTimestampFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", BsonTimestamp(1, 1))); + AddDoc("coll/doc2", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/doc3", Map("key", BsonTimestamp(2, 1))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with EqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "==", BsonTimestamp(1, 1))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with NotEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "!=", BsonTimestamp(1, 1))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with GreaterThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", ">=", BsonTimestamp(1, 2))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with LessThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "<=", BsonTimestamp(1, 2))); + 
VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with GreaterThan filter"); + auto query = + base_query.AddingFilter(Filter("key", ">", BsonTimestamp(1, 2))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with LessThan filter"); + auto query = + base_query.AddingFilter(Filter("key", "<", BsonTimestamp(1, 2))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE( + "Query BsonTimestamp with GreaterThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", ">", BsonTimestamp(2, 1))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE( + "Query BsonTimestamp with LessThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", "<", BsonTimestamp(1, 1))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexInt32Fields) { + persistence_->Run("TestIndexInt32Fields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", Int32(1))); + AddDoc("coll/doc2", Map("key", Int32(2))); + AddDoc("coll/doc3", Map("key", Int32(3))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with EqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "==", Int32(1))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Int32 with NotEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "!=", Int32(1))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", ">=", Int32(2))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with 
LessThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "<=", Int32(2))); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query Int32 with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", Int32(2))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with LessThan filter"); + auto query = base_query.AddingFilter(Filter("key", "<", Int32(2))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Int32 with GreaterThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", ">", Int32(3))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query Int32 with LessThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", "<", Int32(1))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexRegexFields) { + persistence_->Run("TestIndexRegexFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", Regex("a", "i"))); + AddDoc("coll/doc2", Map("key", Regex("a", "m"))); + AddDoc("coll/doc3", Map("key", Regex("b", "i"))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with EqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "==", Regex("a", "i"))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Regex with NotEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "!=", Regex("a", "i"))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with GreaterThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", ">=", Regex("a", "m"))); + VerifyResults(query, {"coll/doc2", 
"coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with LessThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "<=", Regex("a", "m"))); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query Regex with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", Regex("a", "m"))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with LessThan filter"); + auto query = base_query.AddingFilter(Filter("key", "<", Regex("a", "m"))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Regex with GreaterThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", ">", Regex("b", "i"))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query Regex with LessThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", "<", Regex("a", "i"))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexMinKeyFields) { + persistence_->Run("TestIndexMinKeyFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", MinKey())); + AddDoc("coll/doc2", Map("key", MinKey())); + AddDoc("coll/doc3", Map("key", nullptr)); + AddDoc("coll/doc4", Map("key", 1)); + AddDoc("coll/doc5", Map("key", MaxKey())); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc3", "coll/doc1", "coll/doc2", + "coll/doc4", "coll/doc5"}); + } + { + SCOPED_TRACE("Query MinKey with EqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "==", MinKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MinKey with NotEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "!=", MinKey())); + VerifyResults(query, {"coll/doc4", "coll/doc5"}); + } + { + 
SCOPED_TRACE("Query MinKey with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", ">=", MinKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MinKey with LessThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "<=", MinKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MinKey with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", MinKey())); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query MinKey with LessThan filter"); + auto query = base_query.AddingFilter(Filter("key", "<", MinKey())); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexMaxKeyFields) { + persistence_->Run("TestIndexMaxKeyFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", MinKey())); + AddDoc("coll/doc2", Map("key", 1)); + AddDoc("coll/doc3", Map("key", MaxKey())); + AddDoc("coll/doc4", Map("key", MaxKey())); + AddDoc("coll/doc5", Map("key", nullptr)); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc5", "coll/doc1", "coll/doc2", + "coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with EqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "==", MaxKey())); + VerifyResults(query, {"coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with NotEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "!=", MaxKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MaxKey with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", ">=", MaxKey())); + VerifyResults(query, {"coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with LessThanOrEqualTo filter"); + 
auto query = base_query.AddingFilter(Filter("key", "<=", MaxKey())); + VerifyResults(query, {"coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", MaxKey())); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query MaxKey with LessThan filter"); + auto query = base_query.AddingFilter(Filter("key", "<", MaxKey())); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexBsonTypesTogether) { + persistence_->Run("TestIndexBsonTypesTogether", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kDescending)); + + AddDoc("coll/doc1", Map("key", MinKey())); + AddDoc("coll/doc2", Map("key", Int32(2))); + AddDoc("coll/doc3", Map("key", Int32(1))); + AddDoc("coll/doc4", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/doc5", Map("key", BsonTimestamp(1, 1))); + AddDoc("coll/doc6", Map("key", BsonBinaryData(1, {1, 2, 4}))); + AddDoc("coll/doc7", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/doc8", Map("key", BsonObjectId("507f191e810c19729de860eb"))); + AddDoc("coll/doc9", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/doc10", Map("key", Regex("a", "m"))); + AddDoc("coll/doc11", Map("key", Regex("a", "i"))); + AddDoc("coll/doc12", Map("key", MaxKey())); + + auto query = Query("coll").AddingOrderBy(OrderBy("key", "desc")); + + VerifyResults(query, {"coll/doc12", "coll/doc10", "coll/doc11", "coll/doc8", + "coll/doc9", "coll/doc6", "coll/doc7", "coll/doc4", + "coll/doc5", "coll/doc2", "coll/doc3", "coll/doc1"}); + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexAllTypesTogether) { + persistence_->Run("TestIndexAllTypesTogether", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kDescending)); + + AddDoc("coll/a", Map("key", nullptr)); + AddDoc("coll/b", Map("key", MinKey())); + AddDoc("coll/c", 
Map("key", true)); + AddDoc("coll/d", Map("key", std::numeric_limits::quiet_NaN())); + AddDoc("coll/e", Map("key", Int32(1))); + AddDoc("coll/f", Map("key", 2.0)); + AddDoc("coll/g", Map("key", 3)); + AddDoc("coll/h", Map("key", Timestamp(100, 123456000))); + AddDoc("coll/i", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/j", Map("key", "string")); + AddDoc("coll/k", Map("key", BlobValue(0, 1, 255))); + AddDoc("coll/l", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/m", Map("key", Ref("project", "coll/doc"))); + AddDoc("coll/n", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/o", Map("key", GeoPoint(0, 1))); + AddDoc("coll/p", Map("key", Regex("^foo", "i"))); + AddDoc("coll/q", Map("key", Array(1, 2))); + AddDoc("coll/r", Map("key", VectorType(1, 2))); + AddDoc("coll/s", Map("key", Map("a", 1))); + AddDoc("coll/t", Map("key", MaxKey())); + + auto query = Query("coll").AddingOrderBy(OrderBy("key", "desc")); + + VerifyResults(query, {"coll/t", "coll/s", "coll/r", "coll/q", "coll/p", + "coll/o", "coll/n", "coll/m", "coll/l", "coll/k", + "coll/j", "coll/i", "coll/h", "coll/g", "coll/f", + "coll/e", "coll/d", "coll/c", "coll/b", "coll/a"}); + }); +} + TEST_F(LevelDbIndexManagerTest, AdvancedQueries) { // This test compares local query results with those received from the Java // Server SDK. diff --git a/Firestore/core/test/unit/local/leveldb_local_store_test.cc b/Firestore/core/test/unit/local/leveldb_local_store_test.cc index 85e4286698b..8f53cbbee03 100644 --- a/Firestore/core/test/unit/local/leveldb_local_store_test.cc +++ b/Firestore/core/test/unit/local/leveldb_local_store_test.cc @@ -14,6 +14,7 @@ * limitations under the License. 
*/ +#include "Firestore/core/include/firebase/firestore/geo_point.h" #include "Firestore/core/src/core/filter.h" #include "Firestore/core/src/core/query.h" #include "Firestore/core/src/local/leveldb_persistence.h" @@ -37,20 +38,30 @@ using model::IndexState; using testutil::AddedRemoteEvent; using testutil::Array; +using testutil::BlobValue; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DeletedDoc; using testutil::DeleteMutation; using testutil::Doc; using testutil::Field; using testutil::Filter; +using testutil::Int32; using testutil::Key; using testutil::MakeFieldIndex; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; using testutil::OrderBy; using testutil::OrFilters; using testutil::OverlayTypeMap; +using testutil::Ref; +using testutil::Regex; using testutil::SetMutation; using testutil::UpdateRemoteEvent; using testutil::Vector; +using testutil::VectorType; using testutil::Version; class TestHelper : public LocalStoreTestHelper { @@ -309,6 +320,680 @@ TEST_F(LevelDbLocalStoreTest, UsesIndexForLimitQueryWhenIndexIsUpdated) { FSTAssertQueryReturned("coll/a", "coll/c"); } +TEST_F(LevelDbLocalStoreTest, IndexesBsonObjectId) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation( + "coll/doc1", Map("key", BsonObjectId("507f191e810c19729de860ea")))); + WriteMutation(SetMutation( + "coll/doc2", Map("key", BsonObjectId("507f191e810c19729de860eb")))); + WriteMutation(SetMutation( + "coll/doc3", Map("key", BsonObjectId("507f191e810c19729de860ec")))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), 
model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", BsonObjectId("507f191e810c19729de860ea"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", BsonObjectId("507f191e810c19729de860ea"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", BsonObjectId("507f191e810c19729de860eb"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", BsonObjectId("507f191e810c19729de860eb"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", BsonObjectId("507f191e810c19729de860ec"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = 
testutil::Query("coll").AddingFilter( + Filter("key", "<", BsonObjectId("507f191e810c19729de860ea"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", + Array(BsonObjectId("507f191e810c19729de860ea"), + BsonObjectId("507f191e810c19729de860eb")))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesBsonTimestamp) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation( + SetMutation("coll/doc1", Map("key", BsonTimestamp(1000, 1000)))); + WriteMutation( + SetMutation("coll/doc2", Map("key", BsonTimestamp(1001, 1000)))); + WriteMutation( + SetMutation("coll/doc3", Map("key", BsonTimestamp(1000, 1001)))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", BsonTimestamp(1000, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = 
testutil::Query("coll").AddingFilter( + Filter("key", "!=", BsonTimestamp(1000, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc3", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", BsonTimestamp(1000, 1001))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc3", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", BsonTimestamp(1000, 1001))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", BsonTimestamp(1001, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", BsonTimestamp(1000, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", + Array(BsonTimestamp(1000, 1000), BsonTimestamp(1000, 1001)))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), 
model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesBsonBinary) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation( + SetMutation("coll/doc1", Map("key", BsonBinaryData(1, {1, 2, 3})))); + WriteMutation( + SetMutation("coll/doc2", Map("key", BsonBinaryData(1, {1, 2})))); + WriteMutation( + SetMutation("coll/doc3", Map("key", BsonBinaryData(1, {1, 2, 4})))); + WriteMutation( + SetMutation("coll/doc4", Map("key", BsonBinaryData(2, {1, 2})))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 4, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc1", "coll/doc3", "coll/doc4"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3", "coll/doc4"); + + query = testutil::Query("coll").AddingFilter( + 
Filter("key", ">=", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3", "coll/doc4"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", BsonBinaryData(2, {1, 2}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", BsonBinaryData(1, {1, 2}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", + Array(BsonBinaryData(1, {1, 2, 3}), BsonBinaryData(1, {1, 2})))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesRegex) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", Regex("^bar", 
"i")))); + WriteMutation(SetMutation("coll/doc2", Map("key", Regex("^bar", "m")))); + WriteMutation(SetMutation("coll/doc3", Map("key", Regex("^foo", "i")))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", Regex("^bar", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", Regex("^bar", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", Regex("^foo", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", Regex("^bar", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", Array(Regex("^bar", "i"), Regex("^foo", "i")))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* 
byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesInt32) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", Int32(-1)))); + WriteMutation(SetMutation("coll/doc2", Map("key", Int32(0)))); + WriteMutation(SetMutation("coll/doc3", Map("key", Int32(1)))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter(Filter("key", "==", Int32(-1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter(Filter("key", "!=", Int32(-1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter(Filter("key", ">=", Int32(0))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), 
model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter(Filter("key", "<=", Int32(0))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter(Filter("key", ">", Int32(1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter(Filter("key", "<", Int32(-1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", Array(Int32(-1), Int32(0)))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesMinKey) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", nullptr))); + WriteMutation(SetMutation("coll/doc2", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc3", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc4", Map("key", Int32(1)))); + WriteMutation(SetMutation("coll/doc5", Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", 
"asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", + "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + 
query = testutil::Query("coll").AddingFilter( + Filter("key", ">", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", Array(testutil::MinKey(), testutil::MaxKey()))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3", "coll/doc5"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesMaxKey) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", nullptr))); + WriteMutation(SetMutation("coll/doc2", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc3", Map("key", Int32(1)))); + WriteMutation(SetMutation("coll/doc4", Map("key", MaxKey()))); + WriteMutation(SetMutation("coll/doc5", Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + 
FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", + "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= 
*/ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); +} + +TEST_F(LevelDbLocalStoreTest, IndexesAllBsonTypesTogether) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kDescending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc2", Map("key", Int32(2)))); + WriteMutation(SetMutation("coll/doc3", Map("key", Int32(1)))); + WriteMutation( + SetMutation("coll/doc4", Map("key", BsonTimestamp(1000, 1001)))); + WriteMutation( + SetMutation("coll/doc5", Map("key", BsonTimestamp(1000, 1000)))); + WriteMutation( + SetMutation("coll/doc6", Map("key", BsonBinaryData(1, {1, 2, 4})))); + WriteMutation( + SetMutation("coll/doc7", Map("key", BsonBinaryData(1, {1, 2, 3})))); + WriteMutation(SetMutation( + "coll/doc8", Map("key", BsonObjectId("507f191e810c19729de860eb")))); + WriteMutation(SetMutation( + "coll/doc9", Map("key", BsonObjectId("507f191e810c19729de860ea")))); + WriteMutation(SetMutation("coll/doc10", Map("key", Regex("^bar", "m")))); + WriteMutation(SetMutation("coll/doc11", Map("key", Regex("^bar", "i")))); + WriteMutation(SetMutation("coll/doc12", Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "desc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 12, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}, + {Key("coll/doc6"), model::Mutation::Type::Set}, + {Key("coll/doc7"), model::Mutation::Type::Set}, + {Key("coll/doc8"), model::Mutation::Type::Set}, + {Key("coll/doc9"), model::Mutation::Type::Set}, + {Key("coll/doc10"), 
model::Mutation::Type::Set}, + {Key("coll/doc11"), model::Mutation::Type::Set}, + {Key("coll/doc12"), model::Mutation::Type::Set}})); + + FSTAssertQueryReturned("coll/doc12", "coll/doc10", "coll/doc11", "coll/doc8", + "coll/doc9", "coll/doc6", "coll/doc7", "coll/doc4", + "coll/doc5", "coll/doc2", "coll/doc3", "coll/doc1"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesAllTypesTogether) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", nullptr))); + WriteMutation(SetMutation("coll/doc2", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc3", Map("key", true))); + WriteMutation(SetMutation("coll/doc4", Map("key", NAN))); + WriteMutation(SetMutation("coll/doc5", Map("key", Int32(1)))); + WriteMutation(SetMutation("coll/doc6", Map("key", 2.0))); + WriteMutation(SetMutation("coll/doc7", Map("key", 3L))); + WriteMutation( + SetMutation("coll/doc8", Map("key", Timestamp(100, 123456000)))); + WriteMutation(SetMutation("coll/doc9", Map("key", BsonTimestamp(1, 2)))); + WriteMutation(SetMutation("coll/doc10", Map("key", "string"))); + WriteMutation(SetMutation("coll/doc11", Map("key", BlobValue(1, 2, 3)))); + WriteMutation( + SetMutation("coll/doc12", Map("key", BsonBinaryData(1, {1, 2, 3})))); + WriteMutation( + SetMutation("coll/doc13", Map("key", Ref("project/db", "col/doc")))); + WriteMutation(SetMutation( + "coll/doc14", Map("key", BsonObjectId("507f191e810c19729de860ea")))); + WriteMutation(SetMutation("coll/doc15", Map("key", GeoPoint(1, 2)))); + WriteMutation(SetMutation("coll/doc16", Map("key", Regex("^bar", "m")))); + WriteMutation(SetMutation("coll/doc17", Map("key", Array(2L, "foo")))); + WriteMutation( + SetMutation("coll/doc18", Map("key", VectorType(1.0, 2.0, 3.0)))); + WriteMutation( + SetMutation("coll/doc19", Map("key", Map("bar", 1L, "foo", 2L)))); + WriteMutation(SetMutation("coll/doc20", 
Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 20, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}, + {Key("coll/doc6"), model::Mutation::Type::Set}, + {Key("coll/doc7"), model::Mutation::Type::Set}, + {Key("coll/doc8"), model::Mutation::Type::Set}, + {Key("coll/doc9"), model::Mutation::Type::Set}, + {Key("coll/doc10"), model::Mutation::Type::Set}, + {Key("coll/doc11"), model::Mutation::Type::Set}, + {Key("coll/doc12"), model::Mutation::Type::Set}, + {Key("coll/doc13"), model::Mutation::Type::Set}, + {Key("coll/doc14"), model::Mutation::Type::Set}, + {Key("coll/doc15"), model::Mutation::Type::Set}, + {Key("coll/doc16"), model::Mutation::Type::Set}, + {Key("coll/doc17"), model::Mutation::Type::Set}, + {Key("coll/doc18"), model::Mutation::Type::Set}, + {Key("coll/doc19"), model::Mutation::Type::Set}, + {Key("coll/doc20"), model::Mutation::Type::Set}})); + + FSTAssertQueryReturned( + "coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5", + "coll/doc6", "coll/doc7", "coll/doc8", "coll/doc9", "coll/doc10", + "coll/doc11", "coll/doc12", "coll/doc13", "coll/doc14", "coll/doc15", + "coll/doc16", "coll/doc17", "coll/doc18", "coll/doc19", "coll/doc20"); +} + TEST_F(LevelDbLocalStoreTest, IndexesServerTimestamps) { FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), "time", model::Segment::Kind::kAscending); @@ -426,7 +1111,7 @@ TEST_F(LevelDbLocalStoreTest, DoesNotAutoCreateIndexesForSmallCollections) { // SDK will not create indexes since collection size is too small. 
ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - FSTAssertQueryReturned("coll/a", "coll/e"); + FSTAssertQueryReturned("coll/e", "coll/a"); BackfillIndexes(); @@ -435,7 +1120,7 @@ TEST_F(LevelDbLocalStoreTest, DoesNotAutoCreateIndexesForSmallCollections) { ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 3); - FSTAssertQueryReturned("coll/a", "coll/e", "coll/f"); + FSTAssertQueryReturned("coll/e", "coll/a", "coll/f"); } TEST_F(LevelDbLocalStoreTest, @@ -541,7 +1226,7 @@ TEST_F(LevelDbLocalStoreTest, // (2). Full matched index should be created. ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - FSTAssertQueryReturned("coll/a", "coll/e"); + FSTAssertQueryReturned("coll/e", "coll/a"); SetIndexAutoCreationEnabled(false); @@ -552,7 +1237,7 @@ TEST_F(LevelDbLocalStoreTest, ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - FSTAssertQueryReturned("coll/a", "coll/e", "coll/f"); + FSTAssertQueryReturned("coll/e", "coll/a", "coll/f"); } TEST_F(LevelDbLocalStoreTest, DisableIndexAutoCreationWorks) { diff --git a/Firestore/core/test/unit/local/local_store_test.cc b/Firestore/core/test/unit/local/local_store_test.cc index 2c0affe91ee..65012b63248 100644 --- a/Firestore/core/test/unit/local/local_store_test.cc +++ b/Firestore/core/test/unit/local/local_store_test.cc @@ -67,6 +67,7 @@ using model::Document; using model::DocumentKey; using model::DocumentKeySet; using model::DocumentMap; +using model::DocumentSet; using model::ListenSequenceNumber; using model::MutableDocument; using model::MutableDocumentMap; @@ -105,6 +106,16 @@ using testutil::UpdateRemoteEventWithLimboTargets; using testutil::Value; using testutil::Vector; +std::vector DocSetToVector(const absl::optional& docs) { + std::vector result; + if (docs.has_value()) { + for (const auto& doc : *docs) { + result.push_back(doc); + } + } + return result; +} + 
std::vector DocMapToVector(const DocumentMap& docs) { std::vector result; for (const auto& kv : docs) { @@ -273,10 +284,19 @@ TargetData LocalStoreTestBase::GetTargetData(const core::Query& query) { }); } -QueryResult LocalStoreTestBase::ExecuteQuery(const core::Query& query) { +absl::optional LocalStoreTestBase::ExecuteQuery( + const core::Query& query) { ResetPersistenceStats(); - last_query_result_ = + local::QueryResult query_result = local_store_.ExecuteQuery(query, /* use_previous_results= */ true); + + // Start from an empty set. Use the query's comparator which is what + // ultimately gets used to order documents. + last_query_result_ = DocumentSet(query.Comparator()); + for (const auto& document : query_result.documents()) { + last_query_result_ = last_query_result_->insert(document.second); + } + return last_query_result_; } @@ -880,8 +900,8 @@ TEST_P(LocalStoreTest, CanExecuteDocumentQueries) { testutil::SetMutation("foo/baz", Map("foo", "baz")), testutil::SetMutation("foo/bar/Foo/Bar", Map("Foo", "Bar"))}); core::Query query = Query("foo/bar"); - QueryResult query_result = ExecuteQuery(query); - ASSERT_EQ(DocMapToVector(query_result.documents()), + auto query_result = ExecuteQuery(query); + ASSERT_EQ(DocSetToVector(query_result), Vector(Document{ Doc("foo/bar", 0, Map("foo", "bar")).SetHasLocalMutations()})); } @@ -894,9 +914,9 @@ TEST_P(LocalStoreTest, CanExecuteCollectionQueries) { testutil::SetMutation("foo/bar/Foo/Bar", Map("Foo", "Bar")), testutil::SetMutation("fooo/blah", Map("fooo", "blah"))}); core::Query query = Query("foo"); - QueryResult query_result = ExecuteQuery(query); + auto query_result = ExecuteQuery(query); ASSERT_EQ( - DocMapToVector(query_result.documents()), + DocSetToVector(query_result), Vector( Document{Doc("foo/bar", 0, Map("foo", "bar")).SetHasLocalMutations()}, Document{ @@ -915,9 +935,9 @@ TEST_P(LocalStoreTest, CanExecuteMixedCollectionQueries) { local_store_.WriteLocally({testutil::SetMutation("foo/bonk", Map("a", "b"))}); 
- QueryResult query_result = ExecuteQuery(query); + auto query_result = ExecuteQuery(query); ASSERT_EQ( - DocMapToVector(query_result.documents()), + DocSetToVector(query_result), Vector( Document{Doc("foo/bar", 20, Map("a", "b"))}, Document{Doc("foo/baz", 10, Map("a", "b"))}, diff --git a/Firestore/core/test/unit/local/local_store_test.h b/Firestore/core/test/unit/local/local_store_test.h index 1271bc4fa1b..5213b6276a7 100644 --- a/Firestore/core/test/unit/local/local_store_test.h +++ b/Firestore/core/test/unit/local/local_store_test.h @@ -25,6 +25,7 @@ #include "Firestore/core/src/local/local_store.h" #include "Firestore/core/src/local/query_engine.h" #include "Firestore/core/src/local/query_result.h" +#include "Firestore/core/src/model/document_set.h" #include "Firestore/core/src/model/mutation_batch.h" #include "Firestore/core/test/unit/local/counting_query_engine.h" #include "gtest/gtest.h" @@ -89,7 +90,7 @@ class LocalStoreTestBase : public testing::Test { std::vector&& new_field_indexes); model::TargetId AllocateQuery(core::Query query); local::TargetData GetTargetData(const core::Query& query); - local::QueryResult ExecuteQuery(const core::Query& query); + absl::optional ExecuteQuery(const core::Query& query); void SetIndexAutoCreationEnabled(bool is_enabled); void DeleteAllIndexes() const; void SetMinCollectionSizeToAutoCreateIndex(size_t new_min); @@ -112,7 +113,7 @@ class LocalStoreTestBase : public testing::Test { model::DocumentMap last_changes_; model::TargetId last_target_id_ = 0; - local::QueryResult last_query_result_; + absl::optional last_query_result_; }; /** @@ -151,20 +152,19 @@ class LocalStoreTest : public LocalStoreTestBase, /** * Asserts that the last ExecuteQuery results contain the docs in the given - * array. + * array in the same order. */ -#define FSTAssertQueryReturned(...) 
\ - do { \ - std::vector expected_keys = {__VA_ARGS__}; \ - ASSERT_EQ(last_query_result_.documents().size(), expected_keys.size()); \ - auto expected_keys_iterator = expected_keys.begin(); \ - for (const auto& kv : last_query_result_.documents()) { \ - const DocumentKey& actual_key = kv.first; \ - DocumentKey expected_key = Key(*expected_keys_iterator); \ - ASSERT_EQ(actual_key, expected_key); \ - ++expected_keys_iterator; \ - } \ - last_query_result_ = QueryResult{}; \ +#define FSTAssertQueryReturned(...) \ + do { \ + std::vector expected_keys = {__VA_ARGS__}; \ + ASSERT_EQ(last_query_result_->size(), expected_keys.size()); \ + auto expected_keys_iterator = expected_keys.begin(); \ + for (const auto& doc : *last_query_result_) { \ + const DocumentKey& actual_key = doc.get().key(); \ + DocumentKey expected_key = Key(*expected_keys_iterator); \ + ASSERT_EQ(actual_key, expected_key); \ + ++expected_keys_iterator; \ + } \ } while (0) /** Asserts that the given keys were removed. */ diff --git a/Firestore/core/test/unit/model/document_test.cc b/Firestore/core/test/unit/model/document_test.cc index 9172bc2d516..31e7aec0c07 100644 --- a/Firestore/core/test/unit/model/document_test.cc +++ b/Firestore/core/test/unit/model/document_test.cc @@ -25,11 +25,18 @@ namespace firebase { namespace firestore { namespace model { +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DeletedDoc; using testutil::Doc; using testutil::Field; +using testutil::Int32; using testutil::Key; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; using testutil::UnknownDoc; using testutil::Value; using testutil::Version; @@ -71,6 +78,25 @@ TEST(DocumentTest, ExtractsFields) { EXPECT_EQ(doc.field(Field("owner.title")), *Value("scallywag")); } +TEST(DocumentTest, CanContainBsonTypes) { + auto data = WrapObject( + Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^foo", "i"), + "int32", 
Int32(1234), "objectId", BsonObjectId("foo"), "timestamp", + BsonTimestamp(123, 456), "binary", BsonBinaryData(128, {7, 8, 9}))); + + auto doc = MutableDocument::FoundDocument(Key("col/doc"), Version(1), data); + + EXPECT_EQ(doc.data(), data); + EXPECT_EQ(doc.has_local_mutations(), false); + EXPECT_EQ(doc.field(Field("minKey")), *MinKey()); + EXPECT_EQ(doc.field(Field("maxKey")), *MaxKey()); + EXPECT_EQ(doc.field(Field("regex")), *Regex("^foo", "i")); + EXPECT_EQ(doc.field(Field("int32")), *Int32(1234)); + EXPECT_EQ(doc.field(Field("objectId")), *BsonObjectId("foo")); + EXPECT_EQ(doc.field(Field("timestamp")), *BsonTimestamp(123, 456)); + EXPECT_EQ(doc.field(Field("binary")), *BsonBinaryData(128, {7, 8, 9})); +} + TEST(DocumentTest, Equality) { MutableDocument doc = Doc("some/path", 1, Map("a", 1)); EXPECT_EQ(doc, Doc("some/path", 1, Map("a", 1))); diff --git a/Firestore/core/test/unit/model/object_value_test.cc b/Firestore/core/test/unit/model/object_value_test.cc index e5538f08847..594322fdc11 100644 --- a/Firestore/core/test/unit/model/object_value_test.cc +++ b/Firestore/core/test/unit/model/object_value_test.cc @@ -31,9 +31,16 @@ const char kBarString[] = "bar"; namespace { using absl::nullopt; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DbId; using testutil::Field; +using testutil::Int32; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; using testutil::Value; using testutil::WrapObject; @@ -43,7 +50,11 @@ class ObjectValueTest : public ::testing::Test { }; TEST_F(ObjectValueTest, ExtractsFields) { - ObjectValue value = WrapObject("foo", Map("a", 1, "b", true, "c", "string")); + ObjectValue value = WrapObject( + "foo", Map("a", 1, "b", true, "c", "string"), "bson", + Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^foo", "i"), + "int32", Int32(1234), "objectId", BsonObjectId("foo"), "timestamp", + BsonTimestamp(123, 456), "binary", 
BsonBinaryData(128, {7, 8, 9}))); ASSERT_EQ(google_firestore_v1_Value_map_value_tag, value.Get(Field("foo"))->which_value_type); @@ -51,21 +62,32 @@ TEST_F(ObjectValueTest, ExtractsFields) { EXPECT_EQ(*Value(1), *value.Get(Field("foo.a"))); EXPECT_EQ(*Value(true), *value.Get(Field("foo.b"))); EXPECT_EQ(*Value("string"), *value.Get(Field("foo.c"))); - + EXPECT_EQ( + *Value(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", + Regex("^foo", "i"), "int32", Int32(1234), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), + "binary", BsonBinaryData(128, {7, 8, 9}))), + *value.Get(Field("bson"))); EXPECT_EQ(nullopt, value.Get(Field("foo.a.b"))); EXPECT_EQ(nullopt, value.Get(Field("bar"))); EXPECT_EQ(nullopt, value.Get(Field("bar.a"))); } TEST_F(ObjectValueTest, ExtractsFieldMask) { - ObjectValue value = - WrapObject("a", "b", "Map", - Map("a", 1, "b", true, "c", "string", "nested", Map("d", "e")), - "emptymap", Map()); - - FieldMask expected_mask = - FieldMask({Field("a"), Field("Map.a"), Field("Map.b"), Field("Map.c"), - Field("Map.nested.d"), Field("emptymap")}); + ObjectValue value = WrapObject( + "a", "b", "Map", + Map("a", 1, "b", true, "c", "string", "nested", Map("d", "e")), + "emptymap", Map(), "bson", + Value(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", + Regex("^foo", "i"), "int32", Int32(1234), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), + "binary", BsonBinaryData(128, {7, 8, 9})))); + + FieldMask expected_mask = FieldMask( + {Field("a"), Field("Map.a"), Field("Map.b"), Field("Map.c"), + Field("Map.nested.d"), Field("emptymap"), Field("bson.minKey"), + Field("bson.maxKey"), Field("bson.regex"), Field("bson.int32"), + Field("bson.objectId"), Field("bson.timestamp"), Field("bson.binary")}); FieldMask actual_mask = value.ToFieldMask(); EXPECT_EQ(expected_mask, actual_mask); @@ -335,6 +357,48 @@ TEST_F(ObjectValueTest, DoesNotRequireSortedInserts) { EXPECT_EQ(*Value(2), 
*object_value.Get(Field("nested.nested.c"))); } +TEST_F(ObjectValueTest, CanHandleBsonTypesInObjectValue) { + ObjectValue object_value{}; + object_value.Set(Field("minKey"), MinKey()); + object_value.Set(Field("maxKey"), MaxKey()); + object_value.Set(Field("regex"), Regex("^foo", "i")); + object_value.Set(Field("int32"), Int32(1234)); + object_value.Set(Field("objectId"), BsonObjectId("foo")); + object_value.Set(Field("timestamp"), BsonTimestamp(123, 456)); + object_value.Set(Field("binary"), BsonBinaryData(128, {7, 8, 9})); + + EXPECT_EQ( + WrapObject(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", + Regex("^foo", "i"), "int32", Int32(1234), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), + "binary", BsonBinaryData(128, {7, 8, 9}))), + object_value); + + // Overwrite existing fields + object_value.Set(Field("regex"), Regex("^baz", "g")); + object_value.Set(Field("objectId"), BsonObjectId("new-foo-value")); + + // Create nested objects + object_value.Set(Field("foo.regex1"), Regex("^foo", "i")); + object_value.Set(Field("foo.regex2"), Regex("^bar", "i")); + object_value.Set(Field("foo.timestamp"), BsonTimestamp(2, 1)); + + // Delete fields + object_value.Delete(Field("foo.regex1")); + + // Overwrite nested objects + object_value.Set(Field("foo.regex2"), Regex("^bar", "x")); + + EXPECT_EQ( + WrapObject(Map( + "minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^baz", "g"), + "int32", Int32(1234), "objectId", BsonObjectId("new-foo-value"), + "timestamp", BsonTimestamp(123, 456), "binary", + BsonBinaryData(128, {7, 8, 9}), "foo", + Map("regex2", Regex("^bar", "x"), "timestamp", BsonTimestamp(2, 1)))), + object_value); +} + } // namespace } // namespace model diff --git a/Firestore/core/test/unit/model/value_util_test.cc b/Firestore/core/test/unit/model/value_util_test.cc index c6d2479929c..4f528e78575 100644 --- a/Firestore/core/test/unit/model/value_util_test.cc +++ b/Firestore/core/test/unit/model/value_util_test.cc @@ -40,12 +40,20 
@@ using model::RefValue; using nanopb::Message; using testutil::Array; using testutil::BlobValue; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DbId; +using testutil::Int32; using testutil::kCanonicalNanBits; using testutil::Key; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; using testutil::time_point; using testutil::Value; +using testutil::VectorType; using util::ComparisonResult; namespace { @@ -99,9 +107,6 @@ class ValueUtilTest : public ::testing::Test { ComparisonResult expected_result) { for (pb_size_t i = 0; i < left->values_count; ++i) { for (pb_size_t j = 0; j < right->values_count; ++j) { - if (expected_result != Compare(left->values[i], right->values[j])) { - std::cout << "here" << std::endl; - } EXPECT_EQ(expected_result, Compare(left->values[i], right->values[j])) << "Order check failed for '" << CanonicalId(left->values[i]) << "' and '" << CanonicalId(right->values[j]) << "' (expected " @@ -184,6 +189,50 @@ TEST(FieldValueTest, ValueHelpers) { auto double_value = Value(2.0); ASSERT_EQ(GetTypeOrder(*double_value), TypeOrder::kNumber); EXPECT_EQ(double_value->double_value, 2.0); + + auto map_value = Map("foo", "bar"); + ASSERT_EQ(GetTypeOrder(*map_value), TypeOrder::kMap); + ASSERT_EQ(DetectMapType(*map_value), MapType::kNormal); + + auto max_value = DeepClone(InternalMaxValue()); + ASSERT_EQ(GetTypeOrder(*max_value), TypeOrder::kInternalMaxValue); + ASSERT_EQ(DetectMapType(*max_value), MapType::kInternalMaxValue); + + auto server_timestamp = EncodeServerTimestamp(kTimestamp1, absl::nullopt); + ASSERT_EQ(GetTypeOrder(*server_timestamp), TypeOrder::kServerTimestamp); + ASSERT_EQ(DetectMapType(*server_timestamp), MapType::kServerTimestamp); + + auto vector_value = VectorType(100); + ASSERT_EQ(GetTypeOrder(*vector_value), TypeOrder::kVector); + ASSERT_EQ(DetectMapType(*vector_value), MapType::kVector); + + auto min_key_value = MinKey(); + 
ASSERT_EQ(GetTypeOrder(*min_key_value), TypeOrder::kMinKey); + ASSERT_EQ(DetectMapType(*min_key_value), MapType::kMinKey); + + auto max_key_value = MaxKey(); + ASSERT_EQ(GetTypeOrder(*max_key_value), TypeOrder::kMaxKey); + ASSERT_EQ(DetectMapType(*max_key_value), MapType::kMaxKey); + + auto regex_value = Regex("^foo", "x"); + ASSERT_EQ(GetTypeOrder(*regex_value), TypeOrder::kRegex); + ASSERT_EQ(DetectMapType(*regex_value), MapType::kRegex); + + auto int32_value = Int32(1); + ASSERT_EQ(GetTypeOrder(*int32_value), TypeOrder::kNumber); + ASSERT_EQ(DetectMapType(*int32_value), MapType::kInt32); + + auto bson_object_id_value = BsonObjectId("foo"); + ASSERT_EQ(GetTypeOrder(*bson_object_id_value), TypeOrder::kBsonObjectId); + ASSERT_EQ(DetectMapType(*bson_object_id_value), MapType::kBsonObjectId); + + auto bson_timestamp_value = BsonTimestamp(1, 2); + ASSERT_EQ(GetTypeOrder(*bson_timestamp_value), TypeOrder::kBsonTimestamp); + ASSERT_EQ(DetectMapType(*bson_timestamp_value), MapType::kBsonTimestamp); + + auto bson_binary_data_value = BsonBinaryData(1, {1, 2, 3}); + ASSERT_EQ(GetTypeOrder(*bson_binary_data_value), TypeOrder::kBsonBinaryData); + ASSERT_EQ(DetectMapType(*bson_binary_data_value), MapType::kBsonBinaryData); } #if __APPLE__ @@ -210,6 +259,7 @@ TEST_F(ValueUtilTest, Equality) { std::vector> equals_group; Add(equals_group, nullptr, nullptr); + Add(equals_group, MinKey(), MinKey()); Add(equals_group, false, false); Add(equals_group, true, true); Add(equals_group, std::numeric_limits::quiet_NaN(), @@ -222,6 +272,8 @@ TEST_F(ValueUtilTest, Equality) { // Doubles and Longs aren't equal (even though they compare same). 
Add(equals_group, 1.0, 1.0); Add(equals_group, 1.1, 1.1); + Add(equals_group, Int32(-1), Int32(-1)); + Add(equals_group, Int32(1), Int32(1)); Add(equals_group, BlobValue(0, 1, 1)); Add(equals_group, BlobValue(0, 1)); Add(equals_group, "string", "string"); @@ -248,10 +300,21 @@ TEST_F(ValueUtilTest, Equality) { Add(equals_group, Array("foo")); Add(equals_group, Map("__type__", "__vector__", "value", Array()), DeepClone(MinVector())); + Add(equals_group, Regex("foo", "bar"), Regex("foo", "bar")); + Add(equals_group, BsonObjectId("bar")); + Add(equals_group, BsonObjectId("foo"), BsonObjectId("foo")); + Add(equals_group, BsonTimestamp(1, 3)); + Add(equals_group, BsonTimestamp(1, 2), BsonTimestamp(1, 2)); + Add(equals_group, BsonTimestamp(2, 3)); + Add(equals_group, BsonBinaryData(1, {7, 8, 9})); + Add(equals_group, BsonBinaryData(128, {7, 8, 9}), + BsonBinaryData(128, {7, 8, 9})); + Add(equals_group, BsonBinaryData(128, {7, 8, 10})); Add(equals_group, Map("bar", 1, "foo", 2), Map("bar", 1, "foo", 2)); Add(equals_group, Map("bar", 2, "foo", 1)); Add(equals_group, Map("bar", 1)); Add(equals_group, Map("foo", 1)); + Add(equals_group, MaxKey(), MaxKey()); for (size_t i = 0; i < equals_group.size(); ++i) { for (size_t j = i; j < equals_group.size(); ++j) { @@ -271,6 +334,9 @@ TEST_F(ValueUtilTest, StrictOrdering) { // null first Add(comparison_groups, nullptr); + // MinKey + Add(comparison_groups, MinKey()); + // booleans Add(comparison_groups, false); Add(comparison_groups, true); @@ -281,10 +347,12 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, std::numeric_limits::min()); Add(comparison_groups, -0.1); // Zeros all compare the same. - Add(comparison_groups, -0.0, 0.0, 0L); + Add(comparison_groups, -0.0, 0.0, 0L, Int32(0)); Add(comparison_groups, 0.1); - // Doubles and longs Compare() the same. - Add(comparison_groups, 1.0, 1L); + // Doubles, longs, and Int32 Compare() the same. 
+ Add(comparison_groups, 1.0, 1L, Int32(1)); + Add(comparison_groups, Int32(2)); + Add(comparison_groups, Int32(2147483647)); Add(comparison_groups, std::numeric_limits::max()); Add(comparison_groups, 1e20); @@ -293,6 +361,12 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, kTimestamp1); Add(comparison_groups, kTimestamp2); + // BSON Timestamp + Add(comparison_groups, DeepClone(MinBsonTimestamp())); + Add(comparison_groups, BsonTimestamp(123, 4), BsonTimestamp(123, 4)); + Add(comparison_groups, BsonTimestamp(123, 5)); + Add(comparison_groups, BsonTimestamp(124, 0)); + // server timestamps come after all concrete timestamps. // NOTE: server timestamps can't be parsed with . Add(comparison_groups, EncodeServerTimestamp(kTimestamp1, absl::nullopt)); @@ -318,6 +392,13 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, BlobValue(0, 1, 2, 4, 3)); Add(comparison_groups, BlobValue(255)); + // BSON Binary Data + Add(comparison_groups, DeepClone(MinBsonBinaryData())); + Add(comparison_groups, BsonBinaryData(5, {1, 2, 3}), + BsonBinaryData(5, {1, 2, 3})); + Add(comparison_groups, BsonBinaryData(7, {1})); + Add(comparison_groups, BsonBinaryData(7, {2})); + // resource names Add(comparison_groups, DeepClone(MinReference())); Add(comparison_groups, RefValue(DbId("p1/d1"), Key("c1/doc1"))); @@ -327,6 +408,14 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, RefValue(DbId("p1/d2"), Key("c1/doc1"))); Add(comparison_groups, RefValue(DbId("p2/d1"), Key("c1/doc1"))); + // BSON ObjectId + Add(comparison_groups, DeepClone(MinBsonObjectId())); + Add(comparison_groups, BsonObjectId("foo"), BsonObjectId("foo")); + // TODO(types/ehsann): uncomment after string sort bug is fixed + // Add(comparison_groups, BsonObjectId("Ḟoo")); + // Add(comparison_groups, BsonObjectId("foo\u0301")); + Add(comparison_groups, BsonObjectId("xyz")); + // geo points Add(comparison_groups, GeoPoint(-90, -180)); Add(comparison_groups, GeoPoint(-90, 0)); @@ -341,8 
+430,15 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, GeoPoint(90, 0)); Add(comparison_groups, GeoPoint(90, 180)); - // arrays - Add(comparison_groups, DeepClone(MinArray())); + // regular expressions + Add(comparison_groups, DeepClone(MinRegex())); + Add(comparison_groups, Regex("a", "bar1")), + Add(comparison_groups, Regex("foo", "bar1")), + Add(comparison_groups, Regex("foo", "bar2")), + Add(comparison_groups, Regex("go", "bar1")), + + // arrays + Add(comparison_groups, DeepClone(MinArray())); Add(comparison_groups, Array("bar")); Add(comparison_groups, Array("foo", 1)); Add(comparison_groups, Array("foo", 2)); @@ -363,7 +459,11 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, Map("foo", 1)); Add(comparison_groups, Map("foo", 2)); Add(comparison_groups, Map("foo", "0")); - Add(comparison_groups, DeepClone(MaxValue())); + + // MaxKey + Add(comparison_groups, MaxKey()); + + Add(comparison_groups, DeepClone(InternalMaxValue())); for (size_t i = 0; i < comparison_groups.size(); ++i) { for (size_t j = i; j < comparison_groups.size(); ++j) { @@ -386,6 +486,9 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { // null first Add(comparison_groups, DeepClone(NullValue())); Add(comparison_groups, nullptr); + + // MinKey + Add(comparison_groups, MinKey()); Add(comparison_groups, DeepClone(MinBoolean())); // booleans @@ -401,10 +504,12 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, std::numeric_limits::min()); Add(comparison_groups, -0.1); // Zeros all compare the same. - Add(comparison_groups, -0.0, 0.0, 0L); + Add(comparison_groups, -0.0, 0.0, 0L, Int32(0)); Add(comparison_groups, 0.1); // Doubles and longs Compare() the same. 
- Add(comparison_groups, 1.0, 1L); + Add(comparison_groups, 1.0, 1L, Int32(1)); + Add(comparison_groups, Int32(2)); + Add(comparison_groups, Int32(2147483647)); Add(comparison_groups, std::numeric_limits::max()); Add(comparison_groups, 1e20); Add(comparison_groups, DeepClone(MinTimestamp())); @@ -415,6 +520,12 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, kTimestamp1); Add(comparison_groups, kTimestamp2); + // BSON Timestamp + Add(comparison_groups, DeepClone(MinBsonTimestamp())); + Add(comparison_groups, BsonTimestamp(123, 4), BsonTimestamp(123, 4)); + Add(comparison_groups, BsonTimestamp(123, 5)); + Add(comparison_groups, BsonTimestamp(124, 0)); + // server timestamps come after all concrete timestamps. // NOTE: server timestamps can't be parsed with . Add(comparison_groups, EncodeServerTimestamp(kTimestamp1, absl::nullopt)); @@ -443,7 +554,13 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, BlobValue(0, 1, 2, 3, 4)); Add(comparison_groups, BlobValue(0, 1, 2, 4, 3)); Add(comparison_groups, BlobValue(255)); - Add(comparison_groups, DeepClone(MinReference())); + + // BSON Binary Data + Add(comparison_groups, DeepClone(MinBsonBinaryData())); + Add(comparison_groups, BsonBinaryData(5, {1, 2, 3}), + BsonBinaryData(5, {1, 2, 3})); + Add(comparison_groups, BsonBinaryData(7, {1})); + Add(comparison_groups, BsonBinaryData(7, {2})); // resource names Add(comparison_groups, DeepClone(MinReference())); @@ -453,7 +570,14 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, RefValue(DbId("p1/d1"), Key("c2/doc1"))); Add(comparison_groups, RefValue(DbId("p1/d2"), Key("c1/doc1"))); Add(comparison_groups, RefValue(DbId("p2/d1"), Key("c1/doc1"))); - Add(comparison_groups, DeepClone(MinGeoPoint())); + + // BSON ObjectId + Add(comparison_groups, DeepClone(MinBsonObjectId())); + Add(comparison_groups, BsonObjectId("foo"), BsonObjectId("foo")); + // TODO(types/ehsann): uncomment after string sort bug is fixed + // 
Add(comparison_groups, BsonObjectId("Ḟoo")); + // Add(comparison_groups, BsonObjectId("foo\u0301")); + Add(comparison_groups, BsonObjectId("xyz")); // geo points Add(comparison_groups, DeepClone(MinGeoPoint())); @@ -469,10 +593,16 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, GeoPoint(90, -180)); Add(comparison_groups, GeoPoint(90, 0)); Add(comparison_groups, GeoPoint(90, 180)); - Add(comparison_groups, DeepClone(MinArray())); - // arrays - Add(comparison_groups, DeepClone(MinArray())); + // regular expressions + Add(comparison_groups, DeepClone(MinRegex())); + Add(comparison_groups, Regex("a", "bar1")), + Add(comparison_groups, Regex("foo", "bar1")), + Add(comparison_groups, Regex("foo", "bar2")), + Add(comparison_groups, Regex("go", "bar1")), + + // arrays + Add(comparison_groups, DeepClone(MinArray())); Add(comparison_groups, Array("bar")); Add(comparison_groups, Array("foo", 1)); Add(comparison_groups, Array("foo", 2)); @@ -481,11 +611,9 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { // vectors Add(comparison_groups, DeepClone(MinVector())); - Add(comparison_groups, Map("__type__", "__vector__", "value", Array(100))); - Add(comparison_groups, - Map("__type__", "__vector__", "value", Array(1.0, 2.0, 3.0))); - Add(comparison_groups, - Map("__type__", "__vector__", "value", Array(1.0, 3.0, 2.0))); + Add(comparison_groups, VectorType(100)); + Add(comparison_groups, VectorType(1.0, 2.0, 3.0)); + Add(comparison_groups, VectorType(1.0, 3.0, 2.0)); // objects Add(comparison_groups, DeepClone(MinMap())); @@ -494,7 +622,12 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, Map("foo", 1)); Add(comparison_groups, Map("foo", 2)); Add(comparison_groups, Map("foo", "0")); - Add(comparison_groups, DeepClone(MaxValue())); + + // MaxKey + Add(comparison_groups, MaxKey()); + + // MaxValue (internal) + Add(comparison_groups, DeepClone(InternalMaxValue())); for (size_t i = 0; i < comparison_groups.size(); ++i) { for (size_t j = i; j < 
comparison_groups.size(); ++j) { @@ -503,6 +636,187 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { } } +TEST_F(ValueUtilTest, ComputesLowerBound) { + auto GetLowerBoundMessage = [](Message value) { + return DeepClone(GetLowerBound(*value)); + }; + + std::vector> groups; + + // Lower bound of null is null + Add(groups, DeepClone(NullValue()), + GetLowerBoundMessage(DeepClone(NullValue()))); + + // Lower bound of MinKey is MinKey + Add(groups, MinKey(), GetLowerBoundMessage(DeepClone(MinKeyValue())), + DeepClone(MinKeyValue())); + + // Booleans + Add(groups, false, GetLowerBoundMessage(Value(true))); + Add(groups, true); + + // Numbers + Add(groups, GetLowerBoundMessage(Value(0.0)), GetLowerBoundMessage(Value(0L)), + GetLowerBoundMessage(Int32(0)), std::nan(""), DeepClone(MinNumber())); + Add(groups, INT_MIN); + + // Timestamps + Add(groups, GetLowerBoundMessage(Value(kTimestamp1)), + DeepClone(MinTimestamp())); + Add(groups, kTimestamp1); + + // BSON Timestamps + Add(groups, GetLowerBoundMessage(BsonTimestamp(500, 600)), + BsonTimestamp(0, 0), DeepClone(MinBsonTimestamp())); + Add(groups, BsonTimestamp(1, 1)); + + // Strings + Add(groups, GetLowerBoundMessage(Value("Z")), "", DeepClone(MinString())); + Add(groups, "\u0000"); + + // Blobs + Add(groups, GetLowerBoundMessage(BlobValue(1, 2, 3)), BlobValue(), + DeepClone(MinBytes())); + Add(groups, BlobValue(0)); + + // BSON Binary Data + Add(groups, GetLowerBoundMessage(BsonBinaryData(128, {128, 128})), + DeepClone(MinBsonBinaryData())); + Add(groups, BsonBinaryData(0, {0})); + + // References + Add(groups, GetLowerBoundMessage(RefValue(DbId("p1/d1"), Key("c1/doc1"))), + DeepClone(MinReference())); + Add(groups, RefValue(DbId(), Key("a/a"))); + + // BSON Object Ids + Add(groups, GetLowerBoundMessage(BsonObjectId("ZZZ")), BsonObjectId(""), + DeepClone(MinBsonObjectId())); + Add(groups, BsonObjectId("a")); + + // GeoPoints + Add(groups, GetLowerBoundMessage(Value(GeoPoint(30, 60))), + GeoPoint(-90, -180), 
DeepClone(MinGeoPoint())); + Add(groups, GeoPoint(-90, 0)); + + // Regular Expressions + Add(groups, GetLowerBoundMessage(Regex("ZZZ", "i")), Regex("", ""), + DeepClone(MinRegex())); + Add(groups, Regex("a", "i")); + + // Arrays + Add(groups, GetLowerBoundMessage(Value(Array())), Array(), + DeepClone(MinArray())); + Add(groups, Array(false)); + + // Vectors + Add(groups, GetLowerBoundMessage(VectorType(1.0)), VectorType(), + DeepClone(MinVector())); + Add(groups, VectorType(1.0)); + + // Maps + Add(groups, GetLowerBoundMessage(Map()), Map(), DeepClone(MinMap())); + Add(groups, Map("a", "b")); + + // MaxKey + Add(groups, MaxKey(), GetLowerBoundMessage(DeepClone(MaxKeyValue())), + DeepClone(MaxKeyValue())); + + for (size_t i = 0; i < groups.size(); ++i) { + for (size_t j = i; j < groups.size(); ++j) { + VerifyRelaxedAscending(groups[i], groups[j]); + } + } +} + +TEST_F(ValueUtilTest, ComputesUpperBound) { + auto GetUpperBoundMessage = [](Message value) { + return DeepClone(GetUpperBound(*value)); + }; + + std::vector> groups; + + // Null first + Add(groups, DeepClone(NullValue())); + + // The upper bound of null is MinKey + Add(groups, MinKey(), GetUpperBoundMessage(DeepClone(NullValue()))); + + // The upper bound of MinKey is boolean `false` + Add(groups, false, GetUpperBoundMessage(MinKey())); + + // Booleans + Add(groups, true); + Add(groups, GetUpperBoundMessage(Value(false))); + + // Numbers + Add(groups, INT_MAX); + Add(groups, GetUpperBoundMessage(Value(INT_MAX)), + GetUpperBoundMessage(Value(0L)), GetUpperBoundMessage(Int32(0)), + GetUpperBoundMessage(Value(std::nan("")))); + + // Timestamps + Add(groups, kTimestamp1); + Add(groups, GetUpperBoundMessage(Value(kTimestamp1))); + + // BSON Timestamps + Add(groups, BsonTimestamp(4294967295, 4294967295)); // largest BSON Timestamp + Add(groups, GetUpperBoundMessage(DeepClone(MinBsonTimestamp()))); + + // Strings + Add(groups, "\u0000"); + Add(groups, GetUpperBoundMessage(DeepClone(MinString()))); + + // Blobs + 
Add(groups, BlobValue(255)); + Add(groups, GetUpperBoundMessage(BlobValue())); + + // BSON Binary Data + Add(groups, BsonBinaryData(255, {255, 255})); + Add(groups, GetUpperBoundMessage(DeepClone(MinBsonBinaryData()))); + + // References + Add(groups, DeepClone(MinReference())); + Add(groups, RefValue(DbId(), Key("c/d"))); + Add(groups, GetUpperBoundMessage(RefValue(DbId(), Key("a/b")))); + + // BSON Object Ids + Add(groups, BsonObjectId("foo")); + Add(groups, GetUpperBoundMessage(DeepClone(MinBsonObjectId()))); + + // GeoPoints + Add(groups, GeoPoint(90, 180)); + Add(groups, GetUpperBoundMessage(DeepClone(MinGeoPoint()))); + + // Regular Expressions + Add(groups, Regex("a", "i")); + Add(groups, GetUpperBoundMessage(DeepClone(MinRegex()))); + + // Arrays + Add(groups, Array(false)); + Add(groups, GetUpperBoundMessage(DeepClone(MinArray()))); + + // Vectors + Add(groups, VectorType(1.0, 2.0, 3.0)); + Add(groups, GetUpperBoundMessage(DeepClone(MinVector()))); + + // Maps + Add(groups, Map("a", "b")); + Add(groups, GetUpperBoundMessage(DeepClone(MinMap()))); + + // MaxKey + Add(groups, MaxKey()); + + // The upper bound of MaxKey is internal max value. 
+ Add(groups, GetUpperBoundMessage(DeepClone(MaxKeyValue()))); + + for (size_t i = 0; i < groups.size(); ++i) { + for (size_t j = i; j < groups.size(); ++j) { + VerifyRelaxedAscending(groups[i], groups[j]); + } + } +} + TEST_F(ValueUtilTest, CanonicalId) { VerifyCanonicalId(Value(nullptr), "null"); VerifyCanonicalId(Value(true), "true"); @@ -519,9 +833,17 @@ TEST_F(ValueUtilTest, CanonicalId) { VerifyCanonicalId(Map("a", 1, "b", 2, "c", "3"), "{a:1,b:2,c:3}"); VerifyCanonicalId(Map("a", Array("b", Map("c", GeoPoint(30, 60)))), "{a:[b,{c:geo(30.0,60.0)}]}"); - VerifyCanonicalId( - Map("__type__", "__vector__", "value", Array(1.0, 1.0, -2.0, 3.14)), - "{__type__:__vector__,value:[1.0,1.0,-2.0,3.1]}"); + VerifyCanonicalId(VectorType(1.0, 1.0, -2.0, 3.14), + "{__type__:__vector__,value:[1.0,1.0,-2.0,3.1]}"); + VerifyCanonicalId(MinKey(), "{__min__:null}"); + VerifyCanonicalId(MaxKey(), "{__max__:null}"); + VerifyCanonicalId(Regex("^foo", "x"), "{__regex__:{pattern:^foo,options:x}}"); + VerifyCanonicalId(Int32(123), "{__int__:123}"); + VerifyCanonicalId(BsonObjectId("foo"), "{__oid__:foo}"); + VerifyCanonicalId(BsonTimestamp(1, 2), + "{__request_timestamp__:{seconds:1,increment:2}}"); + // Binary representation: 128 = 0x80, 2 = 0x02, 3 = 0x03, 4 = 0x04 + VerifyCanonicalId(BsonBinaryData(128, {2, 3, 4}), "{__binary__:80020304}"); } TEST_F(ValueUtilTest, DeepClone) { diff --git a/Firestore/core/test/unit/remote/serializer_test.cc b/Firestore/core/test/unit/remote/serializer_test.cc index 14b08b1e13f..5bf758098b6 100644 --- a/Firestore/core/test/unit/remote/serializer_test.cc +++ b/Firestore/core/test/unit/remote/serializer_test.cc @@ -111,16 +111,23 @@ using nanopb::Writer; using remote::Serializer; using testutil::AndFilters; using testutil::Array; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::Bytes; using testutil::DeletedDoc; using testutil::Doc; using testutil::Filter; +using testutil::Int32; using 
testutil::Key; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; using testutil::OrderBy; using testutil::OrFilters; using testutil::Query; using testutil::Ref; +using testutil::Regex; using testutil::Value; using testutil::Version; using util::Status; @@ -821,6 +828,98 @@ TEST_F(SerializerTest, EncodesNestedObjects) { ExpectRoundTrip(model, proto, TypeOrder::kMap); } +TEST_F(SerializerTest, EncodesMinKey) { + Message model = MinKey(); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__min__"] = ValueProto(nullptr); + + ExpectRoundTrip(model, proto, TypeOrder::kMinKey); +} + +TEST_F(SerializerTest, EncodesMaxKey) { + Message model = MaxKey(); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__max__"] = ValueProto(nullptr); + + ExpectRoundTrip(model, proto, TypeOrder::kMaxKey); +} + +TEST_F(SerializerTest, EncodesRegexValue) { + Message model = Regex("^foo", "i"); + + v1::Value inner_map_proto; + google::protobuf::Map* inner_fields = + inner_map_proto.mutable_map_value()->mutable_fields(); + (*inner_fields)["pattern"] = ValueProto("^foo"); + (*inner_fields)["options"] = ValueProto("i"); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__regex__"] = inner_map_proto; + + ExpectRoundTrip(model, proto, TypeOrder::kRegex); +} + +TEST_F(SerializerTest, EncodesInt32Value) { + Message model = Int32(78); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__int__"] = ValueProto(78); + + ExpectRoundTrip(model, proto, TypeOrder::kNumber); +} + +TEST_F(SerializerTest, EncodesBsonObjectId) { + Message model = BsonObjectId("foo"); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__oid__"] = ValueProto("foo"); + + ExpectRoundTrip(model, 
proto, TypeOrder::kBsonObjectId); +} + +TEST_F(SerializerTest, EncodesBsonTimestamp) { + Message model = BsonTimestamp(123u, 456u); + + v1::Value inner_map_proto; + google::protobuf::Map* inner_fields = + inner_map_proto.mutable_map_value()->mutable_fields(); + (*inner_fields)["seconds"] = ValueProto(123); + (*inner_fields)["increment"] = ValueProto(456); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__request_timestamp__"] = inner_map_proto; + + ExpectRoundTrip(model, proto, TypeOrder::kBsonTimestamp); +} + +TEST_F(SerializerTest, EncodesBsonBinaryData) { + Message model = + BsonBinaryData(128u, {0x1, 0x2, 0x3}); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + std::vector concat{128, 1, 2, 3}; + (*fields)["__binary__"] = + ValueProto(ByteString(concat.data(), concat.size())); + + ExpectRoundTrip(model, proto, TypeOrder::kBsonBinaryData); +} + TEST_F(SerializerTest, EncodesVectorValue) { Message model = Map("__type__", "__vector__", "value", Array(1.0, 2.0, 3.0)); diff --git a/Firestore/core/test/unit/testutil/testutil.cc b/Firestore/core/test/unit/testutil/testutil.cc index 0e851af695d..7709abd33a3 100644 --- a/Firestore/core/test/unit/testutil/testutil.cc +++ b/Firestore/core/test/unit/testutil/testutil.cc @@ -106,6 +106,13 @@ Message BlobValue( return result; } +Message BlobValue(std::vector octets) { + Message result; + result->which_value_type = google_firestore_v1_Value_bytes_value_tag; + result->bytes_value = nanopb::MakeBytesArray(octets.data(), octets.size()); + return result; +} + } // namespace details ByteString Bytes(std::initializer_list octets) { @@ -185,6 +192,40 @@ Message Value(const model::ObjectValue& value) { return DeepClone(value.Get()); } +Message MinKey() { + return Map("__min__", nullptr); +} + +Message MaxKey() { + return Map("__max__", nullptr); +} + +Message Regex(std::string pattern, + std::string options) { + 
return Map("__regex__", Map("pattern", pattern, "options", options)); +} + +nanopb::Message Int32(int32_t value) { + return Map("__int__", Value(value)); +} + +nanopb::Message BsonObjectId(std::string oid) { + return Map("__oid__", Value(oid)); +} + +nanopb::Message BsonTimestamp(uint32_t seconds, + uint32_t increment) { + return Map("__request_timestamp__", + Map("seconds", Value(seconds), "increment", Value(increment))); +} + +nanopb::Message BsonBinaryData( + uint8_t subtype, std::initializer_list data) { + std::vector bytes{subtype}; + bytes.insert(bytes.end(), data.begin(), data.end()); + return Map("__binary__", details::BlobValue(bytes)); +} + ObjectValue WrapObject(Message value) { return ObjectValue{std::move(value)}; } diff --git a/Firestore/core/test/unit/testutil/testutil.h b/Firestore/core/test/unit/testutil/testutil.h index 234ef3d5d12..dd3924434dc 100644 --- a/Firestore/core/test/unit/testutil/testutil.h +++ b/Firestore/core/test/unit/testutil/testutil.h @@ -294,6 +294,17 @@ nanopb::Message VectorType(Args&&... values) { details::MakeArray(std::move(values)...)); } +nanopb::Message MinKey(); +nanopb::Message MaxKey(); +nanopb::Message Regex(std::string pattern, + std::string options); +nanopb::Message Int32(int32_t value); +nanopb::Message BsonObjectId(std::string oid); +nanopb::Message BsonTimestamp(uint32_t seconds, + uint32_t increment); +nanopb::Message BsonBinaryData( + uint8_t subtype, std::initializer_list data); + model::DocumentKey Key(absl::string_view path); model::FieldPath Field(absl::string_view field);