Skip to content

Commit 2052506

Browse files
ParkMyCar and Sean Loiselle
authored
test: Several small test fixes to get nightly green (#23906)
Before cutting the `v0.80` release we want to get Nightly green Included is a hack to get a previously unstable, now totally deleted, Kafka connection config option working. We used to support writing `TOPIC METADATA REFRESH INTERVAL MS 500` now we support only `TOPIC METADATA REFRESH INTERVAL = '500ms'`. To get environments that might have had the old syntax (it was an unstable option so there shouldn't be any) we support parsing the `MS` keyword and then in a Catalog migration swap from a number `500` to the string `500ms`. ### Checklist - [ ] This PR has adequate test coverage / QA involvement has been duly considered. - [ ] This PR has an associated up-to-date [design doc](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/design/README.md), is a design doc ([template](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/design/00000000_template.md)), or is sufficiently small to not require a design. <!-- Reference the design in the description. --> - [ ] If this PR evolves [an existing `$T ⇔ Proto$T` mapping](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/command-and-response-binary-encoding.md) (possibly in a backwards-incompatible way), then it is tagged with a `T-proto` label. - [ ] If this PR will require changes to cloud orchestration or tests, there is a companion cloud PR to account for those changes that is tagged with the release-blocker label ([example](https://github.com/MaterializeInc/cloud/pull/5021)). <!-- Ask in #team-cloud on Slack if you need help preparing the cloud PR. --> - [x] This PR includes the following [user-facing behavior changes](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/guide-changes.md#what-changes-require-a-release-note): - N/a test only fixes --------- Co-authored-by: Sean Loiselle <[email protected]>
1 parent a05ad99 commit 2052506

File tree

8 files changed

+27
-10
lines changed

8 files changed

+27
-10
lines changed

ci/nightly/pipeline.template.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -384,7 +384,7 @@ steps:
384384
- group: AWS
385385
key: aws
386386
steps:
387-
- id: aws
387+
- id: aws-checks
388388
label: AWS
389389
timeout_in_minutes: 30
390390
agents:

misc/python/materialize/checks/all_checks/aws.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,15 @@
1212

1313
from materialize.checks.actions import Testdrive
1414
from materialize.checks.checks import Check, externally_idempotent
15+
from materialize.checks.executors import Executor
16+
from materialize.mz_version import MzVersion
1517

1618

1719
@externally_idempotent(False)
1820
class AwsConnection(Check):
21+
def _can_run(self, e: Executor) -> bool:
22+
return self.base_version >= MzVersion.parse_mz("v0.80.0-dev")
23+
1924
def initialize(self) -> Testdrive:
2025
return Testdrive(
2126
dedent(

misc/python/materialize/checks/all_checks/multiple_partitions.py

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,8 @@
1111
from materialize.checks.actions import Testdrive
1212
from materialize.checks.checks import Check, externally_idempotent
1313
from materialize.checks.common import KAFKA_SCHEMA_WITH_SINGLE_STRING_FIELD
14+
from materialize.checks.executors import Executor
15+
from materialize.mz_version import MzVersion
1416

1517

1618
def schemas() -> str:
@@ -21,6 +23,13 @@ def schemas() -> str:
2123
class MultiplePartitions(Check):
2224
"""Test that adds new partitions to a Kafka source"""
2325

26+
def _can_run(self, e: Executor) -> bool:
27+
# v0.80.0 introduced backward incompatible changes to `TOPIC METADATA
28+
# REFRESH INTERVAL`, which was never available to customers, so rather
29+
# than try to introduce hacks to support it, we simply disable tests
30+
# that used
31+
return self.base_version >= MzVersion.parse_mz("v0.80.0-dev")
32+
2433
def initialize(self) -> Testdrive:
2534
return Testdrive(
2635
schemas()
@@ -35,10 +44,11 @@ def initialize(self) -> Testdrive:
3544
$ kafka-ingest format=avro key-format=avro topic=multiple-partitions-topic key-schema=${keyschema} schema=${schema} repeat=100
3645
{"key1": "A${kafka-ingest.iteration}"} {"f1": "A${kafka-ingest.iteration}"}
3746
38-
> CREATE SOURCE multiple_partitions_source
39-
FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-multiple-partitions-topic-${testdrive.seed}', TOPIC METADATA REFRESH INTERVAL '500ms')
40-
FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn
41-
ENVELOPE UPSERT
47+
# Note: we use "postgres-execute" here instead of ">" because for commands run with
48+
# the ">" testdrive parses them with the SQL parser from `main`, and the SQL for
49+
# this command is version dependent.
50+
$ postgres-execute connection=postgres://materialize:materialize@${testdrive.materialize-sql-addr}
51+
CREATE SOURCE multiple_partitions_source FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-multiple-partitions-topic-${testdrive.seed}', TOPIC METADATA REFRESH INTERVAL '500ms') FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn ENVELOPE UPSERT;
4252
4353
$ kafka-add-partitions topic=multiple-partitions-topic total-partitions=2
4454

misc/python/materialize/cli/ci_logged_errors_detect.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@
9393
# Old versions won't support new parameters
9494
| (platform-checks|legacy-upgrade|upgrade-matrix|feature-benchmark)-materialized-.* \| .*cannot\ load\ unknown\ system\ parameter\ from\ catalog\ storage
9595
# For platform-checks upgrade tests
96-
| cannot\ load\ unknown\ system\ parameter\ from\ catalog\ storage(\ to\ set\ (default|configured)\ parameter)?\ name=enable_dangerous_functions
96+
| cannot\ load\ unknown\ system\ parameter\ from\ catalog\ storage(\ to\ set\ (default|configured)\ parameter)?
9797
| internal\ error:\ no\ AWS\ external\ ID\ prefix\ configured
9898
| failed\ writing\ row\ to\ mz_aws_connections.*no\ AWS\ external\ ID\ prefix\ configured
9999
)

src/adapter/src/catalog/open.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1567,7 +1567,10 @@ impl Catalog {
15671567
};
15681568
let name = state.resolve_full_name(&name, None);
15691569
return Err(Error::new(ErrorKind::Corruption {
1570-
detail: format!("failed to deserialize item {} ({}): {}", item.id, name, e),
1570+
detail: format!(
1571+
"failed to deserialize item {} ({}): {}\n\n{}",
1572+
item.id, name, e, item.create_sql
1573+
),
15711574
}));
15721575
}
15731576
};

test/legacy-upgrade/check-from-v0.27.0-kafka-sink.td

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
# the Business Source License, use of this software will be governed
88
# by the Apache License, Version 2.0.
99

10-
> SHOW CREATE SINK upgrade_kafka_sink;
10+
>[version<8000] SHOW CREATE SINK upgrade_kafka_sink;
1111
"materialize.public.upgrade_kafka_sink" "CREATE SINK \"materialize\".\"public\".\"upgrade_kafka_sink\" FROM \"materialize\".\"public\".\"static_view\" INTO KAFKA CONNECTION \"materialize\".\"public\".\"kafka_conn\" (TOPIC = 'upgrade-kafka-sink') FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION \"materialize\".\"public\".\"csr_conn\" ENVELOPE DEBEZIUM"
1212

1313
# Test that the "disk" option on the linked cluster defaults to false

test/sqllogictest/explain/optimized_plan_as_text_redacted.slt

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -938,4 +938,3 @@ Used Indexes:
938938
- materialize.public.t_a_idx_1 (*** full scan ***)
939939

940940
EOF
941-

test/testdrive/catalog.td

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -686,7 +686,7 @@ test_table
686686

687687
# `SHOW TABLES` and `mz_tables` should agree.
688688
> SELECT COUNT(*) FROM mz_tables WHERE id LIKE 's%'
689-
48
689+
49
690690

691691
# There is one entry in mz_indexes for each field_number/expression of the index.
692692
> SELECT COUNT(id) FROM mz_indexes WHERE id LIKE 's%'

0 commit comments

Comments
 (0)