Skip to content

Commit

Permalink
test: Several small test fixes to get nightly green (#23906)
Browse files Browse the repository at this point in the history
Before cutting the `v0.80` release we want to get Nightly green

Included is a hack to get a previously unstable, now totally deleted,
Kafka connection config option working. We used to support writing
`TOPIC METADATA REFRESH INTERVAL MS 500` now we support only `TOPIC
METADATA REFRESH INTERVAL = '500ms'`. To keep environments that might
have had the old syntax working (it was an unstable option, so there shouldn't be
any), we support parsing the `MS` keyword and then, in a Catalog migration,
swap from the number `500` to the string `500ms`.

### Checklist

- [ ] This PR has adequate test coverage / QA involvement has been duly
considered.
- [ ] This PR has an associated up-to-date [design
doc](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/design/README.md),
is a design doc
([template](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/design/00000000_template.md)),
or is sufficiently small to not require a design.
  <!-- Reference the design in the description. -->
- [ ] If this PR evolves [an existing `$T ⇔ Proto$T`
mapping](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/command-and-response-binary-encoding.md)
(possibly in a backwards-incompatible way), then it is tagged with a
`T-proto` label.
- [ ] If this PR will require changes to cloud orchestration or tests,
there is a companion cloud PR to account for those changes that is
tagged with the release-blocker label
([example](MaterializeInc/cloud#5021)).
<!-- Ask in #team-cloud on Slack if you need help preparing the cloud
PR. -->
- [x] This PR includes the following [user-facing behavior
changes](https://github.com/MaterializeInc/materialize/blob/main/doc/developer/guide-changes.md#what-changes-require-a-release-note):
  - N/A — test-only fixes

---------

Co-authored-by: Sean Loiselle <[email protected]>
  • Loading branch information
ParkMyCar and Sean Loiselle authored Dec 15, 2023
1 parent a05ad99 commit 2052506
Show file tree
Hide file tree
Showing 8 changed files with 27 additions and 10 deletions.
2 changes: 1 addition & 1 deletion ci/nightly/pipeline.template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -384,7 +384,7 @@ steps:
- group: AWS
key: aws
steps:
- id: aws
- id: aws-checks
label: AWS
timeout_in_minutes: 30
agents:
Expand Down
5 changes: 5 additions & 0 deletions misc/python/materialize/checks/all_checks/aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,15 @@

from materialize.checks.actions import Testdrive
from materialize.checks.checks import Check, externally_idempotent
from materialize.checks.executors import Executor
from materialize.mz_version import MzVersion


@externally_idempotent(False)
class AwsConnection(Check):
def _can_run(self, e: Executor) -> bool:
return self.base_version >= MzVersion.parse_mz("v0.80.0-dev")

def initialize(self) -> Testdrive:
return Testdrive(
dedent(
Expand Down
18 changes: 14 additions & 4 deletions misc/python/materialize/checks/all_checks/multiple_partitions.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
from materialize.checks.actions import Testdrive
from materialize.checks.checks import Check, externally_idempotent
from materialize.checks.common import KAFKA_SCHEMA_WITH_SINGLE_STRING_FIELD
from materialize.checks.executors import Executor
from materialize.mz_version import MzVersion


def schemas() -> str:
Expand All @@ -21,6 +23,13 @@ def schemas() -> str:
class MultiplePartitions(Check):
"""Test that adds new partitions to a Kafka source"""

def _can_run(self, e: Executor) -> bool:
# v0.80.0 introduced backward incompatible changes to `TOPIC METADATA
# REFRESH INTERVAL`, which was never available to customers, so rather
# than try to introduce hacks to support it, we simply disable tests
# that used it.
return self.base_version >= MzVersion.parse_mz("v0.80.0-dev")

def initialize(self) -> Testdrive:
return Testdrive(
schemas()
Expand All @@ -35,10 +44,11 @@ def initialize(self) -> Testdrive:
$ kafka-ingest format=avro key-format=avro topic=multiple-partitions-topic key-schema=${keyschema} schema=${schema} repeat=100
{"key1": "A${kafka-ingest.iteration}"} {"f1": "A${kafka-ingest.iteration}"}
> CREATE SOURCE multiple_partitions_source
FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-multiple-partitions-topic-${testdrive.seed}', TOPIC METADATA REFRESH INTERVAL '500ms')
FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn
ENVELOPE UPSERT
# Note: we use "postgres-execute" here instead of ">" because for commands run with
# the ">" testdrive parses them with the SQL parser from `main`, and the SQL for
# this command is version dependent.
$ postgres-execute connection=postgres://materialize:materialize@${testdrive.materialize-sql-addr}
CREATE SOURCE multiple_partitions_source FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-multiple-partitions-topic-${testdrive.seed}', TOPIC METADATA REFRESH INTERVAL '500ms') FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION csr_conn ENVELOPE UPSERT;
$ kafka-add-partitions topic=multiple-partitions-topic total-partitions=2
Expand Down
2 changes: 1 addition & 1 deletion misc/python/materialize/cli/ci_logged_errors_detect.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@
# Old versions won't support new parameters
| (platform-checks|legacy-upgrade|upgrade-matrix|feature-benchmark)-materialized-.* \| .*cannot\ load\ unknown\ system\ parameter\ from\ catalog\ storage
# For platform-checks upgrade tests
| cannot\ load\ unknown\ system\ parameter\ from\ catalog\ storage(\ to\ set\ (default|configured)\ parameter)?\ name=enable_dangerous_functions
| cannot\ load\ unknown\ system\ parameter\ from\ catalog\ storage(\ to\ set\ (default|configured)\ parameter)?
| internal\ error:\ no\ AWS\ external\ ID\ prefix\ configured
| failed\ writing\ row\ to\ mz_aws_connections.*no\ AWS\ external\ ID\ prefix\ configured
)
Expand Down
5 changes: 4 additions & 1 deletion src/adapter/src/catalog/open.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1567,7 +1567,10 @@ impl Catalog {
};
let name = state.resolve_full_name(&name, None);
return Err(Error::new(ErrorKind::Corruption {
detail: format!("failed to deserialize item {} ({}): {}", item.id, name, e),
detail: format!(
"failed to deserialize item {} ({}): {}\n\n{}",
item.id, name, e, item.create_sql
),
}));
}
};
Expand Down
2 changes: 1 addition & 1 deletion test/legacy-upgrade/check-from-v0.27.0-kafka-sink.td
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.

> SHOW CREATE SINK upgrade_kafka_sink;
>[version<8000] SHOW CREATE SINK upgrade_kafka_sink;
"materialize.public.upgrade_kafka_sink" "CREATE SINK \"materialize\".\"public\".\"upgrade_kafka_sink\" FROM \"materialize\".\"public\".\"static_view\" INTO KAFKA CONNECTION \"materialize\".\"public\".\"kafka_conn\" (TOPIC = 'upgrade-kafka-sink') FORMAT AVRO USING CONFLUENT SCHEMA REGISTRY CONNECTION \"materialize\".\"public\".\"csr_conn\" ENVELOPE DEBEZIUM"

# Test that the "disk" option on the linked cluster defaults to false
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -938,4 +938,3 @@ Used Indexes:
- materialize.public.t_a_idx_1 (*** full scan ***)

EOF

2 changes: 1 addition & 1 deletion test/testdrive/catalog.td
Original file line number Diff line number Diff line change
Expand Up @@ -686,7 +686,7 @@ test_table

# `SHOW TABLES` and `mz_tables` should agree.
> SELECT COUNT(*) FROM mz_tables WHERE id LIKE 's%'
48
49

# There is one entry in mz_indexes for each field_number/expression of the index.
> SELECT COUNT(id) FROM mz_indexes WHERE id LIKE 's%'
Expand Down

0 comments on commit 2052506

Please sign in to comment.