Skip to content

Commit 79f844b

Browse files
authored
chore(py): add unused-noqa lint rule (#12615)
1 parent 2d762f0 commit 79f844b

File tree

32 files changed

+42
-35
lines changed

32 files changed

+42
-35
lines changed

metadata-ingestion-modules/airflow-plugin/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ extend-select = [
4444
"G010", # logging.warn -> logging.warning
4545
"I", # isort
4646
"TID", # flake8-tidy-imports
47+
"RUF100", # unused-noqa
4748
]
4849
ignore = [
4950
"E501", # Line length violations (handled by formatter)

metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@
6060
# To placate mypy on Airflow versions that don't have the listener API,
6161
# we define a dummy hookimpl that's an identity function.
6262

63-
def hookimpl(f: _F) -> _F: # type: ignore[misc] # noqa: F811
63+
def hookimpl(f: _F) -> _F: # type: ignore[misc]
6464
return f
6565

6666
else:

metadata-ingestion-modules/airflow-plugin/tests/conftest.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import pathlib
22
import site
33

4-
from datahub.testing.pytest_hooks import ( # noqa: F401,E402
4+
from datahub.testing.pytest_hooks import ( # noqa: F401
55
load_golden_flags,
66
pytest_addoption,
77
)

metadata-ingestion-modules/dagster-plugin/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ extend-select = [
4444
"G010", # logging.warn -> logging.warning
4545
"I", # isort
4646
"TID", # flake8-tidy-imports
47+
"RUF100", # unused-noqa
4748
]
4849
ignore = [
4950
"E501", # Line length violations (handled by formatter)
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from datahub.testing.pytest_hooks import ( # noqa: F401,E402
1+
from datahub.testing.pytest_hooks import ( # noqa: F401
22
load_golden_flags,
33
pytest_addoption,
44
)

metadata-ingestion-modules/gx-plugin/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ extend-select = [
4444
"G010", # logging.warn -> logging.warning
4545
"I", # isort
4646
"TID", # flake8-tidy-imports
47+
"RUF100", # unused-noqa
4748
]
4849
ignore = [
4950
"E501", # Line length violations (handled by formatter)
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from datahub.testing.docker_utils import docker_compose_runner # noqa: F401
2-
from datahub.testing.pytest_hooks import ( # noqa: F401,E402
2+
from datahub.testing.pytest_hooks import ( # noqa: F401
33
load_golden_flags,
44
pytest_addoption,
55
)

metadata-ingestion-modules/prefect-plugin/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ extend-select = [
4444
"G010", # logging.warn -> logging.warning
4545
"I", # isort
4646
"TID", # flake8-tidy-imports
47+
"RUF100", # unused-noqa
4748
]
4849
ignore = [
4950
"E501", # Line length violations (handled by formatter)

metadata-ingestion/pyproject.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ extend-select = [
2929
"G010", # logging.warn -> logging.warning
3030
"I", # isort
3131
"TID", # flake8-tidy-imports
32+
"RUF100", # unused-noqa
3233
]
3334
extend-ignore = [
3435
"E501", # Handled by formatter
@@ -38,6 +39,8 @@ extend-ignore = [
3839
"E203", # Ignore whitespace before ':' (matches Black)
3940
"B019", # Allow usages of functools.lru_cache
4041
"B008", # Allow function call in argument defaults
42+
"RUF012", # mutable-class-default; incompatible with pydantic
43+
"RUF015", # unnecessary-iterable-allocation-for-first-element
4144
# TODO: Enable these later
4245
"B006", # Mutable args
4346
"B017", # Do not assert blind exception

metadata-ingestion/src/datahub/cli/docker_cli.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -430,7 +430,7 @@ def detect_quickstart_arch(arch: Optional[str]) -> Architectures:
430430
return quickstart_arch
431431

432432

433-
@docker.command() # noqa: C901
433+
@docker.command()
434434
@click.option(
435435
"--version",
436436
type=str,
@@ -592,7 +592,7 @@ def detect_quickstart_arch(arch: Optional[str]) -> Architectures:
592592
"arch",
593593
]
594594
)
595-
def quickstart( # noqa: C901
595+
def quickstart(
596596
version: Optional[str],
597597
build_locally: bool,
598598
pull_images: bool,

metadata-ingestion/src/datahub/configuration/common.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
from pydantic.fields import Field
2121
from typing_extensions import Protocol, Self
2222

23-
from datahub.configuration._config_enum import ConfigEnum as ConfigEnum # noqa: I250
23+
from datahub.configuration._config_enum import ConfigEnum as ConfigEnum
2424
from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2
2525
from datahub.utilities.dedup_list import deduplicate_list
2626

metadata-ingestion/src/datahub/ingestion/api/decorators.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from typing import Callable, Dict, Optional, Type
44

55
from datahub.ingestion.api.common import PipelineContext
6-
from datahub.ingestion.api.source import ( # noqa: I250
6+
from datahub.ingestion.api.source import (
77
Source,
88
SourceCapability as SourceCapability,
99
)

metadata-ingestion/src/datahub/ingestion/graph/client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
3333
from datahub.emitter.mcp import MetadataChangeProposalWrapper
3434
from datahub.emitter.rest_emitter import DatahubRestEmitter
3535
from datahub.emitter.serialization_helper import post_json_transform
36-
from datahub.ingestion.graph.config import ( # noqa: I250; TODO: Remove this alias
36+
from datahub.ingestion.graph.config import (
3737
DatahubClientConfig as DatahubClientConfig,
3838
)
3939
from datahub.ingestion.graph.connections import (

metadata-ingestion/src/datahub/ingestion/run/pipeline.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -439,7 +439,7 @@ def _time_to_print(self) -> bool:
439439
return True
440440
return False
441441

442-
def run(self) -> None: # noqa: C901
442+
def run(self) -> None:
443443
with contextlib.ExitStack() as stack:
444444
if self.config.flags.generate_memory_profiles:
445445
import memray

metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -923,15 +923,15 @@ def from_dict(
923923
tags=cast(List, dict.get("tags")) if dict.get("tags") is not None else [],
924924
)
925925

926-
@classmethod # noqa: C901
926+
@classmethod
927927
def from_api( # noqa: C901
928928
cls,
929929
model: str,
930930
explore_name: str,
931931
client: LookerAPI,
932932
reporter: SourceReport,
933933
source_config: LookerDashboardSourceConfig,
934-
) -> Optional["LookerExplore"]: # noqa: C901
934+
) -> Optional["LookerExplore"]:
935935
try:
936936
explore = client.lookml_model_explore(model, explore_name)
937937
views: Set[str] = set()
@@ -1183,7 +1183,7 @@ def _get_embed_url(self, base_url: str) -> str:
11831183
base_url = remove_port_from_url(base_url)
11841184
return f"{base_url}/embed/explore/{self.model_name}/{self.name}"
11851185

1186-
def _to_metadata_events( # noqa: C901
1186+
def _to_metadata_events(
11871187
self,
11881188
config: LookerCommonConfig,
11891189
reporter: SourceReport,

metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -383,7 +383,7 @@ def add_reachable_explore(self, model: str, explore: str, via: str) -> None:
383383

384384
self.reachable_explores[(model, explore)].append(via)
385385

386-
def _get_looker_dashboard_element( # noqa: C901
386+
def _get_looker_dashboard_element(
387387
self, element: DashboardElement
388388
) -> Optional[LookerDashboardElement]:
389389
# Dashboard elements can use raw usage_queries against explores

metadata-ingestion/src/datahub/ingestion/source/nifi.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -488,7 +488,7 @@ def rest_api_base_url(self):
488488
def get_report(self) -> SourceReport:
489489
return self.report
490490

491-
def update_flow(self, pg_flow_dto: Dict, recursion_level: int = 0) -> None: # noqa: C901
491+
def update_flow(self, pg_flow_dto: Dict, recursion_level: int = 0) -> None:
492492
"""
493493
Update self.nifi_flow with contents of the input process group `pg_flow_dto`
494494
"""
@@ -894,7 +894,7 @@ def delete_provenance(self, provenance_uri):
894894
if not delete_response.ok:
895895
logger.error("failed to delete provenance ", provenance_uri)
896896

897-
def construct_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901
897+
def construct_workunits(self) -> Iterable[MetadataWorkUnit]:
898898
rootpg = self.nifi_flow.root_process_group
899899
flow_name = rootpg.name # self.config.site_name
900900
flow_urn = self.make_flow_urn()

metadata-ingestion/src/datahub/ingestion/source/openapi.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -270,7 +270,7 @@ def build_wu(
270270
mce = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
271271
return ApiWorkUnit(id=dataset_name, mce=mce)
272272

273-
def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901
273+
def get_workunits_internal(self) -> Iterable[ApiWorkUnit]:
274274
config = self.config
275275

276276
sw_dict = self.config.get_swagger()

metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ def check_sw_version(sw_dict: dict) -> None:
111111
)
112112

113113

114-
def get_endpoints(sw_dict: dict) -> dict: # noqa: C901
114+
def get_endpoints(sw_dict: dict) -> dict:
115115
"""
116116
Get all the URLs, together with their description and the tags
117117
"""

metadata-ingestion/src/datahub/ingestion/source/powerbi_report_server/report_server_domain.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ class CatalogItem(BaseModel):
3333
)
3434

3535
@validator("display_name", always=True)
36-
def validate_diplay_name(cls, value, values): # noqa: N805
36+
def validate_diplay_name(cls, value, values):
3737
if values["created_by"]:
3838
return values["created_by"].split("\\")[-1]
3939
return ""

metadata-ingestion/src/datahub/ingestion/source/sql/athena.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@
5555
except ImportError:
5656
_F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
5757

58-
def override(f: _F, /) -> _F: # noqa: F811
58+
def override(f: _F, /) -> _F:
5959
return f
6060

6161

@@ -104,7 +104,7 @@ def get_view_definition(self, connection, view_name, schema=None, **kw):
104104
return "\n".join([r for r in res])
105105

106106
@typing.no_type_check
107-
def _get_column_type(self, type_: Union[str, Dict[str, Any]]) -> TypeEngine: # noqa: C901
107+
def _get_column_type(self, type_: Union[str, Dict[str, Any]]) -> TypeEngine:
108108
"""Derives the data type of the Athena column.
109109
110110
This method is overwritten to extend the behavior of PyAthena.

metadata-ingestion/src/datahub/ingestion/source/sql/hive_metastore.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@
6767

6868

6969
class HiveMetastoreConfigMode(StrEnum):
70-
hive: str = "hive" # noqa: F811
70+
hive: str = "hive"
7171
presto: str = "presto"
7272
presto_on_hive: str = "presto-on-hive"
7373
trino: str = "trino"

metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -401,7 +401,7 @@ def loop_job_steps(
401401
data_job.add_property(name=data_name, value=str(data_value))
402402
yield from self.construct_job_workunits(data_job)
403403

404-
def loop_stored_procedures( # noqa: C901
404+
def loop_stored_procedures(
405405
self,
406406
inspector: Inspector,
407407
schema: str,

metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -635,7 +635,7 @@ def make_data_reader(self, inspector: Inspector) -> Optional[DataReader]:
635635

636636
return None
637637

638-
def loop_tables( # noqa: C901
638+
def loop_tables(
639639
self,
640640
inspector: Inspector,
641641
schema: str,

metadata-ingestion/src/datahub/ingestion/source/sql/teradata.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -649,7 +649,7 @@ def __init__(self, config: TeradataConfig, ctx: PipelineContext):
649649
)
650650

651651
# Disabling the below because the cached view definition is not the view definition the column in tablesv actually holds the last statement executed against the object... not necessarily the view definition
652-
# setattr( # noqa: B010
652+
# setattr(
653653
# TeradataDialect,
654654
# "get_view_definition",
655655
# lambda self, connection, view_name, schema=None, **kw: optimized_get_view_definition(
@@ -746,7 +746,7 @@ def get_db_name(self, inspector: Inspector) -> str:
746746
else:
747747
raise Exception("Unable to get database name from Sqlalchemy inspector")
748748

749-
def cached_loop_tables( # noqa: C901
749+
def cached_loop_tables(
750750
self,
751751
inspector: Inspector,
752752
schema: str,
@@ -782,7 +782,7 @@ def cached_get_table_properties(
782782
break
783783
return description, properties, location
784784

785-
def cached_loop_views( # noqa: C901
785+
def cached_loop_views(
786786
self,
787787
inspector: Inspector,
788788
schema: str,

metadata-ingestion/src/datahub/sql_parsing/schema_resolver.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from datahub.ingestion.source.bigquery_v2.bigquery_audit import BigqueryTableIdentifier
1414
from datahub.metadata.schema_classes import SchemaFieldClass, SchemaMetadataClass
1515
from datahub.metadata.urns import DataPlatformUrn
16-
from datahub.sql_parsing._models import _TableName as _TableName # noqa: I250
16+
from datahub.sql_parsing._models import _TableName as _TableName
1717
from datahub.sql_parsing.sql_parsing_common import PLATFORMS_WITH_CASE_SENSITIVE_TABLES
1818
from datahub.utilities.file_backed_collections import ConnectionWrapper, FileBackedDict
1919
from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path

metadata-ingestion/src/datahub/sql_parsing/sqlglot_lineage.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -473,7 +473,7 @@ def _create_table_ddl_cll(
473473
return column_lineage
474474

475475

476-
def _select_statement_cll( # noqa: C901
476+
def _select_statement_cll(
477477
statement: _SupportedColumnLineageTypes,
478478
dialect: sqlglot.Dialect,
479479
root_scope: sqlglot.optimizer.Scope,

metadata-ingestion/src/datahub/upgrade/upgrade.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -293,9 +293,9 @@ def is_client_server_compatible(client: VersionStats, server: VersionStats) -> i
293293
return server.version.micro - client.version.micro
294294

295295

296-
def _maybe_print_upgrade_message( # noqa: C901
296+
def _maybe_print_upgrade_message(
297297
version_stats: Optional[DataHubVersionStats],
298-
) -> None: # noqa: C901
298+
) -> None:
299299
days_before_cli_stale = 7
300300
days_before_quickstart_stale = 7
301301

metadata-ingestion/src/datahub/utilities/mapping.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,7 @@ def __init__(
171171
self.owner_source_type = owner_source_type
172172
self.match_nested_props = match_nested_props
173173

174-
def process(self, raw_props: Mapping[str, Any]) -> Dict[str, Any]: # noqa: C901
174+
def process(self, raw_props: Mapping[str, Any]) -> Dict[str, Any]:
175175
# Defining the following local variables -
176176
# operations_map - the final resulting map when operations are processed.
177177
# Against each operation the values to be applied are stored.

metadata-ingestion/tests/conftest.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@
4141
except ImportError:
4242
pass
4343

44-
import freezegun # noqa: F401,E402
44+
import freezegun # noqa: E402
4545

4646
# The freezegun library has incomplete type annotations.
4747
# See https://github.com/spulec/freezegun/issues/469

metadata-ingestion/tests/test_helpers/docker_helpers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
import pytest
66

7-
from datahub.testing.docker_utils import ( # noqa: F401,I250
7+
from datahub.testing.docker_utils import (
88
docker_compose_runner as docker_compose_runner,
99
is_responsive as is_responsive,
1010
wait_for_port as wait_for_port,

metadata-ingestion/tests/unit/bigquery/test_bigqueryv2_usage_source.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,7 @@ def test_bigqueryv2_filters():
114114
)
115115
OR
116116
protoPayload.metadata.tableDataRead.reason = "JOB"
117-
)""" # noqa: W293
117+
)"""
118118

119119
corrected_start_time = config.start_time - config.max_query_duration
120120
corrected_end_time = config.end_time + config.max_query_duration

0 commit comments

Comments (0)