chore(py): add unused-noqa lint rule (#12615)
hsheth2 authored Feb 13, 2025
1 parent 2d762f0 · commit 79f844b
Showing 32 changed files with 42 additions and 35 deletions.
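The newly enabled RUF100 rule reports `# noqa` directives that no longer suppress anything, which is what drives every deletion below. As a minimal sketch (not part of this commit) of what the rule flags:

    x = 1  # noqa: F401  <- RUF100: F401 (unused-import) can never fire on this line, so the suppression is unused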
1 change: 1 addition & 0 deletions metadata-ingestion-modules/airflow-plugin/pyproject.toml

@@ -44,6 +44,7 @@ extend-select = [
     "G010", # logging.warn -> logging.warning
     "I", # isort
     "TID", # flake8-tidy-imports
+    "RUF100", # unused-noqa
 ]
 ignore = [
     "E501", # Line length violations (handled by formatter)
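RUF100 findings are auto-fixable, so with the rule selected as above, a sweep along these lines (standard Ruff CLI; the exact invocation used here is an assumption) produces the deletions in the remaining files:

    ruff check --fix .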
@@ -60,7 +60,7 @@
     # To placate mypy on Airflow versions that don't have the listener API,
     # we define a dummy hookimpl that's an identity function.

-    def hookimpl(f: _F) -> _F: # type: ignore[misc] # noqa: F811
+    def hookimpl(f: _F) -> _F: # type: ignore[misc]
         return f

 else:
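For context, this shim defines a no-op `hookimpl` only when the real decorator can't be imported; the `# noqa: F811` (redefinition of unused name) became dead because Ruff never raises F811 on this line. A hedged reconstruction of the surrounding pattern — the import path and the `try`/`except`/`else` framing are assumptions inferred from the visible `else:`, not shown in the diff:

    try:
        from airflow.listeners import hookimpl  # assumed import path
    except ImportError:
        # To placate mypy on Airflow versions that don't have the listener API,
        # we define a dummy hookimpl that's an identity function.
        def hookimpl(f: _F) -> _F:  # type: ignore[misc]
            return f
    else:
        ...  # listener-API setup elided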
@@ -1,7 +1,7 @@
 import pathlib
 import site

-from datahub.testing.pytest_hooks import ( # noqa: F401,E402
+from datahub.testing.pytest_hooks import ( # noqa: F401
     load_golden_flags,
     pytest_addoption,
 )
1 change: 1 addition & 0 deletions metadata-ingestion-modules/dagster-plugin/pyproject.toml

@@ -44,6 +44,7 @@ extend-select = [
     "G010", # logging.warn -> logging.warning
     "I", # isort
     "TID", # flake8-tidy-imports
+    "RUF100", # unused-noqa
 ]
 ignore = [
     "E501", # Line length violations (handled by formatter)
@@ -1,4 +1,4 @@
-from datahub.testing.pytest_hooks import ( # noqa: F401,E402
+from datahub.testing.pytest_hooks import ( # noqa: F401
     load_golden_flags,
     pytest_addoption,
 )
1 change: 1 addition & 0 deletions metadata-ingestion-modules/gx-plugin/pyproject.toml

@@ -44,6 +44,7 @@ extend-select = [
     "G010", # logging.warn -> logging.warning
     "I", # isort
     "TID", # flake8-tidy-imports
+    "RUF100", # unused-noqa
 ]
 ignore = [
     "E501", # Line length violations (handled by formatter)
2 changes: 1 addition & 1 deletion metadata-ingestion-modules/gx-plugin/tests/conftest.py

@@ -1,5 +1,5 @@
 from datahub.testing.docker_utils import docker_compose_runner # noqa: F401
-from datahub.testing.pytest_hooks import ( # noqa: F401,E402
+from datahub.testing.pytest_hooks import ( # noqa: F401
     load_golden_flags,
     pytest_addoption,
 )
1 change: 1 addition & 0 deletions metadata-ingestion-modules/prefect-plugin/pyproject.toml

@@ -44,6 +44,7 @@ extend-select = [
     "G010", # logging.warn -> logging.warning
     "I", # isort
     "TID", # flake8-tidy-imports
+    "RUF100", # unused-noqa
 ]
 ignore = [
     "E501", # Line length violations (handled by formatter)
3 changes: 3 additions & 0 deletions metadata-ingestion/pyproject.toml

@@ -29,6 +29,7 @@ extend-select = [
     "G010", # logging.warn -> logging.warning
     "I", # isort
     "TID", # flake8-tidy-imports
+    "RUF100", # unused-noqa
 ]
 extend-ignore = [
     "E501", # Handled by formatter
@@ -38,6 +39,8 @@ extend-ignore = [
     "E203", # Ignore whitespace before ':' (matches Black)
     "B019", # Allow usages of functools.lru_cache
     "B008", # Allow function call in argument defaults
+    "RUF012", # mutable-class-default; incompatible with pydantic
+    "RUF015", # unnecessary-iterable-allocation-for-first-element
     # TODO: Enable these later
     "B006", # Mutable args
     "B017", # Do not assert blind exception
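The two new ignores deserve a note. RUF012 (mutable-class-default) is suppressed because pydantic models declare fields as mutable class attributes by design — pydantic copies field defaults per instance, so they are safe. RUF015 (unnecessary-iterable-allocation-for-first-element) flags patterns like `list(x)[0]` in favor of `next(iter(x))`. A hedged sketch of the RUF012 false positive (the model name is illustrative, not from the codebase):

    from typing import List

    from pydantic import BaseModel

    class ExampleSourceConfig(BaseModel):
        # RUF012 would flag this mutable class default, but pydantic
        # treats it as a field default and copies it per instance.
        tags: List[str] = []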
4 changes: 2 additions & 2 deletions metadata-ingestion/src/datahub/cli/docker_cli.py

@@ -430,7 +430,7 @@ def detect_quickstart_arch(arch: Optional[str]) -> Architectures:
     return quickstart_arch


-@docker.command() # noqa: C901
+@docker.command()
 @click.option(
     "--version",
     type=str,
@@ -592,7 +592,7 @@ def detect_quickstart_arch(arch: Optional[str]) -> Architectures:
         "arch",
     ]
 )
-def quickstart( # noqa: C901
+def quickstart(
     version: Optional[str],
     build_locally: bool,
     pull_images: bool,
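The `# noqa: C901` deletions in this and later files follow from how RUF100 judges a directive: it is unused when the named rule would not actually fire on that line — for example when the function's measured complexity is below the threshold, or when the suppression sits on a decorator or closing-paren line rather than the `def` line where Ruff attaches C901. (That reading explains why `from_api` below keeps its def-line `# noqa: C901` while the surrounding ones are dropped.) A minimal sketch of the flagged case, with a hypothetical function:

    def simple_command():  # noqa: C901  <- RUF100: not complex enough for C901 to fire
        return 1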
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/configuration/common.py

@@ -20,7 +20,7 @@
 from pydantic.fields import Field
 from typing_extensions import Protocol, Self

-from datahub.configuration._config_enum import ConfigEnum as ConfigEnum # noqa: I250
+from datahub.configuration._config_enum import ConfigEnum as ConfigEnum
 from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2
 from datahub.utilities.dedup_list import deduplicate_list
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/ingestion/api/decorators.py

@@ -3,7 +3,7 @@
 from typing import Callable, Dict, Optional, Type

 from datahub.ingestion.api.common import PipelineContext
-from datahub.ingestion.api.source import ( # noqa: I250
+from datahub.ingestion.api.source import (
     Source,
     SourceCapability as SourceCapability,
 )
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/ingestion/graph/client.py

@@ -33,7 +33,7 @@
 from datahub.emitter.mcp import MetadataChangeProposalWrapper
 from datahub.emitter.rest_emitter import DatahubRestEmitter
 from datahub.emitter.serialization_helper import post_json_transform
-from datahub.ingestion.graph.config import ( # noqa: I250; TODO: Remove this alias
+from datahub.ingestion.graph.config import (
     DatahubClientConfig as DatahubClientConfig,
 )
 from datahub.ingestion.graph.connections import (
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/ingestion/run/pipeline.py

@@ -439,7 +439,7 @@ def _time_to_print(self) -> bool:
             return True
         return False

-    def run(self) -> None: # noqa: C901
+    def run(self) -> None:
         with contextlib.ExitStack() as stack:
             if self.config.flags.generate_memory_profiles:
                 import memray
@@ -923,15 +923,15 @@ def from_dict(
             tags=cast(List, dict.get("tags")) if dict.get("tags") is not None else [],
         )

-    @classmethod # noqa: C901
+    @classmethod
     def from_api( # noqa: C901
         cls,
         model: str,
         explore_name: str,
         client: LookerAPI,
         reporter: SourceReport,
         source_config: LookerDashboardSourceConfig,
-    ) -> Optional["LookerExplore"]: # noqa: C901
+    ) -> Optional["LookerExplore"]:
         try:
             explore = client.lookml_model_explore(model, explore_name)
             views: Set[str] = set()
@@ -1183,7 +1183,7 @@ def _get_embed_url(self, base_url: str) -> str:
         base_url = remove_port_from_url(base_url)
         return f"{base_url}/embed/explore/{self.model_name}/{self.name}"

-    def _to_metadata_events( # noqa: C901
+    def _to_metadata_events(
         self,
         config: LookerCommonConfig,
         reporter: SourceReport,
@@ -383,7 +383,7 @@ def add_reachable_explore(self, model: str, explore: str, via: str) -> None:

         self.reachable_explores[(model, explore)].append(via)

-    def _get_looker_dashboard_element( # noqa: C901
+    def _get_looker_dashboard_element(
         self, element: DashboardElement
     ) -> Optional[LookerDashboardElement]:
         # Dashboard elements can use raw usage_queries against explores
4 changes: 2 additions & 2 deletions metadata-ingestion/src/datahub/ingestion/source/nifi.py

@@ -488,7 +488,7 @@ def rest_api_base_url(self):
     def get_report(self) -> SourceReport:
         return self.report

-    def update_flow(self, pg_flow_dto: Dict, recursion_level: int = 0) -> None: # noqa: C901
+    def update_flow(self, pg_flow_dto: Dict, recursion_level: int = 0) -> None:
         """
         Update self.nifi_flow with contents of the input process group `pg_flow_dto`
         """
@@ -894,7 +894,7 @@ def delete_provenance(self, provenance_uri):
         if not delete_response.ok:
             logger.error("failed to delete provenance ", provenance_uri)

-    def construct_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901
+    def construct_workunits(self) -> Iterable[MetadataWorkUnit]:
         rootpg = self.nifi_flow.root_process_group
         flow_name = rootpg.name # self.config.site_name
         flow_urn = self.make_flow_urn()
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/ingestion/source/openapi.py

@@ -270,7 +270,7 @@ def build_wu(
         mce = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
         return ApiWorkUnit(id=dataset_name, mce=mce)

-    def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901
+    def get_workunits_internal(self) -> Iterable[ApiWorkUnit]:
         config = self.config

         sw_dict = self.config.get_swagger()
@@ -111,7 +111,7 @@ def check_sw_version(sw_dict: dict) -> None:
     )


-def get_endpoints(sw_dict: dict) -> dict: # noqa: C901
+def get_endpoints(sw_dict: dict) -> dict:
     """
     Get all the URLs, together with their description and the tags
     """
@@ -33,7 +33,7 @@ class CatalogItem(BaseModel):
     )

     @validator("display_name", always=True)
-    def validate_diplay_name(cls, value, values): # noqa: N805
+    def validate_diplay_name(cls, value, values):
         if values["created_by"]:
             return values["created_by"].split("\\")[-1]
         return ""
4 changes: 2 additions & 2 deletions metadata-ingestion/src/datahub/ingestion/source/sql/athena.py

@@ -55,7 +55,7 @@
 except ImportError:
     _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])

-    def override(f: _F, /) -> _F: # noqa: F811
+    def override(f: _F, /) -> _F:
         return f


@@ -104,7 +104,7 @@ def get_view_definition(self, connection, view_name, schema=None, **kw):
         return "\n".join([r for r in res])

     @typing.no_type_check
-    def _get_column_type(self, type_: Union[str, Dict[str, Any]]) -> TypeEngine: # noqa: C901
+    def _get_column_type(self, type_: Union[str, Dict[str, Any]]) -> TypeEngine:
         """Derives the data type of the Athena column.
         This method is overwritten to extend the behavior of PyAthena.
@@ -67,7 +67,7 @@


 class HiveMetastoreConfigMode(StrEnum):
-    hive: str = "hive" # noqa: F811
+    hive: str = "hive"
     presto: str = "presto"
     presto_on_hive: str = "presto-on-hive"
     trino: str = "trino"
@@ -401,7 +401,7 @@ def loop_job_steps(
             data_job.add_property(name=data_name, value=str(data_value))
         yield from self.construct_job_workunits(data_job)

-    def loop_stored_procedures( # noqa: C901
+    def loop_stored_procedures(
         self,
         inspector: Inspector,
         schema: str,
@@ -635,7 +635,7 @@ def make_data_reader(self, inspector: Inspector) -> Optional[DataReader]:

         return None

-    def loop_tables( # noqa: C901
+    def loop_tables(
         self,
         inspector: Inspector,
         schema: str,
@@ -649,7 +649,7 @@ def __init__(self, config: TeradataConfig, ctx: PipelineContext):
         )

         # Disabling the below because the cached view definition is not the view definition the column in tablesv actually holds the last statement executed against the object... not necessarily the view definition
-        # setattr( # noqa: B010
+        # setattr(
         #     TeradataDialect,
         #     "get_view_definition",
         #     lambda self, connection, view_name, schema=None, **kw: optimized_get_view_definition(
@@ -746,7 +746,7 @@ def get_db_name(self, inspector: Inspector) -> str:
         else:
             raise Exception("Unable to get database name from Sqlalchemy inspector")

-    def cached_loop_tables( # noqa: C901
+    def cached_loop_tables(
         self,
         inspector: Inspector,
         schema: str,
@@ -782,7 +782,7 @@ def cached_get_table_properties(
                 break
         return description, properties, location

-    def cached_loop_views( # noqa: C901
+    def cached_loop_views(
         self,
         inspector: Inspector,
         schema: str,
@@ -13,7 +13,7 @@
 from datahub.ingestion.source.bigquery_v2.bigquery_audit import BigqueryTableIdentifier
 from datahub.metadata.schema_classes import SchemaFieldClass, SchemaMetadataClass
 from datahub.metadata.urns import DataPlatformUrn
-from datahub.sql_parsing._models import _TableName as _TableName # noqa: I250
+from datahub.sql_parsing._models import _TableName as _TableName
 from datahub.sql_parsing.sql_parsing_common import PLATFORMS_WITH_CASE_SENSITIVE_TABLES
 from datahub.utilities.file_backed_collections import ConnectionWrapper, FileBackedDict
 from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path
@@ -473,7 +473,7 @@ def _create_table_ddl_cll(
     return column_lineage


-def _select_statement_cll( # noqa: C901
+def _select_statement_cll(
     statement: _SupportedColumnLineageTypes,
     dialect: sqlglot.Dialect,
     root_scope: sqlglot.optimizer.Scope,
4 changes: 2 additions & 2 deletions metadata-ingestion/src/datahub/upgrade/upgrade.py

@@ -293,9 +293,9 @@ def is_client_server_compatible(client: VersionStats, server: VersionStats) -> i
     return server.version.micro - client.version.micro


-def _maybe_print_upgrade_message( # noqa: C901
+def _maybe_print_upgrade_message(
     version_stats: Optional[DataHubVersionStats],
-) -> None: # noqa: C901
+) -> None:
     days_before_cli_stale = 7
     days_before_quickstart_stale = 7
2 changes: 1 addition & 1 deletion metadata-ingestion/src/datahub/utilities/mapping.py

@@ -171,7 +171,7 @@ def __init__(
         self.owner_source_type = owner_source_type
         self.match_nested_props = match_nested_props

-    def process(self, raw_props: Mapping[str, Any]) -> Dict[str, Any]: # noqa: C901
+    def process(self, raw_props: Mapping[str, Any]) -> Dict[str, Any]:
         # Defining the following local variables -
         # operations_map - the final resulting map when operations are processed.
         # Against each operation the values to be applied are stored.
2 changes: 1 addition & 1 deletion metadata-ingestion/tests/conftest.py

@@ -41,7 +41,7 @@
 except ImportError:
     pass

-import freezegun # noqa: F401,E402
+import freezegun # noqa: E402

 # The freezegun library has incomplete type annotations.
 # See https://github.com/spulec/freezegun/issues/469
2 changes: 1 addition & 1 deletion metadata-ingestion/tests/test_helpers/docker_helpers.py

@@ -4,7 +4,7 @@

 import pytest

-from datahub.testing.docker_utils import ( # noqa: F401,I250
+from datahub.testing.docker_utils import (
     docker_compose_runner as docker_compose_runner,
     is_responsive as is_responsive,
     wait_for_port as wait_for_port,
@@ -114,7 +114,7 @@ def test_bigqueryv2_filters():
         )
         OR
         protoPayload.metadata.tableDataRead.reason = "JOB"
-    )""" # noqa: W293
+    )"""

     corrected_start_time = config.start_time - config.max_query_duration
     corrected_end_time = config.end_time + config.max_query_duration
