Switching to Ruff for formatting and linting #847

Merged (4 commits) on Nov 19, 2024
Changes from 3 commits
20 changes: 0 additions & 20 deletions .pre-commit-config.yaml

This file was deleted.
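
No replacement hook config lands in this PR. If pre-commit support is wanted again later, Ruff ships official hooks; a minimal sketch, with an illustrative rev pin that is not from this PR:

    repos:
      - repo: https://github.com/astral-sh/ruff-pre-commit
        rev: v0.7.4  # illustrative pin only, not from this PR
        hooks:
          - id: ruff          # linting
          - id: ruff-format   # formatting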

1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -31,6 +31,7 @@
 - Prepare for python typing deprecations ([837](https://github.com/databricks/dbt-databricks/pull/837))
 - Fix behavior flag use in init of DatabricksAdapter (thanks @VersusFacit!) ([836](https://github.com/databricks/dbt-databricks/pull/836))
 - Restrict pydantic to V1 per dbt Labs' request ([843](https://github.com/databricks/dbt-databricks/pull/843))
+- Switching to Ruff for formatting and linting ([847](https://github.com/databricks/dbt-databricks/pull/847))
 
 ## dbt-databricks 1.8.7 (October 10, 2024)

4 changes: 0 additions & 4 deletions black.ini

This file was deleted.

2 changes: 0 additions & 2 deletions dbt/adapters/databricks/api_client.py
@@ -142,7 +142,6 @@ def get_folder(self, _: str, schema: str) -> str:


 class CurrUserApi(DatabricksApi):
-
     def __init__(self, session: Session, host: str):
         super().__init__(session, host, "/api/2.0/preview/scim/v2")
         self._user = ""
@@ -401,7 +400,6 @@ def get(self, job_id: str) -> dict[str, Any]:


 class WorkflowJobApi(DatabricksApi):
-
     def __init__(self, session: Session, host: str):
         super().__init__(session, host, "/api/2.1/jobs")

1 change: 0 additions & 1 deletion dbt/adapters/databricks/connections.py
@@ -868,7 +868,6 @@ def cleanup_all(self) -> None:
     def _update_compute_connection(
         self, conn: DatabricksDBTConnection, new_name: str
     ) -> DatabricksDBTConnection:
-
         if conn.name == new_name and conn.state == ConnectionState.OPEN:
             # Found a connection and nothing to do, so just return it
             return conn
5 changes: 2 additions & 3 deletions dbt/adapters/databricks/impl.py
@@ -68,7 +68,6 @@
 from dbt.adapters.databricks.relation_configs.tblproperties import TblPropertiesConfig
 from dbt.adapters.databricks.utils import get_first_row, handle_missing_objects
 from dbt.adapters.databricks.utils import redact_credentials
-from dbt.adapters.databricks.utils import undefined_proof
 from dbt.adapters.relation_configs import RelationResults
 from dbt.adapters.spark.impl import DESCRIBE_TABLE_EXTENDED_MACRO_NAME
 from dbt.adapters.spark.impl import GET_COLUMNS_IN_RELATION_RAW_MACRO_NAME
@@ -165,7 +164,6 @@ def get_identifier_list_string(table_names: set[str]) -> str:
     return _identifier
 
 
-@undefined_proof
 class DatabricksAdapter(SparkAdapter):
     INFORMATION_COMMENT_REGEX = re.compile(r"Comment: (.*)\n[A-Z][A-Za-z ]+:", re.DOTALL)

@@ -364,7 +362,8 @@ def _get_hive_relations(
         new_rows: list[tuple[str, Optional[str]]]
         if all([relation.database, relation.schema]):
             tables = self.connections.list_tables(
-                database=relation.database, schema=relation.schema  # type: ignore[arg-type]
+                database=relation.database,  # type: ignore[arg-type]
+                schema=relation.schema,  # type: ignore[arg-type]
             )
 
             new_rows = []
@@ -211,7 +211,6 @@ def __init__(
         self.additional_job_settings = parsed_model.config.python_job_config.dict()
 
     def compile(self, path: str) -> PythonJobDetails:
-
         job_spec: dict[str, Any] = {
             "task_key": "inner_notebook",
             "notebook_task": {
38 changes: 0 additions & 38 deletions dbt/adapters/databricks/utils.py
@@ -1,6 +1,4 @@
 from collections.abc import Callable
-import functools
-import inspect
 import re
 from typing import Any
 from typing import TYPE_CHECKING
@@ -45,42 +43,6 @@ def remove_undefined(v: Any) -> Any:
     return None if isinstance(v, Undefined) else v
 
 
-def undefined_proof(cls: type[A]) -> type[A]:
-    for name in cls._available_:
-        func = getattr(cls, name)
-        if not callable(func):
-            continue
-        try:
-            static_attr = inspect.getattr_static(cls, name)
-            isstatic = isinstance(static_attr, staticmethod)
-            isclass = isinstance(static_attr, classmethod)
-        except AttributeError:
-            isstatic = False
-            isclass = False
-        wrapped_function = _wrap_function(func.__func__ if isclass else func)
-        setattr(
-            cls,
-            name,
-            (
-                staticmethod(wrapped_function)
-                if isstatic
-                else classmethod(wrapped_function) if isclass else wrapped_function
-            ),
-        )
-
-    return cls
-
-
-def _wrap_function(func: Callable) -> Callable:
-    @functools.wraps(func)
-    def wrapper(*args: Any, **kwargs: Any) -> Any:
-        new_args = [remove_undefined(arg) for arg in args]
-        new_kwargs = {key: remove_undefined(value) for key, value in kwargs.items()}
-        return func(*new_args, **new_kwargs)
-
-    return wrapper
-
-
 def remove_ansi(line: str) -> str:
     ansi_escape = re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]")
     return ansi_escape.sub("", line)
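
For readers skimming the removal: undefined_proof rewrote every method named in an adapter class's _available_ set so that Jinja Undefined arguments arrived as None, with _wrap_function doing the per-call scrubbing. A self-contained sketch of that behavior, assuming jinja2's Undefined and using illustrative names:

    import functools

    from jinja2 import Undefined

    def _clean(value):
        # same normalization as the retained remove_undefined helper
        return None if isinstance(value, Undefined) else value

    def wrap(func):
        # same idea as the removed _wrap_function: scrub args before the call
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return func(*[_clean(a) for a in args],
                        **{k: _clean(v) for k, v in kwargs.items()})
        return wrapper

    @wrap
    def echo(x):
        return x

    print(echo(Undefined()))  # None
    print(echo("ok"))         # ok

The retained remove_undefined helper is still available to call sites that need this normalization without the blanket wrapper.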
2 changes: 2 additions & 0 deletions dev-requirements.txt
@@ -1,3 +1,5 @@
+ruff
+
 black~=24.3.0
Contributor commented on this line:

    Can we remove old formatters here?

Collaborator (author) replied:

    Good catch.

 flake8
 flaky
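
Note: this is the three-commit view, so the black~=24.3.0 and flake8 pins still appear above; per the exchange, they are presumably dropped in the fourth commit, leaving ruff as the only formatting/linting dependency here.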
6 changes: 6 additions & 0 deletions ruff.toml
@@ -0,0 +1,6 @@
+line-length = 100
+target-version = 'py39'
+
+[lint]
+select = ["E", "W", "F"]
+ignore = ["E203"]
1 change: 0 additions & 1 deletion tests/functional/adapter/columns/test_get_columns.py
@@ -20,7 +20,6 @@ def setup(self, project):

@pytest.fixture(scope="class")
def expected_columns(self):

return [
DatabricksColumn(
column="struct_col",
5 changes: 2 additions & 3 deletions tests/unit/macros/relations/test_table_macros.py
@@ -29,9 +29,8 @@ def context(self, template) -> dict:

     def render_create_table_as(self, template_bundle, temporary=False, sql="select 1"):
         external_path = f"/mnt/root/{template_bundle.relation.identifier}"
-        template_bundle.template.globals["adapter"].compute_external_path.return_value = (
-            external_path
-        )
+        adapter_mock = template_bundle.template.globals["adapter"]
+        adapter_mock.compute_external_path.return_value = external_path
         return self.run_macro(
             template_bundle.template,
             "databricks__create_table_as",
8 changes: 4 additions & 4 deletions tox.ini
@@ -5,8 +5,8 @@ envlist = linter, unit
 [testenv:linter]
 basepython = python3
 commands =
-    {envpython} -m black --config black.ini --check dbt tests
-    {envpython} -m flake8 --select=E,W,F --ignore=E203,W503 --max-line-length=100 dbt tests
+    {envpython} -m ruff format --check
+    {envpython} -m ruff check
     {envpython} -m mypy --config-file mypy.ini --explicit-package-bases dbt tests
 passenv =
     DBT_*
@@ -15,9 +15,9 @@ deps =
     -r{toxinidir}/dev-requirements.txt
     -r{toxinidir}/requirements.txt
 
-[testenv:black]
+[testenv:format]
 basepython = python3
-commands = {envpython} -m black --config black.ini .
+commands = {envpython} -m ruff format
 passenv =
     DBT_*
     PYTEST_ADDOPTS
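
To exercise the renamed envs locally, something like:

    tox -e linter   # ruff format --check, ruff check, then mypy
    tox -e format   # rewrites files in place via ruff format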