Use generic types for config objects. #1325

Status: Open (wants to merge 1 commit into base: main)
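
For context, a minimal sketch (not part of the diff, with hypothetical column names) of what the relaxed annotations permit at call sites: a tuple of column names satisfies Sequence[str] where a List[str] annotation would have forced a list, and Mapping[str, Any] only promises key-based reads for the *_format_kwargs options instead of requiring a mutable dict.

import pandera as pa

# With unique annotated as Optional[Union[str, Sequence[str]]], a tuple of
# column names type-checks the same as a list under mypy or pyright.
schema = pa.DataFrameSchema(
    columns={
        "region": pa.Column(str),
        "store_id": pa.Column(int),
    },
    unique=("region", "store_id"),
)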
8 changes: 4 additions & 4 deletions pandera/api/pandas/container.py
@@ -6,7 +6,7 @@
 import os
 import warnings
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union, cast, overload
+from typing import Any, Dict, List, Optional, Sequence, Union, cast, overload
 
 import pandas as pd
 
@@ -47,7 +47,7 @@ def __init__(
         strict: StrictType = False,
         name: Optional[str] = None,
         ordered: bool = False,
-        unique: Optional[Union[str, List[str]]] = None,
+        unique: Optional[Union[str, Sequence[str]]] = None,
         report_duplicates: UniqueSettings = "all",
         unique_column_names: bool = False,
         add_missing_columns: bool = False,
@@ -188,12 +188,12 @@ def coerce(self, value: bool) -> None:
         self._coerce = value
 
     @property
-    def unique(self):
+    def unique(self) -> Optional[Sequence[str]]:
         """List of columns that should be jointly unique."""
         return self._unique
 
     @unique.setter
-    def unique(self, value: Optional[Union[str, List[str]]]) -> None:
+    def unique(self, value: Optional[Union[str, Sequence[str]]]) -> None:
        """Set unique attribute."""
        self._unique = [value] if isinstance(value, str) else value
 
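The setter keeps its existing normalization: a bare string is wrapped in a one-element list, while any other Sequence[str], a tuple for example, is stored as given. A quick illustration, again with hypothetical column names:

import pandera as pa

schema = pa.DataFrameSchema({"region": pa.Column(str), "store_id": pa.Column(int)})

schema.unique = "region"                 # normalized to ["region"] by the setter
schema.unique = ("region", "store_id")   # any other Sequence[str] is kept as-is
print(schema.unique)                     # ('region', 'store_id')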
8 changes: 4 additions & 4 deletions pandera/api/pandas/model_config.py
@@ -1,6 +1,6 @@
 """Class-based dataframe model API configuration for pandas."""
 
-from typing import Any, Callable, Dict, List, Optional, Union
+from typing import Any, Callable, Mapping, Optional, Sequence, Union
 
 from pandera.api.base.model_config import BaseModelConfig
 from pandera.api.pandas.types import PandasDtypeInputTypes, StrictType
@@ -24,7 +24,7 @@ class BaseConfig(BaseModelConfig): # pylint:disable=R0903
     drop_invalid_rows: bool = False #: drop invalid rows on validation
 
     #: make sure certain column combinations are unique
-    unique: Optional[Union[str, List[str]]] = None
+    unique: Optional[Union[str, Sequence[str]]] = None
 
     #: make sure all specified columns are in the validated dataframe -
     #: if ``"filter"``, removes columns not specified in the schema
@@ -61,7 +61,7 @@ class BaseConfig(BaseModelConfig): # pylint:disable=R0903
     #: converts the object of type ``from_format`` to a pandera-validate-able
     #: data structure. The reader function is implemented in the pandera.typing
     #: generic types via the ``from_format`` and ``to_format`` methods.
-    from_format_kwargs: Optional[Dict[str, Any]] = None
+    from_format_kwargs: Optional[Mapping[str, Any]] = None
 
     #: data format to serialize into after validation. This option only applies
     #: to schemas used in the context of the pandera type constructor
@@ -76,7 +76,7 @@ class BaseConfig(BaseModelConfig): # pylint:disable=R0903
     #: converts the pandera-validate-able object to type ``to_format``.
     #: The writer function is implemented in the pandera.typing
     #: generic types via the ``from_format`` and ``to_format`` methods.
-    to_format_kwargs: Optional[Dict[str, Any]] = None
+    to_format_kwargs: Optional[Mapping[str, Any]] = None
 
     #: a dictionary object to store key-value data at schema level
     metadata: Optional[dict] = None
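
A similar hedged sketch for the class-based API: a DataFrameModel whose Config relies on the relaxed annotations, passing a tuple for unique and an immutable mapping for to_format_kwargs. The model, its fields, and the CSV kwargs are invented for illustration.

from types import MappingProxyType

import pandera as pa
from pandera.typing import Series


class Sales(pa.DataFrameModel):
    region: Series[str]
    store_id: Series[int]

    class Config:
        # a tuple satisfies Optional[Union[str, Sequence[str]]]
        unique = ("region", "store_id")
        # a read-only mapping satisfies Optional[Mapping[str, Any]]
        to_format = "csv"
        to_format_kwargs = MappingProxyType({"index": False})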
8 changes: 4 additions & 4 deletions pandera/api/pyspark/container.py
@@ -6,7 +6,7 @@
 import os
 import warnings
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union, cast, overload
+from typing import Any, Dict, List, Optional, Sequence, Union, cast, overload
 
 from pyspark.sql import DataFrame
 
@@ -40,7 +40,7 @@ def __init__(
         strict: StrictType = False,
         name: Optional[str] = None,
         ordered: bool = False,
-        unique: Optional[Union[str, List[str]]] = None,
+        unique: Optional[Union[str, Sequence[str]]] = None,
         report_duplicates: UniqueSettings = "all",
         unique_column_names: bool = False,
         title: Optional[str] = None,
@@ -169,12 +169,12 @@ def coerce(self, value: bool) -> None:
         self._coerce = value
 
     @property
-    def unique(self):
+    def unique(self) -> Optional[Sequence[str]]:
         """List of columns that should be jointly unique."""
         return self._unique
 
     @unique.setter
-    def unique(self, value: Optional[Union[str, List[str]]]) -> None:
+    def unique(self, value: Optional[Union[str, Sequence[str]]]) -> None:
        """Set unique attribute."""
        self._unique = [value] if isinstance(value, str) else value
 
8 changes: 4 additions & 4 deletions pandera/api/pyspark/model_config.py
@@ -1,6 +1,6 @@
 """Class-based dataframe model API configuration for pyspark."""
 
-from typing import Any, Callable, Dict, List, Optional, Union
+from typing import Any, Callable, Mapping, Optional, Sequence, Union
 
 from pandera.api.base.model_config import BaseModelConfig
 from pandera.api.pyspark.types import PySparkDtypeInputTypes, StrictType
@@ -23,7 +23,7 @@ class BaseConfig(BaseModelConfig): # pylint:disable=R0903
     coerce: bool = False #: coerce types of all schema components
 
     #: make sure certain column combinations are unique
-    unique: Optional[Union[str, List[str]]] = None
+    unique: Optional[Union[str, Sequence[str]]] = None
 
     #: make sure all specified columns are in the validated dataframe -
     #: if ``"filter"``, removes columns not specified in the schema
@@ -44,7 +44,7 @@ class BaseConfig(BaseModelConfig): # pylint:disable=R0903
     #: converts the object of type ``from_format`` to a pandera-validate-able
     #: data structure. The reader function is implemented in the pandera.typing
     #: generic types via the ``from_format`` and ``to_format`` methods.
-    from_format_kwargs: Optional[Dict[str, Any]] = None
+    from_format_kwargs: Optional[Mapping[str, Any]] = None
 
     #: data format to serialize into after validation. This option only applies
     #: to schemas used in the context of the pandera type constructor
@@ -59,7 +59,7 @@ class BaseConfig(BaseModelConfig): # pylint:disable=R0903
     #: converts the pandera-validate-able object to type ``to_format``.
     #: The writer function is implemented in the pandera.typing
     #: generic types via the ``from_format`` and ``to_format`` methods.
-    to_format_kwargs: Optional[Dict[str, Any]] = None
+    to_format_kwargs: Optional[Mapping[str, Any]] = None
 
     #: a dictionary object to store key-value data at schema level
     metadata: Optional[dict] = None