diff --git a/pandera/backends/pyspark/container.py b/pandera/backends/pyspark/container.py
index c4249cbcb..96742682b 100644
--- a/pandera/backends/pyspark/container.py
+++ b/pandera/backends/pyspark/container.py
@@ -532,10 +532,7 @@ def _check_uniqueness(
         :param schema: schema object.
         :returns: dataframe checked.
         """
-        # Use unique definition of columns as first option
-        # unique_columns = [col.unique for col in schema.columns.values()]
-
-        # Overwrite it, if schemas's Config class has a unique declaration
+        # Determine unique columns based on schema's config
         unique_columns = (
             [schema.unique]
             if isinstance(schema.unique, str)
diff --git a/pandera/backends/pyspark/decorators.py b/pandera/backends/pyspark/decorators.py
index aa93e5dbd..9c202b4be 100644
--- a/pandera/backends/pyspark/decorators.py
+++ b/pandera/backends/pyspark/decorators.py
@@ -88,10 +88,6 @@ def _get_check_obj():
                 Returns:
                     The DataFrame object.
                 """
-                if kwargs:
-                    for value in kwargs.values():
-                        if isinstance(value, pyspark.sql.DataFrame):
-                            return value
                 if args:
                     for value in args:
                         if isinstance(value, pyspark.sql.DataFrame):
@@ -109,7 +105,7 @@ def _get_check_obj():
                         stacklevel=2,
                     )
                     # If the function was skip, return the `check_obj` value anyway,
-                    # if it's present as a kwarg or an arg
+                    # given that some return value is expected
                     return _get_check_obj()

             elif scope == ValidationScope.DATA:
@@ -124,7 +120,7 @@ def _get_check_obj():
                         stacklevel=2,
                     )
                     # If the function was skip, return the `check_obj` value anyway,
-                    # if it's present as a kwarg or an arg
+                    # given that some return value is expected
                     return _get_check_obj()

         return wrapper