Fix disabling validation for PySpark DataFrame Schemas (#1407)
* Update container.py

Signed-off-by: Maximilian Speicher <[email protected]>

* fix existing test

Signed-off-by: Maximilian Speicher <[email protected]>

---------

Signed-off-by: Maximilian Speicher <[email protected]>
maxispeicher authored Nov 11, 2023
1 parent db356dc commit adf0494
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion pandera/api/pyspark/container.py
@@ -327,7 +327,7 @@ def validate(
         [Row(product='Bread', price=9), Row(product='Butter', price=15)]
         """
         if not CONFIG.validation_enabled:
-            return
+            return check_obj
         error_handler = ErrorHandler(lazy)

         return self._validate(
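The core of the fix: when validation is globally disabled, DataFrameSchema.validate now hands back the DataFrame it was given (check_obj) instead of returning None. A minimal sketch of the resulting behavior, assuming validation is disabled via the PANDERA_VALIDATION_ENABLED environment variable before pandera is imported; the schema and data below are illustrative and not part of this commit:

import os

# Assumption: CONFIG.validation_enabled is driven by this environment variable,
# read when pandera is imported, so it must be set first.
os.environ["PANDERA_VALIDATION_ENABLED"] = "False"

import pyspark.sql.types as T
from pyspark.sql import SparkSession
import pandera.pyspark as pa

class ProductSchema(pa.DataFrameModel):
    product: T.StringType()
    price: T.IntegerType()

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([("Bread", 9), ("Butter", 15)], ["product", "price"])

# Before this commit, validate() hit the early `return` and produced None when
# validation was disabled; with the fix it returns the DataFrame untouched.
validated = ProductSchema.validate(df)
assert validated is not None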
4 changes: 2 additions & 2 deletions tests/pyspark/test_pyspark_config.py
@@ -44,8 +44,8 @@ class TestSchema(DataFrameModel):
         }

         assert CONFIG.dict() == expected
-        assert pandra_schema.validate(input_df) is None
-        assert TestSchema.validate(input_df) is None
+        assert pandra_schema.validate(input_df)
+        assert TestSchema.validate(input_df)

     # pylint:disable=too-many-locals
     def test_schema_only(self, spark, sample_spark_schema):
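The existing test toggled validation off and asserted that validate() returned None; with the fix it now asserts a non-None (truthy) return value for both the object-based pandra_schema and the class-based TestSchema. A hedged sketch of the same check using the object-based API, continuing from the example above; the schema definition here is illustrative, not copied from the test fixtures:

import pyspark.sql.types as T
import pandera.pyspark as pa

# Object-based counterpart of the class-based schema above; with validation
# disabled it should likewise pass the input DataFrame straight through.
schema = pa.DataFrameSchema(
    {
        "product": pa.Column(T.StringType()),
        "price": pa.Column(T.IntegerType()),
    }
)

out = schema.validate(df)  # df from the sketch above
# A plain non-None check replaces the old `is None` assertion.
assert out is not None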
