diff --git a/pandera/api/pyspark/container.py b/pandera/api/pyspark/container.py
index cef8675c9..880c1dd1b 100644
--- a/pandera/api/pyspark/container.py
+++ b/pandera/api/pyspark/container.py
@@ -327,7 +327,7 @@ def validate(
         [Row(product='Bread', price=9), Row(product='Butter', price=15)]
         """
         if not CONFIG.validation_enabled:
-            return
+            return check_obj
         error_handler = ErrorHandler(lazy)
 
         return self._validate(
diff --git a/tests/pyspark/test_pyspark_config.py b/tests/pyspark/test_pyspark_config.py
index 6005c0c16..82edb7231 100644
--- a/tests/pyspark/test_pyspark_config.py
+++ b/tests/pyspark/test_pyspark_config.py
@@ -44,8 +44,8 @@ class TestSchema(DataFrameModel):
         }
         assert CONFIG.dict() == expected
 
-        assert pandra_schema.validate(input_df) is None
-        assert TestSchema.validate(input_df) is None
+        assert pandra_schema.validate(input_df)
+        assert TestSchema.validate(input_df)
 
     # pylint:disable=too-many-locals
     def test_schema_only(self, spark, sample_spark_schema):
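
Context for the change above: with validation disabled, validate() now returns the input DataFrame instead of None, so callers can keep using the returned object unconditionally. Below is a minimal usage sketch, assuming pandera.pyspark's DataFrameSchema/Column API and the CONFIG object referenced in the diff; the schema contents and the direct toggling of CONFIG.validation_enabled are illustrative assumptions, not taken from this PR (the flag is normally driven by the PANDERA_VALIDATION_ENABLED environment variable).

    # Sketch only -- not part of the diff.
    import pyspark.sql.types as T
    from pyspark.sql import SparkSession

    import pandera.pyspark as pa
    from pandera.config import CONFIG

    spark = SparkSession.builder.getOrCreate()
    df = spark.createDataFrame([("Bread", 9), ("Butter", 15)], ["product", "price"])

    # Illustrative schema mirroring the docstring example in container.py.
    schema = pa.DataFrameSchema(
        {
            "product": pa.Column(T.StringType()),
            "price": pa.Column(T.LongType()),
        }
    )

    CONFIG.validation_enabled = False  # assumed settable here; usually set via env var
    validated = schema.validate(df)    # previously returned None; now returns check_obj
    assert validated is df             # the same DataFrame comes back unchanged

This is also why the test assertions change from "is None" to plain truthiness checks: a non-empty DataFrame is returned whether or not validation actually ran.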