How to use the custom_check method in pandera

Best Python code snippet using pandera_python
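
Before walking through the full test suites below, here is a minimal sketch of the core pattern: register a check function with pandera.extensions.register_check_method, after which it becomes available as a method on pa.Check. The schema and data values in this sketch are illustrative, not taken from the snippets that follow.

import pandas as pd
import pandera as pa
from pandera import extensions

# Register a custom check; "val" is declared as the check's statistic so it
# can be passed as a keyword argument when the check is constructed.
@extensions.register_check_method(statistics=["val"])
def custom_check(pandas_obj, *, val):
    # Vectorized by default: receives the whole Series/DataFrame and
    # returns a boolean mask of the same shape.
    return pandas_obj == val

# The registered method is now available on the Check namespace.
schema = pa.DataFrameSchema({"col1": pa.Column(int, pa.Check.custom_check(val=10))})
print(schema.validate(pd.DataFrame({"col1": [10, 10, 10]})))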

test_extensions.py

Source: test_extensions.py (GitHub)

...
        pd.Series([10, 10, 10]),
        pd.DataFrame([[10, 10, 10], [10, 10, 10]]),
    ],
)
def test_register_vectorized_custom_check(
    custom_check_teardown: None, data: Union[pd.Series, pd.DataFrame]
) -> None:
    """Test registering a vectorized custom check."""

    @extensions.register_check_method(
        statistics=["val"],
        supported_types=(pd.Series, pd.DataFrame),
        check_type="vectorized",
    )
    def custom_check(pandas_obj, *, val):
        return pandas_obj == val

    check = Check.custom_check(val=10)
    check_result = check(data)
    assert check_result.check_passed

    for kwargs in [
        {"element_wise": True},
        {"element_wise": False},
        {"groupby": "column"},
        {"groups": ["group1", "group2"]},
    ]:
        with pytest.warns(UserWarning):
            Check.custom_check(val=10, **kwargs)

    with pytest.raises(
        ValueError,
        match="method with name 'custom_check' already defined",
    ):
        # pylint: disable=function-redefined
        @extensions.register_check_method(statistics=["val"])
        def custom_check(pandas_obj, val):  # noqa
            return pandas_obj != val


@pytest.mark.parametrize(
    "data",
    [
        pd.Series([10, 10, 10]),
        pd.DataFrame([[10, 10, 10], [10, 10, 10]]),
    ],
)
def test_register_element_wise_custom_check(
    custom_check_teardown: None, data: Union[pd.Series, pd.DataFrame]
) -> None:
    """Test registering an element-wise custom check."""

    @extensions.register_check_method(
        statistics=["val"],
        supported_types=(pd.Series, pd.DataFrame),
        check_type="element_wise",
    )
    def custom_check(element, *, val):
        return element == val

    check = Check.custom_check(val=10)
    check_result = check(data)
    assert check_result.check_passed

    for kwargs in [
        {"element_wise": True},
        {"element_wise": False},
        {"groupby": "column"},
        {"groups": ["group1", "group2"]},
    ]:
        with pytest.warns(UserWarning):
            Check.custom_check(val=10, **kwargs)

    with pytest.raises(
        ValueError,
        match="Element-wise checks should support DataFrame and Series "
        "validation",
    ):

        @extensions.register_check_method(
            supported_types=pd.Series,
            check_type="element_wise",
        )
        def invalid_custom_check(*args):
            pass


def test_register_custom_groupby_check(custom_check_teardown: None) -> None:
    """Test registering a custom groupby check."""

    @extensions.register_check_method(
        statistics=["group_a", "group_b"],
        supported_types=(pd.Series, pd.DataFrame),
        check_type="groupby",
    )
    def custom_check(dict_groups, *, group_a, group_b):
        """
        Test that the mean values in group A is larger than that of group B.

        Note that this function can handle groups of both dataframes and
        series.
        """
        return (
            dict_groups[group_a].values.mean()
            > dict_groups[group_b].values.mean()
        )

    # column groupby check
    data_column_check = pd.DataFrame(
        {
            "col1": [20, 20, 10, 10],
            "col2": list("aabb"),
        }
    )

    schema_column_check = pa.DataFrameSchema(
        {
            "col1": pa.Column(
                int,
                Check.custom_check(group_a="a", group_b="b", groupby="col2"),
            ),
            "col2": pa.Column(str),
        }
    )

    assert isinstance(schema_column_check(data_column_check), pd.DataFrame)

    # dataframe groupby check
    data_df_check = pd.DataFrame(
        {
            "col1": [20, 20, 10, 10],
            "col2": [30, 30, 5, 5],
            "col3": [10, 10, 1, 1],
        },
        index=pd.Index(list("aabb"), name="my_index"),
    )

    schema_df_check = pa.DataFrameSchema(
        columns={
            "col1": pa.Column(int),
            "col2": pa.Column(int),
            "col3": pa.Column(int),
        },
        index=pa.Index(str, name="my_index"),
        checks=Check.custom_check(
            group_a="a", group_b="b", groupby="my_index"
        ),
    )

    assert isinstance(schema_df_check(data_df_check), pd.DataFrame)

    for kwargs in [{"element_wise": True}, {"element_wise": False}]:
        with pytest.warns(UserWarning):
            Check.custom_check(val=10, **kwargs)


@pytest.mark.parametrize(
    "supported_types",
    [
        1,
        10.0,
        "foo",
        {"foo": "bar"},
        {1: 10},
        ["foo", "bar"],
        [1, 10],
        ("foo", "bar"),
        (1, 10),
    ],
)
def test_register_check_invalid_supported_types(supported_types: Any) -> None:
    """Test that TypeError is raised on invalid supported_types arg."""
    with pytest.raises(TypeError):

        @extensions.register_check_method(supported_types=supported_types)
        def custom_check(*args, **kwargs):
            pass


@pytest.mark.skipif(
    not st.HAS_HYPOTHESIS, reason='needs "strategies" module dependencies'
)
def test_register_check_with_strategy(custom_check_teardown: None) -> None:
    """Test registering a custom check with a data generation strategy."""
    import hypothesis  # pylint: disable=import-outside-toplevel,import-error

    def custom_ge_strategy(
        pandas_dtype: DataType,
        strategy: Optional[st.SearchStrategy] = None,
        *,
        min_value: Any,
    ) -> st.SearchStrategy:
        if strategy is None:
...


test_custom_check.py

Source: test_custom_check.py (GitHub)

import pytest
import numpy as np
from core.dku_config.custom_check import CustomCheck, CustomCheckError


class TestCustomCheck:
    def test_init(self):
        custom_check = CustomCheck(
            type='exists'
        )
        assert custom_check.type == 'exists'
        with pytest.raises(CustomCheckError):
            _ = CustomCheck(
                type='unknown_type'
            )

    def test_exists(self):
        custom_check = CustomCheck(
            type='exists'
        )
        assert custom_check.run('test') is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run('')
        with pytest.raises(CustomCheckError):
            _ = custom_check.run([])
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(None)

    def test_in(self):
        custom_check = CustomCheck(
            type='in',
            op=[1, 2, 3]
        )
        assert custom_check.run(1) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(4)

    def test_not_in(self):
        custom_check = CustomCheck(
            type='not_in',
            op=[1, 2, 3]
        )
        assert custom_check.run(4) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(1)

    def test_eq(self):
        custom_check = CustomCheck(
            type='eq',
            op=5
        )
        assert custom_check.run(5) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run('5')
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-2)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-np.Inf)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(None)

    def test_sup(self):
        custom_check = CustomCheck(
            type='sup',
            op=5
        )
        assert custom_check.run(7) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(3)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-2)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-np.Inf)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(5)

    def test_inf(self):
        custom_check = CustomCheck(
            type='inf',
            op=5
        )
        assert custom_check.run(2) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(8)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(np.Inf)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(5)

    def test_sup_eq(self):
        custom_check = CustomCheck(
            type='sup_eq',
            op=5
        )
        assert custom_check.run(7) is None
        assert custom_check.run(5) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(3)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-2)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-np.Inf)

    def test_inf_eq(self):
        custom_check = CustomCheck(
            type='inf_eq',
            op=5
        )
        assert custom_check.run(2) is None
        assert custom_check.run(5) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(8)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(np.Inf)

    def test_between(self):
        custom_check = CustomCheck(
            type='between',
            op=(3, 8)
        )
        assert custom_check.run(3) is None
        assert custom_check.run(5) is None
        assert custom_check.run(8) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(1)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(20)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-np.Inf)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(np.Inf)

    def test_between_strict(self):
        custom_check = CustomCheck(
            type='between_strict',
            op=(3, 8)
        )
        assert custom_check.run(5) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(1)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(20)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(3)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(8)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(-np.Inf)
        with pytest.raises(CustomCheckError):
            _ = custom_check.run(np.Inf)

    def test_is_type(self):
        custom_check_list = CustomCheck(
            type='is_type',
            op=list
        )
        assert custom_check_list.run([1, 2, 3, 4]) is None
        assert custom_check_list.run([]) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check_list.run(None)
        with pytest.raises(CustomCheckError):
            _ = custom_check_list.run("test")
        with pytest.raises(CustomCheckError):
            _ = custom_check_list.run({1, 2, 3})
        custom_check_float = CustomCheck(
            type='is_type',
            op=float
        )
        assert custom_check_float.run(3.4) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check_float.run("test")
        with pytest.raises(CustomCheckError):
            _ = custom_check_float.run(None)
        with pytest.raises(CustomCheckError):
            _ = custom_check_float.run(0)
        with pytest.raises(CustomCheckError):
            _ = custom_check_float.run(4)

    def test_custom(self):
        assert CustomCheck(type='custom', op=1 == 1).run() is None
        assert CustomCheck(type='custom', op=len([1, 2, 3, 4]) == 4).run() is None
        with pytest.raises(CustomCheckError):
            CustomCheck(type='custom', op=3 == 4).run()

    def test_match(self):
        custom_check_match = CustomCheck(
            type='match',
            op=r"^(?:(?:\+|00)33[\s.-]{0,3}(?:\(0\)[\s.-]{0,3})?|0)[1-9](?:(?:[\s.-]?\d{2}){4}|\d{2}(?:[\s.-]?\d{3}){2})$"
        )
        assert custom_check_match.run('0234678956') is None
        with pytest.raises(CustomCheckError):
            _ = custom_check_match.run('abc')

    def test_is_castable(self):
        custom_check_match = CustomCheck(
            type='is_castable',
            op=int
        )
        assert custom_check_match.run(4) is None
        assert custom_check_match.run('4') is None
        assert custom_check_match.run(3.8) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check_match.run('abc')
        with pytest.raises(CustomCheckError):
            _ = custom_check_match.run('3.8')

    def test_is_subset(self):
        custom_check_match = CustomCheck(
            type='is_subset',
            op=[1, 2, 3, "abc"]
        )
        assert custom_check_match.run([1, 2]) is None
        assert custom_check_match.run([1, 1, 2]) is None
        assert custom_check_match.run([]) is None
        assert custom_check_match.run(["abc"]) is None
        with pytest.raises(CustomCheckError):
            _ = custom_check_match.run(["cde"])
        custom_check_match = CustomCheck(
            type='is_subset',
            op=["a", "b", "c"]
        )
        assert custom_check_match.run("ab") is None
...
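
The tests above exercise a CustomCheck class from a Dataiku plugin's core.dku_config module rather than pandera itself. Judging only from the behaviour shown, run() returns None when the value passes and raises CustomCheckError when it does not; here is a minimal usage sketch under that assumption (the values are illustrative).

from core.dku_config.custom_check import CustomCheck, CustomCheckError

# 'between' accepts values inside the inclusive range given by op.
range_check = CustomCheck(type='between', op=(3, 8))

try:
    range_check.run(5)   # passes: returns None
    range_check.run(20)  # fails: raises CustomCheckError
except CustomCheckError as err:
    print(f"validation failed: {err}")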

__init__.py

Source: __init__.py (GitHub)

...
    :keyword delete_after: How long to wait before deleting the response. Defaults to None (won't delete)
    :return: discord.Message or [?empty] string
    """
    if not custom_check:
        def custom_check(m: discord.Message):
            return m.author == ctx.author and m.channel == ctx.channel
    try:
        resp = await ctx.bot.wait_for("message", check=custom_check, timeout=timeout)
    except asyncio.TimeoutError:
        if silence_timeout_error:
            return ""
        raise
    else:
        if kwargs.get("delete_after") and ctx.channel.permissions_for(ctx.me).manage_messages:
            await resp.delete(delay=kwargs["delete_after"])
        if kwargs.get("return_content"):
            return resp.content
        else:
            return resp
...
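
In this snippet, custom_check is a caller-supplied predicate for discord.py's wait_for; when none is given, the helper falls back to matching the invoking author and channel. Here is a hedged sketch of passing your own predicate directly to bot.wait_for (the command name, prompt text, and timeout are illustrative, not from the source above).

import asyncio
import discord
from discord.ext import commands

intents = discord.Intents.default()
intents.message_content = True  # needed to read message text in discord.py 2.x
bot = commands.Bot(command_prefix="!", intents=intents)

@bot.command()
async def confirm(ctx: commands.Context):
    await ctx.send("Reply 'yes' to confirm.")

    def custom_check(m: discord.Message):
        # Only accept a "yes" from the original author in the same channel.
        return (
            m.author == ctx.author
            and m.channel == ctx.channel
            and m.content.lower() == "yes"
        )

    try:
        await bot.wait_for("message", check=custom_check, timeout=30)
    except asyncio.TimeoutError:
        await ctx.send("Timed out.")
    else:
        await ctx.send("Confirmed.")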


Blogs

Check out the latest blogs from LambdaTest on this topic:

Keeping Quality Transparency Throughout the Organization

Software testers have a challenging job. Testing is frequently the last significant activity undertaken before a product ships, and since the terms “software” and “late” are nearly synonymous, it is the testers who catch the ire of the whole business as they try to test the software at the end. They are pressured to finish faster and to declare the product a “release candidate” before they have had enough time to be confident in it. To make matters worse, if bugs are discovered after release, everyone looks to the testers and asks, “Why didn’t you spot those bugs?” The testers did not cause the bugs, yet they bear some of the blame for the ones that slip through.

Continuous Integration explained with Jenkins deployment

Continuous integration is a coding philosophy and set of practices that encourage development teams to make small code changes and check them into a version control repository regularly. Most modern applications necessitate the development of code across multiple platforms and tools, so teams require a consistent mechanism for integrating and validating changes. Continuous integration creates an automated way for developers to build, package, and test their applications. A consistent integration process encourages developers to commit code changes more frequently, resulting in improved collaboration and code quality.

How To Handle Dynamic Dropdowns In Selenium WebDriver With Java

Joseph, who has been working as a Quality Engineer, was assigned to perform web automation for the company’s website.

Why Agile Teams Have to Understand How to Analyze and Make Adjustments

How do we acquire knowledge? This is one of those seemingly basic but critical questions you and your team members must consider. As professionals, we understand why we study and what we should learn. However, many of us do not give enough thought to how we learn.

Fault-Based Testing and the Pesticide Paradox

In some sense, testing can be more difficult than coding, because validating the quality of the test cases (i.e., the ‘goodness’ of your tests) is much harder than validating code correctness. In practice, tests are simply executed without any validation beyond the pass/fail verdict, whereas the code is (hopefully) always validated by testing. Designing and executing test cases only tells us that some tests passed and others failed; testers learn little about how many bugs remain in the code, or about how good their tests are at revealing them.

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites for your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run pandera automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.
