-
Notifications
You must be signed in to change notification settings - Fork 118
/
Copy pathtest.py
97 lines (81 loc) · 3.09 KB
/
test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import json
import os
from pathlib import Path
from cerberus import Validator as _Validator
import pytest
import bing
import pprint
pp = pprint.PrettyPrinter(indent=4)  # pretty-printer used to dump failing items in test output
# enable cache: Scrapfly response caching is toggled by the SCRAPFLY_CACHE env var ("true" to enable)
bing.BASE_CONFIG["cache"] = os.getenv("SCRAPFLY_CACHE") == "true"
class Validator(_Validator):
    """Cerberus validator subclass that tolerates the custom ``min_presence`` schema key.

    Cerberus rejects unknown rule names unless a ``_validate_<rule>`` method
    exists; this no-op hook lets schemas carry ``min_presence`` metadata,
    which is consumed separately by ``require_min_presence`` below.
    """
    def _validate_min_presence(self, min_presence, field, value):
        pass # required for adding non-standard keys to schema
def require_min_presence(items, key, min_perc=0.1):
    """Fail the current test unless at least ``min_perc`` of *items* have a truthy *key*."""
    total = len(items)
    present = len([entry for entry in items if entry.get(key)])
    required = total * min_perc
    if present < required:
        pytest.fail(
            f'inadequate presence of "{key}" field in dataset, only {present} out of {total} items have it (expected {min_perc*100}%)'
        )
def validate_or_fail(item, validator):
    """Validate *item* against *validator*; fail the test with a readable dump on error.

    :param item: dict-like scraped result to validate.
    :param validator: cerberus ``Validator`` instance with a schema already bound.
    """
    if not validator.validate(item):
        # pretty-print the offending item directly in the failure message
        # (a previous stray pp.pformat(item) call whose result was discarded
        # has been removed — it was dead code)
        pytest.fail(
            f"Validation failed for item: {pp.pformat(item)}\nErrors: {validator.errors}"
        )
# Cerberus schema for one organic search result scraped from a Bing SERP.
serp_schema = {
    "position": {"type": "integer"},
    "title": {"type": "string"},
    "url": {"type": "string"},
    "origin": {"type": "string"},
    "domain": {"type": "string"},
    "description": {"type": "string"},
    "date": {"type": "string", "nullable": True},  # not every result carries a date
}
# Cerberus schema for the keyword-research payload: FAQ entries plus related searches.
keyword_schema = {
    "FAQs": {
        "type": "list",
        "schema": {
            "type": "dict",
            "schema": {
                "query": {"type": "string"},
                "answer": {"type": "string"},
                "title": {"type": "string"},
                "domain": {"type": "string"},
                "url": {"type": "string"},
            },
        },
    },
    "related_keywords": {"type": "list", "schema": {"type": "string"}},
}
# Cerberus schema for rich snippet results (not referenced by the tests visible here).
rich_snippets_schema = {
    "title": {"type": "string"},
    "link": {"type": "string"},
    "heading": {"type": "string"},
    # NOTE(review): "descrption" looks like a typo for "description" — confirm it
    # matches the key actually emitted by the scraper before renaming.
    "descrption": {"type": "string"},
}
@pytest.mark.asyncio
@pytest.mark.flaky(reruns=3, reruns_delay=30)
async def test_serp_scraping():
    """Scrape three SERP pages and validate every result against ``serp_schema``.

    Also checks each schema field appears in at least 10% of results and, when
    SAVE_TEST_RESULTS=true, persists the dataset under ``results/``.
    """
    serp_data = await bing.scrape_search(query="web scraping emails", max_pages=3)
    validator = Validator(serp_schema, allow_unknown=True)
    for item in serp_data:
        validate_or_fail(item, validator)
    for k in serp_schema:
        require_min_presence(
            serp_data, k, min_perc=serp_schema[k].get("min_presence", 0.1)
        )
    # 3 pages should yield at least one full page worth of results
    assert len(serp_data) >= 10
    if os.getenv("SAVE_TEST_RESULTS") == "true":
        output = Path(__file__).parent / 'results/serps.json'
        # create results/ if missing so write_text doesn't raise FileNotFoundError
        output.parent.mkdir(parents=True, exist_ok=True)
        output.write_text(json.dumps(serp_data, indent=2, ensure_ascii=False))
@pytest.mark.asyncio
@pytest.mark.flaky(reruns=3, reruns_delay=30)
async def test_keyword_scraping():
    """Scrape keyword data (FAQs + related searches) and validate against ``keyword_schema``.

    When SAVE_TEST_RESULTS=true, persists the payload under ``results/``.
    """
    keyword_data = await bing.scrape_keywords(query="web scraping emails")
    validator = Validator(keyword_schema, allow_unknown=True)
    validate_or_fail(keyword_data, validator)
    # both sections must be non-empty for the scraper to be considered working
    assert len(keyword_data["FAQs"]) >= 1
    assert len(keyword_data["related_keywords"]) >= 1
    if os.getenv("SAVE_TEST_RESULTS") == "true":
        output = Path(__file__).parent / 'results/keywords.json'
        # create results/ if missing so write_text doesn't raise FileNotFoundError
        output.parent.mkdir(parents=True, exist_ok=True)
        output.write_text(json.dumps(keyword_data, indent=2, ensure_ascii=False))