run.py
"""
This example run script shows how to run the idealista.com scraper defined in ./idealista.py
It scrapes ads data and saves it to ./results/
To run this script set the env variable $SCRAPFLY_KEY with your scrapfly API key:
$ export $SCRAPFLY_KEY="your key from https://scrapfly.io/dashboard"
"""
import asyncio
import json
from pathlib import Path

import idealista

output = Path(__file__).parent / "results"
output.mkdir(exist_ok=True)

async def run():
    # enable scrapfly cache for basic use
    idealista.BASE_CONFIG["cache"] = True
    print("running Idealista scrape and saving results to ./results directory")

    # scrape province pages for the URLs of municipality search pages
    search_urls = await idealista.scrape_provinces(
        urls=["https://www.idealista.com/venta-viviendas/almeria-provincia/municipios"]
    )
    with open(output.joinpath("search_URLs.json"), "w", encoding="utf-8") as file:
        json.dump(search_urls, file, indent=2, ensure_ascii=False)

    # scrape individual property ad pages
    properties_data = await idealista.scrape_properties(
        urls=[
            "https://www.idealista.com/en/inmueble/98935300/",
            "https://www.idealista.com/en/inmueble/102479109/",
            "https://www.idealista.com/en/inmueble/102051911/",
            "https://www.idealista.com/en/inmueble/99394819/",
            "https://www.idealista.com/en/inmueble/102695949/",
        ]
    )
    with open(output.joinpath("properties.json"), "w", encoding="utf-8") as file:
        json.dump(properties_data, file, indent=2, ensure_ascii=False)

    # crawl a search listing, following its pagination
    crawl_data = await idealista.crawl_search(
        url="https://www.idealista.com/en/venta-viviendas/marbella-malaga/con-chalets/",
        # remove the max_scrape_pages parameter to scrape all pages
        max_scrape_pages=2,
    )
    with open(output.joinpath("crawl.json"), "w", encoding="utf-8") as file:
        json.dump(crawl_data, file, indent=2, ensure_ascii=False)

    # scrape the first few pages of a search listing
    search_data = await idealista.scrape_search(
        url="https://www.idealista.com/en/venta-viviendas/marbella-malaga/con-chalets/",
        # remove the max_scrape_pages parameter to scrape all pages
        max_scrape_pages=3,
    )
    with open(output.joinpath("search_data.json"), "w", encoding="utf-8") as file:
        json.dump(search_data, file, indent=2, ensure_ascii=False)


if __name__ == "__main__":
    asyncio.run(run())
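# A quick, optional way to inspect the output after a run (this assumes the
# scrape above completed and wrote results/properties.json; the file holds
# whatever scrape_properties returned, serialized as JSON):
#   python -c "import json; data = json.load(open('results/properties.json')); print(type(data), len(data))"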