Commit

added old scrapers
raymondlin1 committed May 18, 2020
1 parent a13cfdb commit 8ea58ab
Showing 1,115 changed files with 198,216 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,4 +1,5 @@
*/node_modules/
LEGACY_SCRAPERS/*/node_modules/
libinfo-scraper/*
!libinfo-scraper/
!libinfo-scraper/__pycache__
56 changes: 56 additions & 0 deletions LEGACY_SCRAPERS/busyness/__init__.py
@@ -0,0 +1,56 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from .crawler import run
from .crawler import get_populartimes

import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())

"""
ENTRY POINT
"""


def get(api_key, types, p1, p2, n_threads=20, radius=180, all_places=False):
"""
:param api_key: str; api key from google places web service
:param types: [str]; placetypes
:param p1: (float, float); lat/lng of the south-west delimiting point
:param p2: (float, float); lat/lng of the north-east delimiting point
:param n_threads: int; number of threads to use
:param radius: int; meters;
:param all_places: bool; include/exclude places without populartimes
:return: see readme
"""
params = {
"API_key": api_key,
"radius": radius,
"type": types,
"n_threads": n_threads,
"all_places": all_places,
"bounds": {
"lower": {
"lat": min(p1[0], p2[0]),
"lng": min(p1[1], p2[1])
},
"upper": {
"lat": max(p1[0], p2[0]),
"lng": max(p1[1], p2[1])
}
}
}

return run(params)


def get_id(api_key, place_id):
"""
retrieves the current popularity for a given place
:param api_key:
:param place_id:
:return: see readme
"""
return get_populartimes(api_key, place_id)
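
For context, a minimal usage sketch of the two entry points added in this file. This is illustrative only and not part of the commit; the package name busyness, the API key string, the coordinates, and the place_id are placeholders.

# Hypothetical usage sketch: query popular-times data for cafes inside a
# bounding box, assuming this package is importable as `busyness`.
import busyness

api_key = "YOUR_GOOGLE_PLACES_API_KEY"   # placeholder, not a real key
southwest = (40.1000, -88.2450)          # example lat/lng, SW corner
northeast = (40.1150, -88.2200)          # example lat/lng, NE corner

places = busyness.get(api_key, ["cafe"], southwest, northeast,
                      n_threads=10, radius=180, all_places=False)

# Current popularity for a single known place_id (return format: see readme).
current = busyness.get_id(api_key, "ChIJN1t_tDeuEmsRUsoyG83frY4")  # example id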