@@ -3,65 +3,47 @@
 # Distributed under the AGPL license, see LICENSE.txt
 
 import re
+import json
 
-from .base import BikeShareSystem, BikeShareStation
-from . import utils
+from pybikes import BikeShareSystem, BikeShareStation, PyBikesScraper
+from pybikes.utils import Bounded
 
-__all__ = ['Baksi', 'BaksiStation']
 
-ID_NAME_RGX = "([0-9]+)\-([\w\s.()-]+)\'"
-STATUS_RGX = "Durum\ [ ]+\ (\w+)"
-DOCKS_RGX = "Park[ ]+([0-9]+)"
-BIKES_RGX = "Bisiklet[ ]+([0-9]+)"
-LAT_LNG_RGX = "([\s0-9.]+)\',\ \'([\s0-9.]+)"
-
-class Baksi(BikeShareSystem):
+class Baksi(Bounded, BikeShareSystem):
 
     meta = {
         'system': 'Baksi',
         'company': ['Baksi Bike Sharing System']
     }
 
-    def __init__(self, tag, meta, feed_url):
-        super(Baksi, self).__init__(tag, meta)
+    def __init__(self, tag, meta, feed_url, bbox=None):
+        super(Baksi, self).__init__(tag, meta, bounds=bbox)
         self.feed_url = feed_url
 
-    def update(self, scraper = None):
-        if scraper is None:
-            scraper = utils.PyBikesScraper()
-
-        html_data=scraper.request(self.feed_url, raw = True).decode('iso-8859-9')
-
-        # Fetch Data
-        id_name = re.findall(ID_NAME_RGX, html_data, re.UNICODE)
-        status = re.findall(STATUS_RGX, html_data, re.UNICODE)
-        docks = re.findall(DOCKS_RGX, html_data, re.UNICODE)
-        bikes = re.findall(BIKES_RGX, html_data, re.UNICODE)
-        geopoints = re.findall(LAT_LNG_RGX, html_data, re.UNICODE)
-
-        # Refine Output
-        station_id, name = zip(*id_name)
-        status = ["Active" if out == "Aktif" else "Inactive" for out in status]
-        docks = [int(i) for i in docks]
-        bikes = [int(i) for i in bikes]
-        latitude, longitude = zip(*geopoints)
+    def update(self, scraper=None):
+        scraper = scraper or PyBikesScraper()
 
-        fields = zip(station_id, name, status, docks, bikes, latitude,
-                     longitude)
+        html_data = scraper.request(self.feed_url)
+        ex_data = re.search(r'var all = (\[.*\]);', html_data).group(1)
+        data = json.loads(ex_data)
 
-        self.stations = list(map(BaksiStation, fields))
+        self.stations = list(map(BaksiStation, data))
 
 
 class BaksiStation(BikeShareStation):
     def __init__(self, data):
         super(BaksiStation, self).__init__()
-        self.name = data[1]
-        self.bikes = data[3]
-        self.free = data[4]
+
+        uid, name = data[0].split('-', 1)
+
+        self.name = name
         self.latitude = float(data[5])
         self.longitude = float(data[6])
+
+        self.bikes = int(re.findall(r'\d+', data[2])[0])
+        self.free = int(re.findall(r'\d+', data[3])[0])
+
         self.extra = {
-            'uid': data[0],
-            'status': data[2],
-            'slots': int(data[3] + data[4])
+            'uid': uid,
+            'online': 'Aktif' in data[1],
         }
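The substantive change is in update: instead of scraping the rendered HTML with five separate regexes, the new code grabs the JavaScript array literal assigned to var all in the page and decodes it as JSON, so each station arrives as a single list. Below is a minimal standalone sketch of that flow; the sample payload is invented to match the indices BaksiStation reads (uid-name at 0, status at 1, bikes at 2, free docks at 3, coordinates at 5 and 6), and the real feed's layout may differ.

import json
import re

# Hypothetical page snippet; the field layout here is an assumption
# inferred from the indices BaksiStation reads, not the real Baksi feed.
page = ('var all = [["3-Kultur Merkezi", "Durum : Aktif", '
        '"Bisiklet : 7", "Park : 8", "", "40.1885", "29.0610"]];')

# Same extraction as the new update(): pull out the JS array literal
# and decode it as JSON.
data = json.loads(re.search(r'var all = (\[.*\]);', page).group(1))

for record in data:
    uid, name = record[0].split('-', 1)            # "3-Kultur Merkezi"
    online = 'Aktif' in record[1]                  # "Aktif" is Turkish for "active"
    bikes = int(re.findall(r'\d+', record[2])[0])  # digits in "Bisiklet : 7"
    free = int(re.findall(r'\d+', record[3])[0])   # digits in "Park : 8"
    print(uid, name, online, bikes, free,
          float(record[5]), float(record[6]))

Note that json.loads only works here because the array literal happens to be valid JSON (double-quoted strings, no trailing commas); if the feed ever switched to single quotes, the parse would need something like ast.literal_eval instead.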
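The other change, mixing in Bounded and accepting an optional bbox, forwards a bounding box to the mixin as bounds; as far as I can tell, pybikes uses that to drop stations whose coordinates fall outside the configured box, which guards against junk coordinates in feeds like this one.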