pool_parse.py
# 1 Parse with one process
# 2 Time tracker
# 3 Multiprocessing Pool
# 4 Time tracker
# 5 Export to CSV
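"""Scrape the name and price of every currency listed on
coinmarketcap.com's all-currencies page and append the results to
coinmarketcap.csv, fetching the individual currency pages in parallel
with a multiprocessing Pool."""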
import csv
from datetime import datetime
from multiprocessing import Pool

import requests
from bs4 import BeautifulSoup

def get_html(url):
    r = requests.get(url)  # Response object
    return r.text  # raw HTML of the page

def get_all_links(html):
    soup = BeautifulSoup(html, 'lxml')
    # Each currency row links to its detail page from the name cell
    tds = soup.find('table', id='currencies-all').find_all('td', class_='currency-name')
    links = []
    for td in tds:
        a = td.find('a').get('href')
        link = 'https://coinmarketcap.com' + a
        links.append(link)
    return links

def get_page_data(html):
    soup = BeautifulSoup(html, 'lxml')
    try:
        # strip() removes the surrounding whitespace
        name = soup.find('h1', class_='details-panel-item--name').text.strip()
    except AttributeError:  # element missing: find() returned None
        name = ''
    try:
        price = soup.find('span', class_='details-panel-item--price__value').text.strip()
    except AttributeError:
        price = ''
    data = {'name': name,
            'price': price}
    return data

def write_csv(data):
    # newline='' keeps the csv module from inserting blank rows on Windows
    with open('coinmarketcap.csv', 'a', newline='') as f:
        writer = csv.writer(f)
        writer.writerow((data['name'],
                         data['price']))
    print(data['name'], 'parsed')
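    # NOTE: many pool workers append to this file concurrently; each call
    # writes one short row, but rows could still interleave. A
    # multiprocessing.Lock or a single writer process would make this robust.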

def make_all(url):
    html = get_html(url)
    data = get_page_data(html)
    write_csv(data)

def main():
    start = datetime.now()
    url = 'https://coinmarketcap.com/all/views/all/'
    all_links = get_all_links(get_html(url))
    # Single-process version, kept for timing comparison:
    # for index, url in enumerate(all_links):
    #     html = get_html(url)
    #     data = get_page_data(html)
    #     write_csv(data)
    #     print(index)
    with Pool(40) as p:  # 40 worker processes, one URL each
        p.map(make_all, all_links)
    end = datetime.now()
    print(str(end - start))

if __name__ == '__main__':
    main()
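
# A minimal usage sketch, assuming Python 3 with the dependencies installed:
#
#   pip install requests beautifulsoup4 lxml
#   python pool_parse.py
#
# coinmarketcap.com's markup may have changed since this script was written,
# so the table id and CSS classes above may no longer match the live site.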