-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathuntitled0.py
More file actions
211 lines (143 loc) · 4.25 KB
/
untitled0.py
File metadata and controls
211 lines (143 loc) · 4.25 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 20 11:10:30 2018
@author: heyo
"""
import urllib.request
import json
from pprint import pprint
import requests
def read_web_json(link):
    """Fetch *link* over HTTP and return its body parsed as JSON."""
    with urllib.request.urlopen(link) as resp:
        return json.loads(resp.read().decode())
def read_local_json(filename):
    """Load and return the JSON document stored at *filename*."""
    with open(filename) as fh:
        return json.load(fh)
def get_keys(data):
    """Return the view of *data*'s top-level keys (thin dict.keys wrapper)."""
    return data.keys()
def read_html(link):
    """GET *link* with requests and return the response body as text."""
    response = requests.get(link)
    return response.text
def count_line(doc):
    """Return the number of lines in the text file *doc*.

    Fix: the original called ``open(doc)`` inside a generator expression and
    never closed the handle; a context manager releases it deterministically.
    """
    with open(doc) as fh:
        return sum(1 for _ in fh)
def get_json_links(data):
    """Collect the 'link' value of every dict in *data*.

    (Original Turkish docstring: pulls the JSON links listed inside the
    folder's index JSON.)
    """
    return [entry['link'] for entry in data]
def get_links_in_json(json_list):
    """Fetch every remote JSON index in *json_list* and return all article links.

    Prints a "current/total" progress line per index fetched.
    (Original Turkish docstring: fetches the news from the collected JSON links.)
    """
    collected = []
    total = len(json_list)
    for idx, index_url in enumerate(json_list):
        print(str(idx) + "/" + str(total))
        payload = read_web_json(index_url)
        collected.extend(item['link'] for item in payload['data'])
    return collected
def write_html(link):
    """Download each URL in *link* and save its HTML to ``newsN.txt``.

    Best-effort batch: a failure on one site is reported and skipped instead
    of aborting the run. Fixes over the original: the output file is opened
    with ``with`` (the handle leaked whenever read_html raised, because
    close() was skipped by the exception), the fetch happens *before* the
    file is created (no empty stub files on failure), and the bare
    ``except: pass`` now names the exception and reports it.
    """
    for idx, site in enumerate(link):
        try:
            html = read_html(site)
            with open("news" + str(idx) + ".txt", "w") as out:
                out.write(html)
        except Exception as exc:  # still best-effort, but no longer silent
            print("skipping " + site + ": " + str(exc))
# --- Script body: load the local 2007 archive index and resolve links ---
# NOTE(review): assumes 2007.json has shape
# {"data": [{"guid": ..., "link": ...}, ...]} — confirm with its producer.
data = read_local_json("2007.json")
data = data['data']  # keep only the article list; the wrapping dict is dropped
json_links = get_json_links(data)
web_json_links = get_links_in_json(json_links)  # network-heavy: one request per index
# Dead experiment kept as a bare string literal (evaluated and discarded at
# runtime); note that `links` below was never defined anywhere in this file.
"""
from multiprocessing.dummy import Pool as ThreadPool
pool = ThreadPool(16)
result = pool.map(write_html, links)
new_data = data[:20]
"""
""" guid id'ye göre directory oluşturup içerisine json'ı yazıyor """
import os
for val in data:
title = val['guid']
json_link = val['link']
"""
s = list(title)
for idx, cha in enumerate(s):
if cha == "/":
s[idx] = "."
"".join(s)
"""
os.mkdir(title)
os.chdir(title)
read_json = read_web_json(json_link)
write_json = open(str(title) + ".json", "w")
dump_dict = json.dumps(read_json)
write_json.write(dump_dict)
write_json.close()
os.chdir("..")
""" guid ve json'ın içindeki linkleri csv'ye yazdırıyor """
with open('eggs.csv', 'w', newline='') as csvfile:
for idx, val in enumerate(data):
title = val['guid']
json_link = val['link']
"""
s = list(title)
for idx, cha in enumerate(s):
if cha == "/":
s[idx] = "."
"".join(s)
"""
print(str(idx) + "/" + str(len(data)))
read_json = read_web_json(json_link)
for links in read_json['data']:
spamwriter = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow([title, links['link']])
heyo = "asd"
import csv
with open('eggs.csv', 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow([heyo])
spamwriter.writerow(['Spam', 'Lovely Spam', 'Wonderful Spam'])
title = "////"
s = list(title)
for idx, cha in enumerate(s):
if cha == "/":
s[idx] = "."
"".join(s)
""" Çekilen json linklerindeki haberleri çeker """
web_json_list = []
new_json_list = json_list[:8]
for webdic in json_list:
web_json = read_web_json(webdic)
for wj in web_json['data']:
web_json_list.append(wj['link'])
file = open("2007_newlink.txt","w")
for link in links:
file.write(link + "\n")
file.close()
# Download every article page to newsN.txt. The original re-implemented the
# write_html() helper defined above almost verbatim — but never closed any of
# the files it opened and bare-excepted every failure — so call the helper
# (which closes its files) instead of duplicating its loop here.
write_html(web_json_list)
# Exploratory tail (interactive-session leftovers, kept for reference).
# Fix: `data` was rebound to data['data'] (a list) near the top of this
# script, so `data.keys()` here raised AttributeError; reload the full
# document first so the dict-style accesses below actually work.
data = read_local_json("2007.json")
pprint(data.keys())
pprint(data['data'])
new_data = data['data']
pprint(len(new_data))
pprint(new_data[0])
pprint(type(new_data))
first_data = new_data[0]
link = first_data['link']
# Fix: the original called get_web_json(), which is not defined anywhere in
# this file; read_web_json() is the helper that exists.
web_json = read_web_json(link)
# Dropped dead no-op expressions from the original (`l = []`, bare `type(l)`
# and `type(new_data)` calls whose results were discarded).
heyo = read_local_json("2007.json")