Commit c3f034c7568b9d3b6400836a8d9c5153c0d354a8

Authored by Noah
1 parent 47237816c6
Exists in master

Allow a single content item to be processed manually

Showing 3 changed files with 189 additions and 9 deletions

insight/postinsight.py
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-

 import requests
 import json
 from url import Url


 class PostInsight() :

     token = ""
     type = ""
     link = ""
     comment = ""
+    created_time = ""
+    message = ""
     data = {}
     insight_map = {}

     def __init__(self,token):
         self.token = token.getToken()
         #print "Insight"

     def setContentInsight(self,content_id):

-        url ='https://graph.facebook.com/v2.5/%s?fields=link,comments.limit(5),type,insights{values}'%content_id
+        url ='https://graph.facebook.com/v2.5/%s?fields=message,created_time,link,comments.limit(5),type,insights{values}'%content_id
         txt = requests.get(url + self.token).text
         #print url + self.token
         self.data = json.loads(txt)
         self.setContentType(self.data)
         self.setContentComment(self.data)
         self.setLinkUrl(self.data)
         #print self.data
         try:
             for i in self.data['insights']['data']:
                 name = i['id'].split("/")[2]
                 values = i['values'][0]
                 self.insight_map[name]= values
         except:
             return

     def getContentInsightByKey(self, key):
         #subkey is double only

         sub_key_list = key.split(',')
         try:
             if len(sub_key_list) == 2:
                 ret = self.insight_map[sub_key_list[0]]['value'][sub_key_list[1]]
             else :
                 ret = self.insight_map[sub_key_list[0]]['value']
         except:
             return 0

         if ret == {} :
             return 0

         return ret


     def getContentInsightAll(self):
         return self.insight_map

+
+    def setContentCreated_time(self, data):
+        try:
+            self.created_time = data['created_time']
+        except:
+            self.created_time = ""
+
+    def setContentMessage(self, data):
+        try:
+            self.message = data['message']
+        except:
+            self.message = ""
+
     def setContentType(self, data):
         try:
             self.type = data['type']
         except:
             self.type = ""

     def setContentComment(self, data):
         try:
             self.comment = Url().getText2bitly(data['comments']['data'][0]['message'])
             #print self.comment
         except:
             self.comment = ""

     def setLinkUrl(self, data):
         try:
             self.link = data['link']
             #print self.link
         except:
             self.link = ""

     def getLinkUrl(self):
         return self.link
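
For reference, getContentInsightByKey accepts either a bare insight name or a "name,subkey" pair; the second form reads insight_map[name]['value'][subkey], which is how runcontent.py below requests keys such as "post_story_adds_by_action_type,share". A minimal usage sketch, assuming a Token object whose getToken() returns the access-token query fragment appended to the Graph API URL:

    insight = PostInsight(Token())
    insight.setContentInsight("467342726674321_1159260310815889")
    impressions = insight.getContentInsightByKey("post_impressions")                 # bare metric
    shares = insight.getContentInsightByKey("post_story_adds_by_action_type,share")  # metric with sub-key
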
insight/url.py
 # -*- coding: utf-8 -*-

 import requests
 from base62 import Base62
 import json
 import re

 class Url() :

     server = "http://10.128.0.20/"

     def url2dic(self, links):
         data = []

         if (True):
             for link in links:

                 link = link.replace('\n'," ").replace('\r'," ").replace('\'',"")

+                print link
+
                 if len(link) < 5:
                     continue

                 if self.isdeep(link) :

                     data.append({
                         'bitly_url' : "",
                         'origin_url': str(link),
                         'bitly_click': "0",
                         'piki_cid' : str(self.Url2Cid(link)),
                         'rpiki_click' : "0"
                     })

                 elif self.isrpiki(link) :
-                    print "여기?"
                     print link
                     data.append({
                         'bitly_url' : "",
                         'origin_url': str(link),
                         'bitly_click': "0",
                         'piki_cid' : str(self.Url2Cid(link)),
                         'rpiki_click' : str(self.rpiki2click(link))
                     })

                 elif self.isbitly(link):
-
                     try:
                         link = "http://bit.ly/" + re.compile('[^./a-zA-Z0-9]+').sub("",link.split("//bit.ly/")[1].split(" ")[0])

                         link_meta = link + "+"
                         txt = requests.get(link_meta).text

                         source_tag_op = "\"long_url\": \""
                         source_tag_cl = "\""

                         clicks_tag_op = "\"user_clicks\": "
                         clicks_tag_cl = ","

                         source_bgn = txt.find(source_tag_op) + len(source_tag_op)
                         source_end = source_bgn + txt[source_bgn:(source_bgn + 500)].find(source_tag_cl)
                         clicks_bgn = txt.find(clicks_tag_op) + len(clicks_tag_op)
                         clicks_end = clicks_bgn + txt[clicks_bgn:(clicks_bgn + 50)].find(clicks_tag_cl)

                     except:
                         data.append({'bitly_url' : "",'origin_url': "",'bitly_click': "0",'piki_cid' : "0",'rpiki_click' : "0"})

                     try:
                         piki_url = str(txt[source_bgn:source_end]).split("cid=")[1].split("&")[0]
                     except:
                         piki_url = str(0)

+                    print self.rpiki2click(txt[source_bgn:source_end])
+
                     if self.isrpiki(txt[source_bgn:source_end]) :
                         data.append({
                             'bitly_url' : str(link),
                             'origin_url': str(txt[source_bgn:source_end]),
                             'bitly_click': str(txt[clicks_bgn:clicks_end]),
                             'piki_cid' : str(Base62().decode(piki_url)),
                             'rpiki_click' : str(self.rpiki2click(txt[source_bgn:source_end]))
                         })

                     else:
                         #print link
                         data.append({
                             'bitly_url' : str(link),
                             'origin_url': str(txt[source_bgn:source_end]),
                             'bitly_click': str(txt[clicks_bgn:clicks_end]),
                             'piki_cid' : str(Base62().decode(piki_url)),
                             'rpiki_click' : "0"
                         })

         if len(data) == 0:
             data.append({'bitly_url' : "",'origin_url': "",'bitly_click': "0",'piki_cid' : "0",'rpiki_click' : "0"})

         return data



     def Url2Cid(self,url):

         try:
             if self.isrpiki(url):
                 return Base62().decode(url.split("cid=")[1].split("&")[0])
             elif self.isdeep(url):
                 return requests.get(url).text.split("http://www.pikicast.com/share/")[1].split('"')[0]
         except :
             return "0"

     def rpiki2click(self,url):
-        #print url
+        print url
         api = self.server + "contents_RPIKI_api/"

         try:
             fr = url.split("fr=")[1].split("&")[0]
         except:
             fr = ""
-
         try:
             cid = url.split("cid=")[1].split("&")[0]
+        except:
+            cid = ""
+        try:
             m = url.split("m=")[1].split("&")[0]
+        except:
+            m = ""
+        try:
             c = url.split("c=")[1].split("&")[0]
+        except:
+            c = ""
+        try:
             v = url.split("v=")[1].split("&")[0]
+        except:
+            v = ""
+        try:
             t = url.split("t=")[1].split("&")[0]
+        except:
+            t = ""
+
+        try:
             data = json.loads(requests.get(api + cid + '_' + fr + '_' + m + '_' + c + '_' + v + '_' + t).text)
+
             ret = data['data']['real']
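
As context for the change above: rpiki2click now tolerates URLs missing any of the cid/fr/m/c/v/t query parameters, defaulting each to "" before requesting contents_RPIKI_api/<cid>_<fr>_<m>_<c>_<v>_<t>. A minimal sketch of the same extraction using the Python 2 stdlib urlparse module, an alternative to the chained split() calls rather than what this commit does (the example URL is hypothetical):

    from urlparse import urlparse, parse_qs

    def rpiki_params(url):
        # Missing parameters fall back to "", matching the try/except defaults above.
        qs = parse_qs(urlparse(url).query)
        get = lambda key: qs.get(key, [""])[0]
        return "_".join(get(key) for key in ("cid", "fr", "m", "c", "v", "t"))

    # rpiki_params("http://example.com/view?cid=AbC1&fr=fb&m=1") -> "AbC1_fb_1___"
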
runcontent.py (new file)

+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from insight.context import Context
+from insight.token import Token
+from insight.postinsight import PostInsight
+from insight.datadb import DataDB
+
+from insight.url import Url
+import time
+
+
+if __name__=='__main__':
+
+    token = Token()
+    contents_list = []
+
+
+    contents_list.append({"id": "467342726674321_1159260310815889"})
+
+
+
+    for content in contents_list:
+        datadb = DataDB()
+        time.sleep(1)
+        p_id = str(content['id'].split('_')[0])
+        c_id = str(content['id'].split('_')[1])
+
+        insight = PostInsight(token)
+        insight.setContentInsight(content['id'])
+
+        created_time = insight.getContentCreated_time()
+        message = insight.getContentMessage()
+        message_url = Url().getText2bitly(message)
+
+        lists =["post_story_adds_unique",
+                "post_story_adds",
+                "post_story_adds_by_action_type_unique,comment",
+                "post_story_adds_by_action_type_unique,like",
+                "post_story_adds_by_action_type_unique,share",
+                "post_story_adds_by_action_type,comment",
+                "post_impressions",
+                "post_impressions_paid_unique",
+                "post_impressions_paid",
+                "post_story_adds_by_action_type,like",
+                "post_story_adds_by_action_type,share",
+                "post_impressions_unique",
+                "post_impressions_organic_unique",
+                "post_impressions_organic",
+                "post_impressions_by_story_type_unique,other",
+                "post_impressions_by_story_type,other",
+                "post_consumptions_by_type_unique,other clicks",
+                "post_consumptions_by_type_unique,photo view",
+                "post_consumptions_by_type_unique,video play",
+                "post_consumptions_by_type_unique,link clicks",
+                "post_consumptions_by_type,other clicks",
+                "post_consumptions_by_type,photo view",
+                "post_consumptions_by_type,video play",
+                "post_consumptions_by_type,link clicks",
+                "post_engaged_users",
+                "post_video_views",
+                "post_video_views_unique",
+                "post_video_views_paid",
+                "post_video_views_autoplayed",
+                "post_video_views_10s",
+                "post_video_views_10s_unique",
+                "post_video_views_10s_paid",
+                "post_video_views_10s_organic",
+                "post_video_views_10s_clicked_to_play",
+                "post_video_views_10s_autoplayed",
+                "post_video_views_10s_sound_on",
+                "post_video_views_sound_on",
+                "post_video_view_time",
+                "post_video_complete_views_organic",
+                "post_video_complete_views_paid"]
+
+        sqlprefix = "insert into facebook_insights2 ("
+        sqlreplace = "REPLACE into facebook_insights2_last ("
+        sqlvalues = " values ("
+
+        sqlprefix += "`p_id`, "
+        sqlprefix += "`c_id`, "
+        sqlprefix += "`type`, "
+        sqlprefix += "`message`, "
+        sqlprefix += "`message_url`, "
+        sqlprefix += "`comment_url`, "
+        sqlprefix += "`created_time`, "
+        sqlprefix += "`loging_time`, "
+
+        sqlprefix += "`bit_url`, "
+        sqlprefix += "`bit_click`, "
+        sqlprefix += "`origin_url`, "
+        sqlprefix += "`piki_cid`, "
+        sqlprefix += "`rpiki_click`, "
+
+        sqlvalues += p_id + ", "
+        sqlvalues += c_id + ", "
+        sqlvalues += "'" + insight.getContentType() + "', "
+        sqlvalues += "'" + message + "', "
+        sqlvalues += "'" + message_url + "', "
+        sqlvalues += "'" + insight.getContentCommentUrl() + "', "
+        sqlvalues += "'" + created_time + "', "