本文主要是介绍微信统计数据的获取及存储,希望对大家解决编程问题提供一定的参考价值,需要的开发者们随着小编来一起学习吧!
目录:
1、需求:
2、开发前期准备:
3、代码结构:
4、Mysql数据表设计:
5、代码展示:
6、结果展示:
----------------------------------------------------------------
1、需求:
熟悉获取微信统计数据的接口,并设计数据获取方案,微信数据接口文档地址:https://mp.weixin.qq.com/wiki/15/88726a421bfc54654a3095821c3ca3bb.html
2、开发前期准备:
1、ACCESS_TOKEN:获取微信唯一标识ACCESS_TOKEN,同时也是公众号的全局唯一票据
2、需要公司公众号的AppID和AppSecret从而得到ACCESS_TOKEN
3、需要Mysql的建表权限
4、学习步骤:weiXinEntry.py–>DataAnalysis.py–>MysqlUtils.py
3、代码结构:
4、Mysql数据表设计:
WeiXinSQL
-- Table + sample row for user analytics (getusersummary / getusercumulate).
CREATE TABLE getUser (
    id            INT NOT NULL AUTO_INCREMENT,
    datasource    VARCHAR(45) NULL,
    ref_date      VARCHAR(45) NULL,
    user_source   INT NULL,
    new_user      INT NULL,
    cancel_user   INT NULL,
    cumulate_user INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getUser` (`datasource`, `ref_date`, `user_source`, `new_user`, `cancel_user`, `cumulate_user`)
VALUES ('getusersummary', '2016-12-02', 0, 0, 0, 0);

-- Table + sample row for article analytics (getarticlesummary, getuserread, ...).
CREATE TABLE getArticle (
    id                  INT NOT NULL AUTO_INCREMENT,
    datasource          VARCHAR(45) NULL,
    ref_date            VARCHAR(45) NULL,
    ref_hour            INT NULL,
    stat_date           VARCHAR(45) NULL,
    msgid               VARCHAR(45) NULL,
    title               VARCHAR(45) NULL,
    int_page_read_user  INT NULL,
    int_page_read_count INT NULL,
    ori_page_read_user  INT NULL,
    ori_page_read_count INT NULL,
    share_scene         INT NULL,
    share_user          INT NULL,
    share_count         INT NULL,
    add_to_fav_user     INT NULL,
    add_to_fav_count    INT NULL,
    target_user         INT NULL,
    user_source         INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getArticle` (`datasource`, `title`, `ref_date`, `ref_hour`, `stat_date`, `msgid`,
                          `int_page_read_user`, `int_page_read_count`, `ori_page_read_user`,
                          `ori_page_read_count`, `share_scene`, `share_user`, `share_count`,
                          `add_to_fav_user`, `add_to_fav_count`, `target_user`, `user_source`)
VALUES ('getuserreadhour', '', '2017-01-03', 1500, '0', '0', 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 5);

-- Table + sample row for interface-call analytics (getinterfacesummary[hour]).
CREATE TABLE getInterface (
    id              INT NOT NULL AUTO_INCREMENT,
    datasource      VARCHAR(45) NULL,
    ref_date        VARCHAR(45) NULL,
    ref_hour        INT NULL,
    callback_count  INT NULL,
    fail_count      INT NULL,
    total_time_cost INT NULL,
    max_time_cost   INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getInterface` (`datasource`, `ref_date`, `ref_hour`, `callback_count`, `fail_count`, `total_time_cost`, `max_time_cost`)
VALUES ('getinterfacesummary', '2017-01-03', 0, 3, 0, 950, 340);

-- Table + sample row for upstream-message analytics (getupstreammsg*).
CREATE TABLE getupStreammsg (
    id                  INT NOT NULL AUTO_INCREMENT,
    datasource          VARCHAR(45) NULL,
    ref_date            VARCHAR(45) NULL,
    ref_hour            INT NULL,
    msg_type            INT NULL,
    msg_user            INT NULL,
    msg_count           INT NULL,
    count_interval      INT NULL,
    int_page_read_count INT NULL,
    ori_page_read_user  INT NULL,
    PRIMARY KEY (id, datasource)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

INSERT INTO `getupStreammsg` (`datasource`, `ref_date`, `ref_hour`, `msg_type`, `msg_user`, `msg_count`, `count_interval`, `int_page_read_count`, `ori_page_read_user`)
VALUES ('getupstreammsg', '2016-12-01', 0, 1, 1, 101, 0, 0, 0);
5、代码展示:
weiXinEntry.py
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""Entry point: walk a date range one day at a time and pull every WeChat
analytics family (user / article / upstream-message / interface) for that day."""
import datetime
import json
import urllib2

import DataAnalysis


def get_content(url):
    """Return the response body of *url* as a string, always closing the handle."""
    handle = urllib2.urlopen(url)
    try:
        return handle.read()
    finally:
        # Original leaked the handle if read() raised; close unconditionally.
        handle.close()


def getAccess_token(AppID, AppSecret):
    """Fetch ACCESS_TOKEN, the official account's global unique credential."""
    url = ("https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential"
           "&appid=" + AppID + "&secret=" + AppSecret)
    return json.loads(get_content(url))["access_token"]


if __name__ == "__main__":
    # Placeholders: fill in the company account's real credentials.
    AppID = "公司的AppID"
    AppSecret = "公司的AppSecret"
    ACCESS_TOKEN = getAccess_token(AppID, AppSecret)
    print(ACCESS_TOKEN)

    startDay = "2016-12-25"
    endDay = "2017-01-16"
    StartDay = datetime.datetime.strptime(startDay, "%Y-%m-%d").date()
    EndDay = datetime.datetime.strptime(endDay, "%Y-%m-%d").date()
    countdays = (EndDay - StartDay).days

    dataAnalysis = DataAnalysis.getDataAnalysis()
    # One round of API calls per day; the end day itself is excluded, exactly
    # as in the original manual while-counter loop.
    for offset in range(countdays):
        FirstDay = StartDay + datetime.timedelta(days=offset)
        print("FirstDay : " + str(FirstDay))
        dataAnalysis.getUser(ACCESS_TOKEN, FirstDay, FirstDay)
        dataAnalysis.getArticle(ACCESS_TOKEN, FirstDay, FirstDay)
        dataAnalysis.getupstreammsg(ACCESS_TOKEN, FirstDay, FirstDay)
        dataAnalysis.getInterface(ACCESS_TOKEN, FirstDay, FirstDay)
DataAnalysis.py
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""Pull WeChat "datacube" analytics (users, articles, upstream messages,
interface calls) and persist every record through MysqlUtils."""
import json

import requests

import MysqlUtils

# Common prefix of every WeChat analytics endpoint used below.
_API_BASE = "https://api.weixin.qq.com/datacube/"


class getDataAnalysis:
    """WeChat statistics retrieval.

    Each get*Insert method writes records of one table; each get* method
    POSTs a date range to a family of endpoints and stores the responses.
    """

    @staticmethod
    def _fill_defaults(record, fields, default=0):
        """Ensure every key in *fields* exists in *record*.

        WeChat omits fields whose value is zero/empty, so they are made
        explicit here and the INSERT templates can index them unconditionally.
        """
        for field in fields:
            record.setdefault(field, default)

    @staticmethod
    def _post(access_token, endpoint, begin_date, end_date):
        """POST the [begin_date, end_date] range to *endpoint* and return parsed JSON."""
        payload = json.dumps({'begin_date': str(begin_date),
                              'end_date': str(end_date)})
        response = requests.post(_API_BASE + endpoint + "?access_token=" + access_token,
                                 data=payload)
        return json.loads(response.content)

    def getUserInsert(self, name, datasource):
        """Insert one getUser row per record in datasource['list']."""
        fields = ("user_source", "new_user", "cancel_user", "cumulate_user")
        try:
            for rec in datasource['list']:
                self._fill_defaults(rec, fields)
                # NOTE(review): values are %-interpolated rather than bound as
                # parameters; tolerable only because they come from WeChat's API,
                # not from end users.
                sql = ("INSERT INTO `getUser` (`datasource`, `ref_date`, `user_source`, `new_user` , `cancel_user` , `cumulate_user`) "
                       "VALUES ('%s','%s',%d,%d,%d,%d)"
                       % (name, rec['ref_date'], rec['user_source'],
                          rec['new_user'], rec['cancel_user'], rec['cumulate_user']))
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception:  # best-effort import: report the payload and move on
            print("ERROR ---- ", name, datasource)

    def getUser(self, access_token, begin_date, end_date):
        """User analytics: daily gains/losses and cumulative totals."""
        for endpoint in ("getusersummary", "getusercumulate"):
            self.getUserInsert(
                endpoint, self._post(access_token, endpoint, begin_date, end_date))

    def getArticledetailsInsert(self, name, datasource):
        """Insert getArticle rows for endpoints whose records carry a 'details'
        list (e.g. getarticletotal): one row per detail entry."""
        top_fields = ("msgid", "title")
        detail_fields = ("stat_date", "target_user", "int_page_read_user",
                         "int_page_read_count", "ori_page_read_user",
                         "ori_page_read_count", "share_user", "share_count",
                         "add_to_fav_user", "add_to_fav_count")
        try:
            for rec in datasource['list']:
                self._fill_defaults(rec, top_fields)
                for detail in rec['details']:
                    self._fill_defaults(detail, detail_fields)
                    sql = ("INSERT INTO `getArticle` (`datasource`,`title`,`ref_date`,`stat_date`,`msgid`,`int_page_read_user`,`int_page_read_count`,`ori_page_read_user`,"
                           "`ori_page_read_count`,`share_user`,`share_count`,`add_to_fav_user`,`add_to_fav_count`,`target_user`) "
                           "VALUES ('%s','%s','%s','%s','%s',%d,%d,%d,%d,%d,%d,%d,%d,%d)"
                           % (name, rec['title'], rec['ref_date'], detail['stat_date'],
                              rec['msgid'], detail['int_page_read_user'],
                              detail['int_page_read_count'], detail['ori_page_read_user'],
                              detail['ori_page_read_count'], detail['share_user'],
                              detail['share_count'], detail['add_to_fav_user'],
                              detail['add_to_fav_count'], detail['target_user']))
                    print(sql)
                    MysqlUtils.dbexecute(sql)
        except Exception:
            print("ERROR ---- ", name, datasource)

    def getArticleInsert(self, name, datasource):
        """Insert one getArticle row per flat (detail-less) record."""
        # BUGFIX: the original tuple read "int_page_read_user""int_page_read_count"
        # (missing comma), so the two literals concatenated and
        # int_page_read_count was never defaulted -> KeyError, silently
        # swallowed by the bare except and the row was dropped.
        fields = ("ref_hour", "stat_date", "msgid", "int_page_read_user",
                  "int_page_read_count", "ori_page_read_user", "ori_page_read_count",
                  "share_scene", "share_user", "share_count", "add_to_fav_user",
                  "add_to_fav_count", "target_user", "user_source")
        try:
            for rec in datasource['list']:
                self._fill_defaults(rec, fields)
                # title is the only text field; default it to "" rather than 0.
                self._fill_defaults(rec, ("title",), default="")
                sql = ("INSERT INTO `getArticle` (`datasource`,`title`,`ref_date`,`ref_hour`,`stat_date`,`msgid`,`int_page_read_user`,`int_page_read_count`,`ori_page_read_user`,"
                       "`ori_page_read_count`,`share_scene`,`share_user`,`share_count`,`add_to_fav_user`,`add_to_fav_count`,`target_user`,`user_source`) "
                       "VALUES ('%s','%s','%s',%d,'%s','%s',%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)"
                       % (name, rec['title'], rec['ref_date'], rec['ref_hour'],
                          rec['stat_date'], rec['msgid'], rec['int_page_read_user'],
                          rec['int_page_read_count'], rec['ori_page_read_user'],
                          rec['ori_page_read_count'], rec['share_scene'],
                          rec['share_user'], rec['share_count'],
                          rec['add_to_fav_user'], rec['add_to_fav_count'],
                          rec['target_user'], rec['user_source']))
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception:
            print("ERROR ---- ", name, datasource)

    def getArticle(self, access_token, begin_date, end_date):
        """Article analytics: daily summary, totals (with details), reads, shares."""
        self.getArticleInsert(
            "getarticlesummary",
            self._post(access_token, "getarticlesummary", begin_date, end_date))
        # getarticletotal is the only endpoint returning nested 'details'.
        self.getArticledetailsInsert(
            "getarticletotal",
            self._post(access_token, "getarticletotal", begin_date, end_date))
        for endpoint in ("getuserread", "getuserreadhour",
                         "getusershare", "getusersharehour"):
            self.getArticleInsert(
                endpoint, self._post(access_token, endpoint, begin_date, end_date))

    def getupstreammsgInsert(self, name, datasource):
        """Insert one getupStreammsg row per record."""
        fields = ("ref_hour", "msg_type", "msg_user", "msg_count",
                  "count_interval", "int_page_read_count", "ori_page_read_user")
        try:
            for rec in datasource['list']:
                self._fill_defaults(rec, fields)
                sql = ("INSERT INTO `getupStreammsg` (`datasource`,`ref_date`,`ref_hour`,`msg_type`,`msg_user`,`msg_count`,`count_interval`,`int_page_read_count`,`ori_page_read_user`) "
                       "VALUES ('%s','%s',%d,%d,%d,%d,%d,%d,%d)"
                       % (name, rec['ref_date'], rec['ref_hour'], rec['msg_type'],
                          rec['msg_user'], rec['msg_count'], rec['count_interval'],
                          rec['int_page_read_count'], rec['ori_page_read_user']))
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception:
            print("ERROR ---- ", name, datasource)

    def getupstreammsg(self, access_token, begin_date, end_date):
        """Message analytics: overall / hourly / weekly / monthly / distribution."""
        for endpoint in ("getupstreammsg", "getupstreammsghour",
                         "getupstreammsgweek", "getupstreammsgmonth",
                         "getupstreammsgdist", "getupstreammsgdistweek",
                         "getupstreammsgdistmonth"):
            self.getupstreammsgInsert(
                endpoint, self._post(access_token, endpoint, begin_date, end_date))

    def getInterfaceInsert(self, name, datasource):
        """Insert one getInterface row per record."""
        fields = ("ref_hour", "callback_count", "fail_count",
                  "total_time_cost", "max_time_cost")
        try:
            for rec in datasource['list']:
                self._fill_defaults(rec, fields)
                sql = ("INSERT INTO `getInterface` (`datasource`,`ref_date`,`ref_hour`,`callback_count`,`fail_count`,`total_time_cost`,`max_time_cost`) "
                       "VALUES ('%s','%s',%d,%d,%d,%d,%d)"
                       % (name, rec['ref_date'], rec['ref_hour'],
                          rec['callback_count'], rec['fail_count'],
                          rec['total_time_cost'], rec['max_time_cost']))
                print(sql)
                MysqlUtils.dbexecute(sql)
        except Exception:
            print("ERROR ---- ", name, datasource)

    def getInterface(self, access_token, begin_date, end_date):
        """Interface-call analytics: daily summary and hourly breakdown."""
        for endpoint in ("getinterfacesummary", "getinterfacesummaryhour"):
            self.getInterfaceInsert(
                endpoint, self._post(access_token, endpoint, begin_date, end_date))
MysqlUtils.py
#!/usr/bin/python
# -*- coding: UTF-8 -*-
"""Thin MySQL helper: one module-level connection/cursor shared by all callers."""
import mysql.connector

# Open the database connection once at import time.
# NOTE(review): host/user/passwd are placeholders -- move real credentials to
# configuration before deploying.
db = mysql.connector.connect(host='ip', user='root', passwd='123456', db='bi_data')
# Shared cursor used for every statement issued through this module.
cursor = db.cursor()


def dbexecute(sql):
    """Execute an INSERT/UPDATE/DELETE statement and commit it.

    Rolls back on failure.  The shared connection is deliberately left open so
    later calls keep working -- the original closed `db` on the error path,
    which broke every subsequent dbexecute/dbquery call.  Call dbClose() when
    the batch is finished.
    """
    try:
        cursor.execute(sql)
        db.commit()
    except Exception as exc:
        db.rollback()
        # Surface the failure instead of swallowing it silently.
        print("ERROR -- rolled back:", exc)


def dbquery(sql):
    """Run a SELECT and return all rows as a list of tuples (None on failure)."""
    try:
        cursor.execute(sql)
        return cursor.fetchall()
    except Exception:
        print("Error: unable to fetch data")


def dbClose():
    """Close the shared cursor AND the connection (original leaked `db`)."""
    cursor.close()
    db.close()


if __name__ == "__main__":
    # Self-test of the default-filling idiom used by DataAnalysis: keys that
    # WeChat omitted are backfilled with 0.
    sample = {"list": [{"ref_date": "2017-01-02", "user_source": 0, "cumulate_user": 5}]}
    fields = ("user_source", "new_user", "cancel_user", "cumulate_user")
    for record in sample['list']:
        for field in fields:
            record.setdefault(field, 0)
        print(record)
6、结果展示:
如果您喜欢我写的博文,读后觉得收获很大,不妨小额赞助我一下,让我有动力继续写出高质量的博文,感谢您的赞赏!!!
这篇关于微信统计数据的获取及存储的文章就介绍到这儿,希望我们推荐的文章对编程师们有所帮助!