This commit is contained in:
parent
fff3e99e3a
commit
566b8ba1d3
|
@ -58,3 +58,31 @@ insert into last_day_counts(last_day, product_c, comment_c) values(4, 0, 0);
|
|||
insert into last_day_counts(last_day, product_c, comment_c) values(5, 0, 0);
|
||||
|
||||
|
||||
-- Top-10 best-selling products, one row per rank position (order_n 1..10).
-- Rows are pre-seeded empty by the INSERT statements below and then
-- overwritten in place by the Spark job (collect_top10_sells) via
-- "update ... where order_n = N".
create table top10_sells (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `order_n` int default 0,                        -- rank position, 1..10
  `product_name` varchar(1024) comment '商品名',  -- product name
  `comment_c` int default 0 comment '销量',       -- sales / comment count
  `good_c` int default 0 comment '好评',          -- positive-review count
  `price` int default 0 comment '价格',           -- price
  PRIMARY KEY (`id`)
) comment = '排名前10销量产品';
|
||||
|
||||
-- Seed placeholder rows for ranks 1..10; the Spark job later fills them
-- in with "update top10_sells ... where order_n = N", so every rank must
-- already exist. A single multi-row INSERT produces the same table state
-- (same values, same AUTO_INCREMENT ids) as ten separate statements.
insert into top10_sells(order_n, product_name, comment_c, good_c) values
  (1,  '', 0, 0),
  (2,  '', 0, 0),
  (3,  '', 0, 0),
  (4,  '', 0, 0),
  (5,  '', 0, 0),
  (6,  '', 0, 0),
  (7,  '', 0, 0),
  (8,  '', 0, 0),
  (9,  '', 0, 0),
  (10, '', 0, 0);
|
||||
|
||||
-- Generic key/value configuration table: `data` holds a JSON document
-- looked up by `key`.
create table datas (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `key` varchar(255),   -- lookup key; backticked because KEY is a MySQL reserved word
  `data` json,
  PRIMARY KEY (`id`)
) comment = '通用配置表';

-- Seed the row that the Spark job (collect_from_type) later updates with
-- per-source comment counts.
insert into datas (`key`, data) values ('from_type', '{}');
|
||||
|
|
|
@ -47,3 +47,23 @@ class LastDayCounts(models.Model):
|
|||
class Meta:
|
||||
managed = False
|
||||
db_table = 'last_day_counts'
|
||||
|
||||
class Top10Sells(models.Model):
    """Unmanaged mapping of the ``top10_sells`` table.

    One row per rank position; rows are pre-seeded by the schema SQL and
    updated in place by the Spark collector, so Django never creates or
    migrates this table (``managed = False``).
    """

    order_n = models.IntegerField(blank=True, null=True)     # rank position, 1..10
    product_name = models.CharField(max_length=1024, blank=True, null=True)
    comment_c = models.IntegerField(blank=True, null=True)   # sales / comment count
    good_c = models.IntegerField(blank=True, null=True)      # positive-review count
    price = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False              # table is created/maintained outside Django
        db_table = 'top10_sells'
|
||||
|
||||
|
||||
class Datas(models.Model):
    """Unmanaged mapping of the ``datas`` key/value configuration table.

    ``data`` is a JSON column in MySQL; it is surfaced here as text, so
    callers must ``json.loads``/``json.dumps`` the payload themselves.
    """

    key = models.CharField(max_length=255, blank=True, null=True)
    data = models.TextField(blank=True, null=True)  # This field type is a guess.

    class Meta:
        managed = False      # table is created/maintained outside Django
        db_table = 'datas'
|
||||
|
|
|
@ -12,7 +12,6 @@ $(function(){
|
|||
|
||||
var myColor = ['#1089E7', '#F57474', '#56D0E3', '#F8B448', '#8B78F6'];
|
||||
|
||||
//Top product sales chart (note: previous label "主要传染病" / "major infectious diseases" appears to be a leftover from an unrelated template)
|
||||
var histogramChart1 = echarts.init(document.getElementById('histogramChart1'));
|
||||
histogramChart1.setOption({
|
||||
|
||||
|
@ -221,7 +220,7 @@ $(function(){
|
|||
|
||||
})
|
||||
|
||||
//(stale label, was "主要疾病排行" / "major disease ranking" — template leftover)
|
||||
//Top-10 sales trend chart
|
||||
var histogramChart3 = echarts.init(document.getElementById('histogramChart3'));
|
||||
histogramChart3.setOption({
|
||||
|
||||
|
|
|
@ -28,4 +28,6 @@ def crawl(request):
|
|||
"last_day_comment": json.dumps(last_day_comment)})
|
||||
|
||||
def result(request):
    """Render the dashboard result page.

    All chart data is fetched separately by the page's JavaScript, so this
    view only serves the template.
    """
    template_name = 'myapp/result.html'
    return render(request, template_name)
|
|
@ -5,6 +5,7 @@ import os
|
|||
import pymysql
|
||||
import datetime
|
||||
import time
|
||||
import json
|
||||
|
||||
def mysql_query(sql):
|
||||
db = pymysql.connect("localhost","root","123456789","sparkproject" )
|
||||
|
@ -76,6 +77,22 @@ def get_last_day_count(spark):
|
|||
mysql_execute("update last_day_counts set product_c = {}, comment_c = {} where last_day = {}".format(
|
||||
jd_last_count, jd_comment_last_count, i+1))
|
||||
|
||||
def collect_top10_sells(spark):
    """Refresh the ``top10_sells`` table with the 10 products that have the
    most positive reviews.

    Reads the Spark table ``jd``, takes the top 10 rows ordered by
    ``good_count`` descending, and writes each into the pre-seeded
    ``top10_sells`` row with the matching rank (``order_n`` 1..10).

    :param spark: active SparkSession with the ``jd`` table registered.
    """
    df = spark.sql("select * from jd order by good_count desc limit 10")
    for rank, row in enumerate(df.rdd.collect(), start=1):
        # Bug fix: the original interpolated the raw product name into the
        # UPDATE with no quotes ("product_name = {}"), which is invalid SQL
        # for any non-numeric name and an injection vector. Quote the value
        # and escape embedded single quotes ('' is the SQL escape for ').
        name = (row["name"] or "").replace("'", "''")
        # Bug fix: int(row["price"]) raised TypeError on a NULL price;
        # fall back to the column's default of 0 instead.
        price = int(row["price"]) if row["price"] is not None else 0
        mysql_execute(
            "update top10_sells set product_name = '{}', good_c = {}, price = {} "
            "where order_n = {}".format(name, row["good_count"], price, rank))
|
||||
|
||||
def collect_from_type(spark):
    """Aggregate ``jd_comment`` rows by purchase source and persist the
    counts as a JSON object in ``datas`` under the key ``'from_type'``.

    :param spark: active SparkSession with the ``jd_comment`` table registered.
    """
    df = spark.sql("select from_type, count(*) N from jd_comment group by from_type")
    counts = {}
    for row in df.rdd.collect():
        if row["from_type"]:  # skip NULL/empty sources
            counts[row["from_type"]] = row["N"]
    # Bug fix: the original interpolated the JSON document into the UPDATE
    # unquoted ("set data = {}"), producing invalid SQL for any non-empty
    # payload. Quote it and escape embedded single quotes.
    payload = json.dumps(counts).replace("'", "''")
    mysql_execute(
        "update datas set data = '{}' where `key` = 'from_type'".format(payload))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# $example on:spark_hive$
|
||||
|
@ -96,6 +113,8 @@ if __name__ == "__main__":
|
|||
|
||||
if count == 0 or count >100:
|
||||
get_last_day_count(spark)
|
||||
collect_top10_sells(spark)
|
||||
collect_from_type(spark)
|
||||
count = 1
|
||||
|
||||
time.sleep(10)
|
||||
|
|
Loading…
Reference in New Issue