From f14148c72af9fb1a3b701b92a3d02615f7b6c47e Mon Sep 17 00:00:00 2001 From: dev_xulongjin Date: Fri, 23 May 2025 09:31:27 +0800 Subject: [PATCH] =?UTF-8?q?build(bigdata-system-dev):=20=E5=88=9D=E5=A7=8B?= =?UTF-8?q?=E5=8C=96=E9=A1=B9=E7=9B=AE=E4=BE=9D=E8=B5=96=E5=92=8C=E6=95=B0?= =?UTF-8?q?=E6=8D=AE=E5=BA=93=E8=A1=A8=E7=BB=93=E6=9E=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 新增 MySQL 数据库表结构文件,创建多个数据表和视图- 在 pom.xml 中添加 Scala、Flink、Spark、MySQL、Fastjson 和 Redis 相关依赖- 更新项目属性,设置 Scala版本为 2.12 - 移除原有的 Java编译源和目标版本属性 --- bigdata-system-dev/pom.xml | 123 +++++++++++++++++- .../scala/experiment_4/mysql创建表SQL.sql | 110 ++++++++++++++++ 2 files changed, 230 insertions(+), 3 deletions(-) create mode 100755 bigdata-system-dev/src/main/scala/experiment_4/mysql创建表SQL.sql diff --git a/bigdata-system-dev/pom.xml b/bigdata-system-dev/pom.xml index ce8357f..0630db5 100644 --- a/bigdata-system-dev/pom.xml +++ b/bigdata-system-dev/pom.xml @@ -8,10 +8,127 @@ bigdata-system-dev 1.0-SNAPSHOT + - 8 - 8 - UTF-8 + 2.12 + 1.14.0 + 2.4.1 + 2.4.2 + 1.2.83 + 3.3.0 + 1.1.5 + 8.0.28 + + + + org.apache.flink + flink-runtime-web_${scala.version} + ${flink.version} + + + org.apache.flink + flink-clients_${scala.version} + ${flink.version} + + + org.apache.flink + flink-streaming-scala_${scala.version} + ${flink.version} + + + org.apache.flink + flink-connector-kafka_${scala.version} + ${flink.version} + + + org.apache.flink + flink-table-planner_${scala.version} + ${flink.version} + + + org.apache.flink + flink-table-api-scala-bridge_${scala.version} + ${flink.version} + + + org.apache.flink + flink-connector-jdbc_${scala.version} + ${flink.version} + + + + + + org.apache.spark + spark-core_2.12 + 3.2.1 + + + + org.apache.spark + spark-sql_2.12 + 3.2.1 + + + + org.apache.spark + spark-streaming_2.12 + 3.2.1 + + + + org.apache.spark + spark-streaming-kafka-0-10_2.12 + 3.2.1 + + + + org.apache.spark + spark-graphx_2.12 + 3.2.1 + + + + + mysql + 
mysql-connector-java + ${mysql-connector.verion} + + + + + com.alibaba + fastjson + ${fastjson.version} + + + + + redis.clients + jedis + ${redis.version} + + + org.apache.flink + flink-connector-redis_2.11 + + + org.apache.flink + flink-shaded-hadoop2 + + + org.apache.commons + commons-lang3 + + + ${flink-connector-redis.verion} + + + org.xerial.snappy + snappy-java + 1.1.10.1 + + \ No newline at end of file diff --git a/bigdata-system-dev/src/main/scala/experiment_4/mysql创建表SQL.sql b/bigdata-system-dev/src/main/scala/experiment_4/mysql创建表SQL.sql new file mode 100755 index 0000000..349904c --- /dev/null +++ b/bigdata-system-dev/src/main/scala/experiment_4/mysql创建表SQL.sql @@ -0,0 +1,110 @@ +show databases; + +create database if not exists spark_web CHARACTER SET utf8mb4; + +use spark_web; +#3.3创建一个数据库汇总表 +create table sum ( + imei varchar(10) default null comment'用户编号', + log_times int(2) default null comment'登陆次数', + online_time int(10) default null comment'在线时长(秒)' +) engine=innodb default charset=utf8; + + +#3.4创建一个数据明细表 +create table detail ( + imei varchar(10) default null comment'用户编号', + first_login_time varchar(100) default null comment'首次登录时间', + online_time int(10) default null comment'在线时长(秒)' +) engine=innodb default charset=utf8; + +#3.5创建一个数据原始明细表 +create table cleanMap ( + imei varchar(10) default null comment'用户编号', + logid varchar(100) default null comment'登录时间', + requestip varchar(11) default null comment'登录IP地址', + areacode varchar(11) default null comment'登录区域', + requesttype varchar(11) default null comment'请求类型', + channelne varchar(11) default null comment'渠道' +)engine=innodb default charset=utf8; + + +#3.6创建一个区域维表 +create table t_dim_area ( + areacode varchar(11) default null comment'区域编码', + areaname varchar(100) default null comment'区域名称' +)engine=innodb default charset=utf8; + + +#一定要插入数据,否则前后端无法进行数据分析 + +insert into t_dim_area values('0','浙江省丽水市'); +insert into t_dim_area values('1','福建省南平市'); +insert into t_dim_area 
values ('2', '福建省福州市');

-- 3.7 Channel dimension table: channel code -> channel display name.
create table if not exists t_dim_channel (
    channelno   varchar(11)  default null comment '渠道编号',
    channelname varchar(100) default null comment '渠道名称'
) engine=innodb default charset=utf8 row_format=dynamic;

-- Seed rows; explicit column list so a later schema change fails loudly
-- instead of silently shifting values into the wrong columns.
insert into t_dim_channel (channelno, channelname)
values ('0', '手机'),
       ('1', 'PC'),
       ('2', '平板电脑');

-- 3.8 Request-type dimension table: request-type code -> display name.
create table if not exists t_dim_requesttype (
    requesttype     varchar(11)  default null comment '请求类型',
    requesttypename varchar(100) default null comment '请求类型名称'
) engine=innodb default charset=utf8 row_format=dynamic;

insert into t_dim_requesttype (requesttype, requesttypename)
values ('0', 'GET方式'),
       ('1', 'POST方式');

-- 3.9 Reporting views (five).
-- NOTE(review): the join column on cleanMap is literally named `channelne`
-- (apparent typo for `channelno` in the cleanMap DDL); the views must keep
-- referencing it as-is until the table itself is renamed.

-- 3.9.1 Distinct users per (area, channel) — channel trend analysis.
create or replace view v_area_channel as
    select
        ar.areaname,
        ch.channelname,
        count(distinct cm.imei) as num
    from cleanMap cm
    inner join t_dim_area    ar on cm.areacode  = ar.areacode
    inner join t_dim_channel ch on cm.channelne = ch.channelno
    group by ar.areaname, ch.channelname;

-- 3.9.2 Distinct users per (area, request type) — request-type comparison.
create or replace view v_area_requesttype as
    select
        ar.areaname,
        rt.requesttypename,
        count(distinct cm.imei) as num
    from cleanMap cm
    inner join t_dim_area        ar on cm.areacode    = ar.areacode
    inner join t_dim_requesttype rt on cm.requesttype = rt.requesttype
    group by ar.areaname, rt.requesttypename;

-- 3.9.3 Distinct users per channel — channel pie chart.
create or replace view v_channelno as
    select
        ch.channelname,
        count(distinct cm.imei) as num
    from cleanMap cm
    inner join t_dim_channel ch on cm.channelne = ch.channelno
    group by ch.channelname;

-- 3.9.4 Per-user login summary: login count, first login time, online seconds.
-- DISTINCT kept from the original: it collapses duplicates if a user appears
-- more than once in either aggregate table.
create or replace view v_user_login as
    select distinct
        sm.imei,
        sm.log_times,
        dt.first_login_time,
        sm.online_time
    from sum sm
    inner join detail dt on sm.imei = dt.imei;

-- 3.9.5 Full per-user detail: raw log fields joined to all three dimensions
-- plus both aggregate tables.
create or replace view v_user_detail as
    select distinct
        cm.imei,
        cm.requestip,
        rt.requesttypename,
        dt.first_login_time,
        sm.log_times,
        sm.online_time,
        ar.areaname,
        ch.channelname
    from cleanMap cm
    inner join sum               sm on cm.imei        = sm.imei
    inner join detail            dt on cm.imei        = dt.imei
    inner join t_dim_requesttype rt on cm.requesttype = rt.requesttype
    inner join t_dim_area        ar on cm.areacode    = ar.areacode
    inner join t_dim_channel     ch on cm.channelne   = ch.channelno;

-- List the tables/views now present in spark_web.
show tables;

show databases;
use spark_web;
show tables;

-- Spot-check loaded data with explicit column lists (no SELECT *),
-- so output stays stable if columns are ever added.
select imei, first_login_time, online_time
from detail;

select imei, log_times, online_time
from sum;

select imei, logid, requestip, areacode, requesttype, channelne
from cleanMap;