pms-parent/pms-global/src/main/java/com/dy/pmsGlobal/daoOth/OthStatisticWorkloadMapper.java | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
pms-parent/pms-global/src/main/java/com/dy/pmsGlobal/pojoOth/OthStatisticWorkload.java | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
pms-parent/pms-global/src/main/resources/mapper/OthStatisticWorkloadMapper.xml | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
pms-parent/pms-global/src/main/resources/mapper/StaDeviceProductionLogPastMapper.xml | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
pms-parent/pms-web-other/src/main/java/com/dy/pmsOther/config/AppStartupRunner.java | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 | |
pms-parent/pms-web-other/src/main/java/com/dy/pmsOther/task/WorkloadTask.java | ●●●●● 补丁 | 查看 | 原始文档 | blame | 历史 |
pms-parent/pms-global/src/main/java/com/dy/pmsGlobal/daoOth/OthStatisticWorkloadMapper.java
New file @@ -0,0 +1,31 @@
package com.dy.pmsGlobal.daoOth;

import com.dy.pmsGlobal.pojoOth.OthStatisticWorkload;
import org.apache.ibatis.annotations.Param;

import java.time.LocalDate;
import java.util.Date;

/**
 * Mapper for table oth_statistic_workload (per-user daily workload statistics).
 *
 * @author User
 * @createDate 2024-10-12 11:06:03
 * @Entity com.dy.pmsGlobal.pojoOth.OthStatisticWorkload
 */
public interface OthStatisticWorkloadMapper {

    /** Deletes one row by primary key; returns the affected row count. */
    int deleteByPrimaryKey(Long id);

    /** Inserts a full row (all columns); returns the affected row count. */
    int insert(OthStatisticWorkload record);

    /** Inserts only the non-null fields of the record; returns the affected row count. */
    int insertSelective(OthStatisticWorkload record);

    /** Loads one row by primary key, or null when absent. */
    OthStatisticWorkload selectByPrimaryKey(Long id);

    /** Updates only the non-null fields of the row identified by record.id. */
    int updateByPrimaryKeySelective(OthStatisticWorkload record);

    /** Updates every column of the row identified by record.id. */
    int updateByPrimaryKey(OthStatisticWorkload record);

    /** Returns the latest statistic_date already aggregated, or null when the table is empty. */
    Date selectMaxDate();

    /**
     * Aggregates production logs whose out_time lies in [startDt, endDt] into
     * oth_statistic_workload, one row per (user, type, node_content).
     *
     * FIX: @Param annotations added — the XML statement references
     * #{startDt}/#{endDt} by name, and MyBatis only resolves multi-argument
     * methods by parameter name when the code is compiled with -parameters;
     * without @Param this binds as param1/param2 and fails at runtime.
     */
    void insertBatch(@Param("startDt") Date startDt, @Param("endDt") Date endDt);
}
pms-parent/pms-global/src/main/java/com/dy/pmsGlobal/pojoOth/OthStatisticWorkload.java
New file @@ -0,0 +1,64 @@
package com.dy.pmsGlobal.pojoOth;

import com.alibaba.fastjson2.annotation.JSONField;
import com.alibaba.fastjson2.writer.ObjectWriterImplToString;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
import java.util.Date;
import com.dy.common.po.BaseEntity;
import lombok.*;

/**
 * Entity for table oth_statistic_workload: one row per (statistic day, user,
 * task type, node content) holding the aggregated work count.
 *
 * @TableName oth_statistic_workload
 */
@TableName(value ="oth_statistic_workload", autoResultMap = true)
@Data
@Builder
@ToString
@NoArgsConstructor
@AllArgsConstructor
public class OthStatisticWorkload implements BaseEntity {
    /**
     * Primary key. Serialized as a String in JSON so large Long values are not
     * truncated by JavaScript number precision on the client side.
     */
    @JSONField(serializeUsing= ObjectWriterImplToString.class)
    @TableId(value = "id", type = IdType.INPUT)
    private Long id;

    /**
     * Statistic date — the calendar day this workload row was aggregated for.
     */
    private Date statisticDate;

    /**
     * User entity id. Serialized as a String in JSON for the same reason as id.
     */
    @JSONField(serializeUsing= ObjectWriterImplToString.class)
    private Long userId;

    /**
     * User display name (denormalized from ba_user at aggregation time).
     */
    private String userName;

    /**
     * Work type as free text (e.g. planned task / ad-hoc task — values come
     * from the aggregation SQL).
     */
    private String type;

    /**
     * Node work content.
     */
    private String nodeContent;

    /**
     * Aggregated count.
     */
    private Integer number;
}
pms-parent/pms-global/src/main/resources/mapper/OthStatisticWorkloadMapper.xml
New file @@ -0,0 +1,127 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.dy.pmsGlobal.daoOth.OthStatisticWorkloadMapper">

    <!-- Column-to-property mapping for oth_statistic_workload rows. -->
    <resultMap id="BaseResultMap" type="com.dy.pmsGlobal.pojoOth.OthStatisticWorkload">
            <id property="id" column="id" jdbcType="BIGINT"/>
            <result property="statisticDate" column="statistic_date" jdbcType="DATE"/>
            <result property="userId" column="user_id" jdbcType="BIGINT"/>
            <result property="userName" column="user_name" jdbcType="VARCHAR"/>
            <result property="type" column="type" jdbcType="VARCHAR"/>
            <result property="nodeContent" column="node_content" jdbcType="VARCHAR"/>
            <result property="number" column="number" jdbcType="INTEGER"/>
    </resultMap>

    <!-- Reusable full column list for SELECT statements. -->
    <sql id="Base_Column_List">
        id,statistic_date,user_id,
        user_name,type,node_content,
        number
    </sql>

    <!-- Loads one statistics row by primary key. -->
    <select id="selectByPrimaryKey" parameterType="java.lang.Long" resultMap="BaseResultMap">
        select
        <include refid="Base_Column_List" />
        from oth_statistic_workload
        where  id = #{id,jdbcType=BIGINT}
    </select>

    <!-- Latest day already aggregated; NULL when the table is empty
         (the daily task uses this as its resume point). -->
    <select id="selectMaxDate" resultType="java.util.Date">
        select max(statistic_date)
        from oth_statistic_workload
    </select>

    <!-- Deletes one statistics row by primary key. -->
    <delete id="deleteByPrimaryKey" parameterType="java.lang.Long">
        delete from oth_statistic_workload
        where  id = #{id,jdbcType=BIGINT}
    </delete>

    <!-- Full-column insert; DB generates the id. -->
    <insert id="insert" keyColumn="id" keyProperty="id" parameterType="com.dy.pmsGlobal.pojoOth.OthStatisticWorkload" useGeneratedKeys="true">
        insert into oth_statistic_workload
        ( id,statistic_date,user_id
        ,user_name,type,node_content
        ,number)
        values (#{id,jdbcType=BIGINT},#{statisticDate,jdbcType=DATE},#{userId,jdbcType=BIGINT}
        ,#{userName,jdbcType=VARCHAR},#{type,jdbcType=VARCHAR},#{nodeContent,jdbcType=VARCHAR}
        ,#{number,jdbcType=INTEGER})
    </insert>

    <!-- Inserts only the non-null properties of the record. -->
    <insert id="insertSelective" keyColumn="id" keyProperty="id" parameterType="com.dy.pmsGlobal.pojoOth.OthStatisticWorkload" useGeneratedKeys="true">
        insert into oth_statistic_workload
        <trim prefix="(" suffix=")" suffixOverrides=",">
                <if test="id != null">id,</if>
                <if test="statisticDate != null">statistic_date,</if>
                <if test="userId != null">user_id,</if>
                <if test="userName != null">user_name,</if>
                <if test="type != null">type,</if>
                <if test="nodeContent != null">node_content,</if>
                <if test="number != null">number,</if>
        </trim>
        <trim prefix="values (" suffix=")" suffixOverrides=",">
                <if test="id != null">#{id,jdbcType=BIGINT},</if>
                <if test="statisticDate != null">#{statisticDate,jdbcType=DATE},</if>
                <if test="userId != null">#{userId,jdbcType=BIGINT},</if>
                <if test="userName != null">#{userName,jdbcType=VARCHAR},</if>
                <if test="type != null">#{type,jdbcType=VARCHAR},</if>
                <if test="nodeContent != null">#{nodeContent,jdbcType=VARCHAR},</if>
                <if test="number != null">#{number,jdbcType=INTEGER},</if>
        </trim>
    </insert>

    <!-- Aggregates one day's production logs (live table + past-log archive)
         into per-user workload rows. #{startDt}/#{endDt} bound the day being
         aggregated; statistic_date is derived from #{startDt}.
         The CTE "total" merges both log tables (non-repair rows only) and joins
         plan/product and user names; the three UNION branches then split the
         counts into planned tasks (curr_node set), ad-hoc tasks with a device
         code, and ad-hoc tasks without one. The branches are mutually exclusive
         by their WHERE predicates, so UNION vs UNION ALL does not change the
         result (UNION ALL would merely skip a needless de-dup pass).
         NOTE(review): BETWEEN is inclusive at both ends — if out_time carries
         sub-second precision, rows after 23:59:59 but before midnight are
         missed; confirm out_time has second precision.
         NOTE(review): DISTINCT here applies to the whole select list, not only
         device_no, despite the DISTINCT(t.device_no) spelling. -->
    <insert id="insertBatch">
        INSERT INTO oth_statistic_workload (statistic_date,user_id
        ,user_name,type,node_content
        ,number)
        WITH total as (select DISTINCT(t.device_no),t.curr_node,t.node_content,t.number,p.pro_name,
            p.plan_name,u.id user_id,u.`name` user_name
            FROM (select * from sta_device_production_log_past
                    where repair_id IS NULL and out_time BETWEEN #{startDt} AND #{endDt}
                  union
                  select * from sta_device_production_log
                    where repair_id IS NULL and out_time BETWEEN #{startDt} AND #{endDt} ) t
            left join (select pap.id,pap.`name` as plan_name,pp.`name` as pro_name
                    from pr_assembly_plan pap, plt_product pp where pap.pro_id = pp.id) p on t.plan_id = p.id
            left JOIN ba_user u on u.id=t.updated_by)
        select DATE_FORMAT(#{startDt}, '%Y-%m-%d') as statistic_date,user_id,
            user_name,'计划任务' AS type, node_content ,sum(number) as number
            from total where curr_node IS NOT NULL
            GROUP BY user_id,user_name,node_content
        UNION
        select DATE_FORMAT(#{startDt}, '%Y-%m-%d') as statistic_date, user_id,
            user_name,'临时任务' AS type, node_content, sum(number) as number
            from total where curr_node IS NULL AND (device_no !='' AND device_no IS NOT NULL)
            GROUP BY user_id,user_name,node_content
        UNION
        select DATE_FORMAT(#{startDt}, '%Y-%m-%d') as statistic_date,user_id ,
            user_name,'临时任务(无设备码)' AS type,node_content, sum(number) as number
            from total where curr_node IS NULL AND (device_no IS NULL OR device_no ='')
            GROUP BY user_id,user_name,node_content;
    </insert>

    <!-- Updates only the non-null properties of the record. -->
    <update id="updateByPrimaryKeySelective" parameterType="com.dy.pmsGlobal.pojoOth.OthStatisticWorkload">
        update oth_statistic_workload
        <set>
                <if test="statisticDate != null">
                    statistic_date = #{statisticDate,jdbcType=DATE},
                </if>
                <if test="userId != null">
                    user_id = #{userId,jdbcType=BIGINT},
                </if>
                <if test="userName != null">
                    user_name = #{userName,jdbcType=VARCHAR},
                </if>
                <if test="type != null">
                    type = #{type,jdbcType=VARCHAR},
                </if>
                <if test="nodeContent != null">
                    node_content = #{nodeContent,jdbcType=VARCHAR},
                </if>
                <if test="number != null">
                    number = #{number,jdbcType=INTEGER},
                </if>
        </set>
        where   id = #{id,jdbcType=BIGINT}
    </update>

    <!-- Full-column update by primary key. -->
    <update id="updateByPrimaryKey" parameterType="com.dy.pmsGlobal.pojoOth.OthStatisticWorkload">
        update oth_statistic_workload
        set
            statistic_date =  #{statisticDate,jdbcType=DATE},
            user_id =  #{userId,jdbcType=BIGINT},
            user_name =  #{userName,jdbcType=VARCHAR},
            type =  #{type,jdbcType=VARCHAR},
            node_content =  #{nodeContent,jdbcType=VARCHAR},
            number =  #{number,jdbcType=INTEGER}
        where   id = #{id,jdbcType=BIGINT}
    </update>
</mapper>
pms-parent/pms-global/src/main/resources/mapper/StaDeviceProductionLogPastMapper.xml
@@ -45,6 +45,7 @@
    </select>
    <!-- Archive past days' logs into sta_device_production_log_past
         (copies every row whose out_time is before today).
         NOTE(review): this only INSERTs — the copied rows are presumably
         deleted from sta_device_production_log elsewhere; verify, otherwise
         re-running duplicates rows in the past table. -->
    <insert id="insertIntoPastLogs">
        INSERT INTO sta_device_production_log_past
        SELECT * FROM sta_device_production_log WHERE DATE(out_time) != CURDATE();
    </insert>
pms-parent/pms-web-other/src/main/java/com/dy/pmsOther/config/AppStartupRunner.java
New file @@ -0,0 +1,20 @@
package com.dy.pmsOther.config;

import com.dy.common.schedulerTask.SchedulerTaskSupport;
import com.dy.pmsGlobal.daoSta.StaDeviceProductionLogPastMapper;
import com.dy.pmsOther.task.WorkloadTask;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

/**
 * Registers scheduled jobs once the Spring application context has started.
 */
@Slf4j
@Component
public class AppStartupRunner implements CommandLineRunner {
    @Override
    public void run(String... args) throws Exception {
        // Daily per-user workload statistics job ("workloadTask" in group "station").
        // The trailing (0, 5) presumably means fire at 00:05 each day — confirm
        // against SchedulerTaskSupport.addDailyJob's signature.
        // NOTE(review): WorkloadTask instances are created by the scheduler,
        // not by Spring, so the task must look up its beans itself.
        SchedulerTaskSupport.addDailyJob("workloadTask", "station", WorkloadTask.class, null, 0, 5);
    }
}
pms-parent/pms-web-other/src/main/java/com/dy/pmsOther/task/WorkloadTask.java
New file @@ -0,0 +1,47 @@
package com.dy.pmsOther.task;

import com.dy.common.schedulerTask.TaskJob;
import com.dy.common.springUtil.SpringContextUtil;
import com.dy.pmsGlobal.daoOth.OthStatisticWorkloadMapper;
import com.dy.pmsGlobal.pojoSta.StaDeviceProductionLog;
import lombok.extern.slf4j.Slf4j;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Date;
import java.util.List;

/**
 * Daily job (scheduled at 00:05 by AppStartupRunner) that aggregates per-user
 * workload statistics. It resumes from the latest statistic_date already in
 * oth_statistic_workload and runs insertBatch once per fully completed day up
 * to and including yesterday.
 */
@Slf4j
public class WorkloadTask extends TaskJob {

    @Override
    // NOTE(review): this class is instantiated by Quartz, not obtained from the
    // Spring context, so @Transactional is likely never proxied and has no
    // effect here — each insertBatch runs in its own transaction. Confirm.
    @Transactional
    public void execute(JobExecutionContext ctx) throws JobExecutionException {
        log.info("启动统计人员工作量任务");
        OthStatisticWorkloadMapper workloadMapper = SpringContextUtil.getBean(OthStatisticWorkloadMapper.class);
        // Resume point: the latest day already aggregated; null when the table
        // has never been seeded.
        Date maxDate = workloadMapper.selectMaxDate();
        if (maxDate != null) {
            LocalDate lastDone = maxDate.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
            LocalDate today = LocalDate.now();
            // Aggregate every not-yet-statted, fully completed day:
            // lastDone+1 .. yesterday (today is excluded — at 00:05 its data is
            // incomplete and, once statted, would never be recomputed).
            // BUG FIX: the old loop assigned nextDate = firstDay.plusDays(i)
            // AFTER calling insertBatch, so the first day after maxDate was
            // aggregated twice (duplicate rows), and when maxDate was yesterday
            // it statted the current, incomplete day instead.
            for (LocalDate day = lastDone.plusDays(1); day.isBefore(today); day = day.plusDays(1)) {
                Date startDt = Date.from(day.atStartOfDay(ZoneId.systemDefault()).toInstant());
                // End bound 23:59:59 — the mapper's BETWEEN is inclusive; rows
                // with sub-second out_time in the last second of the day would
                // be missed. TODO confirm out_time has second precision.
                Date endDt = Date.from(LocalDateTime.of(day, LocalTime.of(23, 59, 59))
                        .atZone(ZoneId.systemDefault()).toInstant());
                workloadMapper.insertBatch(startDt, endDt);
            }
        } else {
            // Without at least one seed row the job can never determine where
            // to start, so it would silently do nothing forever.
            log.warn("oth_statistic_workload表为空,未找到起始统计日期,本次不执行统计");
        }
        // FIX: the closing log previously said "生产日志移动任务结束" — a
        // copy-paste from the log-move task; corrected to this task's name.
        log.info("统计人员工作量任务结束");
    }
}