wenhongquan 3 months ago
parent
commit
14d2471ee1

+ 7 - 0
ruoyi-admin/pom.xml

@@ -100,6 +100,13 @@
             <scope>test</scope>
         </dependency>
 
+
+
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+        </dependency>
+
         <!-- skywalking logback integration -->
 <!--        <dependency>-->
 <!--            <groupId>org.apache.skywalking</groupId>-->

+ 107 - 0
ruoyi-admin/src/main/java/org/dromara/web/config/KafkaConfig.java

@@ -0,0 +1,107 @@
+package org.dromara.web.config;
+
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.annotation.EnableKafka;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.*;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * KafkaConfig
+ * Kafka configuration: producer/consumer factories, KafkaTemplate, listener container factory and AdminClient.
+ */
+
+@Configuration
+@EnableKafka
+public class KafkaConfig {
+    @Value("${kafka.bootstrap-servers}")
+    private String kafkaServer = "kafka-ip:9092"; // Kafka bootstrap servers address
+
+
+    /**
+     * Producer factory; sets the producer-related configuration.
+     *
+     * @return ProducerFactory<String, Object>
+     */
+    @Bean
+    public ProducerFactory<String, Object> producerFactory() {
+        Map<String, Object> props = new HashMap<>();
+        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer); // Kafka broker address
+        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); // key serializer
+        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); // value serializer
+        props.put(ProducerConfig.ACKS_CONFIG, "all"); // ack mode: "all" = all replicas, "1" = leader only, "0" = no ack
+        props.put(ProducerConfig.BATCH_SIZE_CONFIG, "10"); // batch size in bytes
+        props.put(ProducerConfig.LINGER_MS_CONFIG, "1"); // linger time in ms; a batch is sent when either batch.size or linger.ms is reached first
+        return new DefaultKafkaProducerFactory<>(props);
+    }
+
+    /**
+     * KafkaTemplate used to send messages (producer helper).
+     *
+     * @return KafkaTemplate<String, Object>
+     */
+    @Bean
+    public KafkaTemplate<String, Object> kafkaTemplate() {
+        return new KafkaTemplate<>(producerFactory());
+    }
+
+
+
+    /**
+     * Consumer factory; sets the consumer-related configuration.
+     *
+     * @return ConsumerFactory<String, Object>
+     */
+    @Bean
+    public ConsumerFactory<String, Object> consumerFactory() {
+        Map<String, Object> props = new HashMap<>();
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
+        props.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 5 * 1024 * 1024); // max bytes fetched per partition
+        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true); // auto-commit offsets
+        props.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 50 * 1000 * 1000); // request timeout (ms)
+        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100); // max records per poll
+        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 60000000); // max interval between polls (ms)
+        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+        return new DefaultKafkaConsumerFactory<>(props);
+
+    }
+
+    /**
+     * Listener container factory; used when creating listener containers.
+     *
+     * @return ConcurrentKafkaListenerContainerFactory<String, Object>
+     */
+    @Bean
+    public ConcurrentKafkaListenerContainerFactory<String, Object> kafkaListenerContainerFactory() {
+        ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
+        factory.setConsumerFactory(consumerFactory());
+        return factory;
+    }
+
+    /**
+     * Kafka AdminClient used for topic administration.
+     *
+     * @return AdminClient
+     */
+    @Bean
+    public AdminClient adminClient() {
+        Properties props = new Properties();
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaServer);
+        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);   // not used by AdminClient (ignored)
+        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); // not used by AdminClient (ignored)
+        AdminClient adminClient = AdminClient.create(props);
+        return adminClient;
+    }
+}
+
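For reference, a minimal usage sketch of the beans declared above (not part of this commit): a hypothetical service that injects the KafkaTemplate<String, Object> bean and publishes a message to the topic consumed elsewhere in this changeset. The payload is illustrative only.

    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.stereotype.Service;

    // Hypothetical class, for illustration only.
    @Service
    public class DemoKafkaSender {

        private final KafkaTemplate<String, Object> kafkaTemplate;

        public DemoKafkaSender(KafkaTemplate<String, Object> kafkaTemplate) {
            this.kafkaTemplate = kafkaTemplate;
        }

        // publish a sample payload; the topic name matches kafka.topics in application.yml
        public void sendDemo() {
            kafkaTemplate.send("zs_objects_r2p3", "{\"MsgType\":1001}");
        }
    }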

+ 205 - 0
ruoyi-admin/src/main/java/org/dromara/web/task/KafkaMessageConsumer.java

@@ -0,0 +1,205 @@
+package org.dromara.web.task;
+
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.io.FileUtil;
+import cn.hutool.core.thread.ThreadUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.http.HttpUtil;
+import cn.hutool.json.JSONArray;
+import cn.hutool.json.JSONObject;
+import cn.hutool.json.JSONUtil;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dromara.common.core.service.OssService;
+import org.dromara.common.core.utils.SpringUtils;
+import org.dromara.system.domain.bo.TblEventBo;
+import org.dromara.system.domain.vo.SysOssUploadVo;
+import org.dromara.system.domain.vo.SysOssVo;
+import org.dromara.system.service.ISysOssService;
+import org.dromara.system.service.ITblEventService;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+public class KafkaMessageConsumer {
+    public void consumerMessage(List<ConsumerRecord<String, Object>> message) {
+        System.out.println("Received messages: " + message);
+        message.forEach(consumerRecord ->{
+
+            if(consumerRecord.topic().equals("zs_objects_r2p3")){
+                    ThreadUtil.execAsync(() -> {
+                        try{JSONObject msg =  JSONUtil.parseObj( consumerRecord.value());
+                            if(msg.getInt("MsgType")!=1001) return;
+                            JSONArray list = msg.getJSONArray("ChannelEvtInfo");
+                            for (int i = 0; i < list.size(); i++) {
+                                JSONObject item = list.getJSONObject(i);
+                                JSONArray envlist = item.getJSONArray("Evt_List");
+                                for (int j = 0; j < envlist.size(); j++) {
+                                    JSONObject envitem = envlist.getJSONObject(j);
+                                    envitem.set("from","源驶科技");
+                                    envitem.set("lx", getEventName(envitem.getInt("EvtType")));
+                                    TblEventBo tblEventBo = new TblEventBo();
+                                    tblEventBo.setCreateTime(DateUtil.date(msg.getLong("Timestamp")));
+                                    tblEventBo.setExt2(JSONUtil.toJsonStr(envitem));
+                                    List<String> urls = new ArrayList<>();
+                                    // download the event images/video and upload them to OSS
+                                    String path =envitem.getStr("EventImagePath");
+                                    String path2 =envitem.getStr("EventImagePath2");
+                                    String path3 =envitem.getStr("EventImagePath3");
+                                    String path4 =envitem.getStr("EventImagePath4");
+                                    String path5 =envitem.getStr("EventVideoPath");
+                                    if(!StrUtil.isEmptyIfStr( path)){
+                                        path = downloadImage(path);
+                                        urls.add( path);
+                                    }
+                                    if(!StrUtil.isEmptyIfStr( path2)){
+                                        path2 = downloadImage(path2);
+                                        urls.add( path2);
+                                    }
+                                    if(!StrUtil.isEmptyIfStr( path3)){
+                                        path3 = downloadImage(path3);
+                                        urls.add( path3);
+                                    }
+                                    if(!StrUtil.isEmptyIfStr( path4)){
+                                        path4 = downloadImage(path4);
+                                        urls.add( path4);
+                                    }
+                                    if(!StrUtil.isEmptyIfStr( path5)){
+                                        path5 = downloadImage(path5);
+                                        urls.add( path5);
+                                    }
+                                    tblEventBo.setExt1(JSONUtil.toJsonStr(urls));
+                                    tblEventBo.setAddr(msg.getStr("DevNo"));
+                                    SpringUtils.getBean(ITblEventService.class).insertByBo(tblEventBo);
+                                }
+                            }
+
+                        }catch (Exception e){
+                            e.printStackTrace();
+                        }
+
+                    });
+
+            }
+        });
+
+        // manual acknowledgement (disabled; the consumer is configured with auto-commit)
+//        ack.acknowledge();
+    }
+
+    private String downloadImage(String path){
+        if(!FileUtil.exist("./temp/")){
+            FileUtil.mkdir("./temp/");
+        }
+        File file = HttpUtil.downloadFileFromUrl(path, FileUtil.file("./temp/"));
+        SysOssVo oss = SpringUtils.getBean(ISysOssService.class).upload(file);
+        return oss.getFileName();
+    }
+
+
+    /**
+     * Returns the event name for a given decimal event code.
+     *
+     * @param code event code (decimal integer)
+     * @return the matching event name, or null if no match is found
+     */
+    public static String getEventName(int code) {
+        switch (code) {
+            case 2: return "慢速";
+            case 4: return "超速";
+            case 8: return "压线";
+            case 16: return "异常变道";
+            case 17: return "周界防范";
+            case 64: return "进入非法区域";
+            case 65: return "机动车进入非法区域";
+            case 66: return "非机动车进入非法区域";
+            case 128: return "非法停止";
+            case 129: return "机动车非法停止";
+            case 130: return "非机动车非法停止";
+            case 256: return "非法逆行";
+            case 257: return "机动车逆行";
+            case 258: return "非机动车逆行";
+            case 259: return "行人非法逆行";
+            case 512: return "抛洒物";
+            case 1024: return "拥堵";
+            case 2048: return "行人";
+            case 2304: return "人员攀爬";
+            case 2305: return "翻越栏杆";
+            case 2306: return "人员聚集";
+            case 2307: return "人员离岗";
+            case 2308: return "徘徊检测";
+            case 2309: return "人员滞留";
+            case 2310: return "人员睡岗";
+            case 2311: return "未穿工服检测";
+            case 2312: return "未戴安全帽";
+            case 2313: return "抽烟";
+            case 2314: return "打电话";
+            case 2315: return "跌倒";
+            case 2316: return "陌生人检测";
+            case 2317: return "火焰检测";
+            case 2318: return "有色气体检测";
+            case 2319: return "占用消防通道";
+            case 2320: return "人员过少";
+            case 2321: return "打架";
+            case 2322: return "玩手机";
+            case 2323: return "下蹲";
+            case 2324: return "奔跑";
+            case 2325: return "未戴口罩";
+            case 2326: return "电动车入";
+            case 2327: return "电梯开门";
+            case 2328: return "落水";
+            case 2329: return "渣土车未盖";
+            case 2330: return "小动物";
+            case 2331: return "未穿反光衣";
+            case 2332: return "短袖";
+            case 2333: return "长袖";
+            case 2334: return "灭火器移位";
+            case 2335: return "遗留物";
+            case 2336: return "占道经营";
+            case 2337: return "垃圾堆积";
+            case 2338: return "垃圾满溢";
+            case 2339: return "道路积水";
+            case 2340: return "裸土未盖";
+            case 2341: return "垃圾桶起火";
+            case 2342: return "焚烧垃圾";
+            case 2343: return "焚烧秸秆";
+            case 2344: return "街道垃圾识别";
+            case 2345: return "违规店外经营";
+            case 2346: return "违规非法摆摊";
+            case 2347: return "违规撑伞";
+            case 2348: return "人员越界";
+            case 2349: return "重点人员检测";
+            case 2350: return "老鼠识别";
+            case 2351: return "电梯超员";
+            case 4096: return "大货车禁行";
+            case 4097: return "危化品车辆";
+            case 4102: return "大货车慢行";
+            case 4103: return "大货车超速";
+            case 8192: return "应急车道占道";
+            case 16384: return "非法走机动车道";
+            case 16386: return "外卖/快递车闯入";
+            case 32768: return "违法上下客";
+            case 32769: return "未礼让行人";
+            case 32770: return "区域人数检测";
+            case 65536: return "施工";
+            case 131072: return "交通事故";
+            case 262144: return "违法通行";
+            case 262145: return "违法直行";
+            case 262146: return "违法右转";
+            case 262147: return "违法左转";
+            case 262148: return "违法掉头";
+            case 262149: return "违法压线";
+            case 524288: return "排队超限";
+            case 524289: return "机动车驶离";
+            case 1048576: return "机动车危险驾驶";
+            case 2097152: return "非机动车未戴头盔驾驶";
+            case 2097153: return "非机动车载人";
+            case 4194304: return "排队溢出";
+            case 8388608: return "高温";
+            case 134217729: return "行人闯红灯";
+            case 134217730: return "非机动车闯红灯";
+            default: return null; // optionally return a default such as "未知事件" (unknown event)
+        }
+    }
+
+}
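A sketch of the payload shape consumerMessage expects, reconstructed only from the JSON fields the method reads above; all concrete values are invented for illustration, and the snippet assumes hutool's JSONUtil/JSONObject as already imported in this class.

    // hypothetical sample message, not part of the commit
    String sample = """
        {
          "MsgType": 1001,
          "Timestamp": 1710000000000,
          "DevNo": "DEV-001",
          "ChannelEvtInfo": [
            { "Evt_List": [ {
                  "EvtType": 4,
                  "EventImagePath": "http://camera-host/evt/1.jpg",
                  "EventVideoPath": "http://camera-host/evt/1.mp4"
            } ] }
          ]
        }
        """;
    JSONObject msg = JSONUtil.parseObj(sample);
    // consumerMessage only processes messages whose MsgType is 1001
    assert msg.getInt("MsgType") == 1001;
    // EvtType 4 maps to "超速" in the switch above; unmapped codes return null
    assert "超速".equals(KafkaMessageConsumer.getEventName(4));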

+ 48 - 0
ruoyi-admin/src/main/java/org/dromara/web/task/SysKafkaData.java

@@ -0,0 +1,48 @@
+package org.dromara.web.task;
+
+import jakarta.annotation.PostConstruct;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.dromara.web.utils.KafkaUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.support.Acknowledgment;
+import org.springframework.stereotype.Component;
+
+import java.util.List;
+
+@Component
+public class SysKafkaData {
+
+    @Value("${kafka.enabled}")
+    private Boolean isEnable;
+
+    @Value("${kafka.topics}")
+    private String topic;
+
+    private final KafkaUtils kafkaUtils;
+
+    @Autowired
+    public SysKafkaData(KafkaUtils kafkaUtils) {
+        this.kafkaUtils = kafkaUtils;
+    }
+
+    @PostConstruct
+    public void init() {
+        String listenerID = "kafka-listener-1";
+
+        try {
+            if (isEnable) {
+                // create and register the listener container, then make sure it is running
+                kafkaUtils.registerListenerContainer(listenerID, "test-consumer-group", new KafkaMessageConsumer(), KafkaMessageConsumer.class.getDeclaredMethod("consumerMessage", List.class), topic);
+                kafkaUtils.setStateNormalListenerContainer(listenerID);
+            }
+
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+    }
+
+
+
+}

+ 382 - 0
ruoyi-admin/src/main/java/org/dromara/web/utils/KafkaUtils.java

@@ -0,0 +1,382 @@
+package org.dromara.web.utils;
+
+import org.apache.kafka.clients.admin.AdminClient;
+import org.dromara.common.core.utils.SpringUtils;
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.kafka.core.KafkaTemplate;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.admin.*;
+import org.apache.kafka.common.config.ConfigResource;
+import org.apache.kafka.common.config.TopicConfig;
+import org.springframework.kafka.config.KafkaListenerContainerFactory;
+import org.springframework.kafka.config.MethodKafkaListenerEndpoint;
+import org.springframework.kafka.listener.MessageListenerContainer;
+import org.springframework.messaging.handler.annotation.support.DefaultMessageHandlerMethodFactory;
+import org.springframework.stereotype.Component;
+
+import java.lang.reflect.Method;
+import java.util.Collections;
+import java.util.Set;
+
+/**
+ * KafkaUtils
+ * Utility class for creating Kafka topics and managing listener containers.
+ */
+
+@Component
+@Slf4j
+public class KafkaUtils {
+
+    private final AdminClient adminClient;
+
+    private final KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;
+    private final KafkaTemplate kafkaTemplate;
+
+    /**
+     * Constructor injection.
+     *
+     * @param adminClient Kafka admin client
+     * @param kafkaListenerEndpointRegistry registry of Kafka listener containers
+     * @param kafkaTemplate Kafka producer template
+     */
+    @Autowired
+    public KafkaUtils(AdminClient adminClient, KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry, KafkaTemplate kafkaTemplate) {
+        this.adminClient = adminClient;
+        this.kafkaListenerEndpointRegistry = kafkaListenerEndpointRegistry;
+        this.kafkaTemplate = kafkaTemplate;
+    }
+
+    //region topic-related methods
+
+
+    /**
+     * Creates a Kafka topic.
+     *
+     * @param topicName topic name
+     * @param partitions number of partitions
+     * @param replicas number of replicas
+     */
+    public void createTopic(String topicName, int partitions, short replicas) throws Exception {
+        NewTopic newTopic = new NewTopic(topicName, partitions, replicas);
+        CreateTopicsResult topics = adminClient.createTopics(Collections.singleton(newTopic));
+        topics.all().get();
+        log.info("Topic [{}] created successfully", topicName);
+    }
+
+    /**
+     * Deletes a topic.
+     *
+     * @param topicName topic name
+     */
+    public void deleteTopic(String topicName) throws Exception {
+        DeleteTopicsResult deleteTopicsResult = adminClient.deleteTopics(Collections.singleton(topicName));
+        deleteTopicsResult.all().get();
+        log.info("Topic [{}] deleted successfully", topicName);
+    }
+
+    /**
+     * Updates the retention (expiration) time of a topic.
+     *
+     * @param topicName topic name
+     * @param ms retention time in milliseconds
+     */
+    public void updateTopicRetention(String topicName, String ms) throws Exception {
+        ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
+        ConfigEntry configEntry = new ConfigEntry(TopicConfig.RETENTION_MS_CONFIG, ms);
+        Config config = new Config(Collections.singleton(configEntry));
+        // build AlterConfigsOptions
+        AlterConfigsOptions alterConfigsOptions = new AlterConfigsOptions().timeoutMs(10000);
+        // apply the configuration change
+        adminClient.alterConfigs(Collections.singletonMap(resource, config), alterConfigsOptions).all().get();
+        log.info("Retention for topic [{}] set to {} ms", topicName, ms);
+    }
+
+
+    /**
+     * Lists all topic names.
+     *
+     * @return Set<String> of topic names
+     */
+    public Set<String> listTopic() throws Exception {
+        ListTopicsResult listTopicsResult = adminClient.listTopics();
+        return listTopicsResult.names().get();
+    }
+
+
+    /**
+     * Checks whether a topic exists.
+     *
+     * @param topicName topic name
+     * @return true if the topic exists
+     */
+    public boolean existTopic(String topicName) throws Exception {
+        Set<String> topics = listTopic();
+        if (topics == null || topics.isEmpty()) {
+            return false;
+        }
+        return topics.contains(topicName);
+    }
+
+
+    //endregion
+
+    //region producer message-sending example
+
+    /**
+     * Sends a message to the given topic.
+     *
+     * @param topic topic name
+     * @param msg message payload
+     */
+    public void sendMsg(String topic, Object msg) throws Exception {
+        kafkaTemplate.send(topic, msg);
+        //kafkaTemplate.send(topic, 2, "key", msg); // variant with partition and key
+    }
+    //endregion
+
+    //region consumer listener-container methods
+
+
+    /**
+     * Checks whether a listener container with the given id exists.
+     *
+     * @param id listener container id
+     * @return true if the container exists
+     */
+    public boolean existListenerContainer(String id) throws Exception {
+        Set<String> listenerIds = kafkaListenerEndpointRegistry.getListenerContainerIds();
+        return listenerIds.contains(id);
+    }
+
+
+    /**
+     * Creates a Kafka listener container and registers it; one container can listen to multiple topics.
+     *
+     * @param id container id (user-defined)
+     * @param consumerGroupId consumer group id (user-defined)
+     * @param processBean bean that processes the messages
+     * @param processMethod method that processes the messages
+     * @param topics topics to listen to
+     */
+    public void registerListenerContainer(String id, String consumerGroupId, Object processBean, Method processMethod, String... topics) throws Exception {
+        // check whether a container with this id already exists
+        if (existListenerContainer(id)) {
+            // a container with this id already exists; do nothing
+            log.info("A container with id {} already exists; skipping registration", id);
+            return;
+        }
+        // check that all topics exist
+        for (String topic : topics) {
+            if (!existTopic(topic)) {
+                // the topic does not exist; do nothing
+                log.info("Topic [{}] does not exist; skipping registration", topic);
+                return;
+            }
+        }
+        MethodKafkaListenerEndpoint<String, String> endpoint = new MethodKafkaListenerEndpoint<>();
+        // configure the listener endpoint
+        // id
+        endpoint.setId(id);
+        // consumer group
+        endpoint.setGroupId(consumerGroupId);
+        // topics to listen to (one or more)
+        endpoint.setTopics(topics);
+        // number of listener threads
+        endpoint.setConcurrency(3);
+        // batch listening
+        endpoint.setBatchListener(true);
+        // message handler method factory (the default factory is used here)
+        endpoint.setMessageHandlerMethodFactory(new DefaultMessageHandlerMethodFactory());
+        // the bean instance that actually handles the messages, e.g. new SomeConsumer()
+        endpoint.setBean(processBean);
+        // the method that actually handles the messages (name and parameters)
+        endpoint.setMethod(processMethod);
+
+
+        // register the container and start it immediately (startImmediately = true)
+        kafkaListenerEndpointRegistry.registerListenerContainer(endpoint, SpringUtils.getBean(KafkaListenerContainerFactory.class), true);
+        log.info("Kafka listener container: container with id {} has been registered, listening to topics: {}", id, topics);
+
+
+//        for (String topicName : topics) {
+//            if (!KafkaConfig.notExistTopicCreateContainerFlag && !nameTopics.contains(topicName)) {
+//                log.info("Topic [{}] does not exist; container not created", topicName);
+//                continue;
+//            }
+//            // create a Kafka listener endpoint
+//            MethodKafkaListenerEndpoint<String, String> endpoint = new MethodKafkaListenerEndpoint<>();
+//            // configure the listener endpoint
+//            // id
+//            endpoint.setId(topicName);
+//            // consumer group
+//            endpoint.setGroupId(topicName + "_consumer_group");
+//            // topic
+//            endpoint.setTopics(topicName);
+//            // number of listener threads
+//            endpoint.setConcurrency(3);
+//            // batch listening
+//            endpoint.setBatchListener(true);
+//            // default handler method factory
+//            endpoint.setMessageHandlerMethodFactory(new DefaultMessageHandlerMethodFactory());
+//            // bean that actually handles the messages
+//            endpoint.setBean(new ConsumerController());
+//            // handler method name and parameter types
+//            endpoint.setMethod(ConsumerController.class.getMethod("consumeMessage", String.class));
+//            // register the container and start it
+//            kafkaListenerEndpointRegistry.registerListenerContainer(endpoint, SpringUtil.getBean(KafkaListenerContainerFactory.class), true);
+//            log.info("Kafka listener container: container with id {} has been registered", topicName);
+//        }
+    }
+
+
+    /**
+     * Starts the listener container with the given id.
+     *
+     * @param id listener container id
+     */
+    public void startListenerContainer(String id) throws Exception {
+        MessageListenerContainer listenerContainer = kafkaListenerEndpointRegistry.getListenerContainer(id);
+        if (listenerContainer == null) {
+            log.info("Kafka listener container: no container with id {}; nothing to do", id);
+            return;
+        }
+        listenerContainer.start();
+        log.info("Kafka listener container: container with id {} has been started", id);
+    }
+
+
+    /**
+     * Stops the listener container with the given id.
+     *
+     * @param id listener container id
+     */
+    public void stopListenerContainer(String id) throws Exception {
+        MessageListenerContainer listenerContainer = kafkaListenerEndpointRegistry.getListenerContainer(id);
+        if (listenerContainer == null) {
+            log.info("Kafka listener container: no container with id {}; nothing to do", id);
+            return;
+        }
+        listenerContainer.stop();
+        log.info("Kafka listener container: container with id {} has been stopped", id);
+    }
+
+
+    /**
+     * Pauses the listener container with the given id.
+     *
+     * @param id listener container id
+     */
+    public void pauseListenerContainer(String id) throws Exception {
+        MessageListenerContainer listenerContainer = kafkaListenerEndpointRegistry.getListenerContainer(id);
+        if (listenerContainer == null) {
+            log.info("Kafka listener container: no container with id {}; nothing to do", id);
+            return;
+        }
+        listenerContainer.pause();
+        log.info("Kafka listener container: container with id {} has been paused", id);
+    }
+
+    /**
+     * Resumes the listener container with the given id.
+     *
+     * @param id listener container id
+     */
+    public void resumeListenerContainer(String id) throws Exception {
+        MessageListenerContainer listenerContainer = kafkaListenerEndpointRegistry.getListenerContainer(id);
+        if (listenerContainer == null) {
+            log.info("Kafka listener container: no container with id {}; nothing to do", id);
+            return;
+        }
+        listenerContainer.resume();
+        log.info("Kafka listener container: container with id {} has been resumed", id);
+    }
+
+
+    /**
+     * Checks whether a container is in a normal state.
+     * (The running flag reflects the container's run state, pauseRequested its listening state:
+     * stop closes the resources, pause only stops consumption.)
+     * A container consumes messages normally only when running is true and pauseRequested is false.
+     *
+     * @param id listener container id
+     * @return boolean
+     */
+    public boolean isNormalStateListenerContainer(String id) throws Exception {
+        MessageListenerContainer listenerContainer = kafkaListenerEndpointRegistry.getListenerContainer(id);
+        // if no container with this id exists, return false
+        if (listenerContainer == null) {
+            return false;
+        }
+        // otherwise return running-and-not-paused
+        return listenerContainer.isRunning() && !listenerContainer.isPauseRequested();
+    }
+
+
+    /**
+     * Gets the pause state (listening state) of a listener container.
+     *
+     * @param id listener container id
+     * @return true if paused (or if the container does not exist)
+     */
+    public boolean getPauseStateListenerContainer(String id) throws Exception {
+        MessageListenerContainer listenerContainer = kafkaListenerEndpointRegistry.getListenerContainer(id);
+        if (listenerContainer == null) {
+            return true;
+        }
+        return listenerContainer.isPauseRequested();
+    }
+
+    /**
+     * Gets the running state of a listener container.
+     *
+     * @param id listener container id
+     * @return true if the container exists and is running
+     */
+    public boolean getRunningStateListenerContainer(String id) throws Exception {
+        MessageListenerContainer listenerContainer = kafkaListenerEndpointRegistry.getListenerContainer(id);
+        if (listenerContainer == null) {
+            return false;
+        }
+        return listenerContainer.isRunning();
+    }
+
+    /**
+     * Brings a container into a normal state (running and not paused).
+     *
+     * @param id listener container id
+     * @return true if the container is in a normal state afterwards, false otherwise
+     */
+    public boolean setStateNormalListenerContainer(String id) throws Exception {
+        if (!existListenerContainer(id)) {
+            log.info("Kafka listener container: no container with id {}; nothing to do", id);
+            return false;
+        }
+        // if the container is not running, start it
+        if (!getRunningStateListenerContainer(id)) {
+            startListenerContainer(id);
+        }
+        // if the container is paused, resume it
+        if (getPauseStateListenerContainer(id)) {
+            resumeListenerContainer(id);
+        }
+        // finally query the state again and return it
+        return isNormalStateListenerContainer(id);
+    }
+
+    //endregion
+
+}
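A short usage sketch for KafkaUtils (illustrative only; partition, replica and retention values are made up): ensure the topic exists, adjust its retention, send a test message, and pause/resume the container registered in SysKafkaData.

    // hypothetical caller, assumes KafkaUtils is injected by Spring
    @Autowired
    private KafkaUtils kafkaUtils;

    public void demo() throws Exception {
        if (!kafkaUtils.existTopic("zs_objects_r2p3")) {
            kafkaUtils.createTopic("zs_objects_r2p3", 3, (short) 1);      // 3 partitions, 1 replica (illustrative)
        }
        kafkaUtils.updateTopicRetention("zs_objects_r2p3", "604800000"); // 7 days in milliseconds (illustrative)
        kafkaUtils.sendMsg("zs_objects_r2p3", "test message");
        kafkaUtils.pauseListenerContainer("kafka-listener-1");           // id registered in SysKafkaData
        kafkaUtils.resumeListenerContainer("kafka-listener-1");
    }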

+ 1 - 1
ruoyi-admin/src/main/resources/application-dev.yml

@@ -98,7 +98,7 @@ spring:
 spring.data:
   redis:
    # address
-    host: ${REDIS_HOST:localhost}
+    host: ${REDIS_HOST:redis}
    # port, default 6379
    port: ${REDIS_PORT:6379}
    # database index

+ 6 - 1
ruoyi-admin/src/main/resources/application.yml

@@ -53,10 +53,15 @@ user:
    # password lock time (10 minutes by default)
     lockTime: 10
 
+kafka:
+  enabled: false
+  bootstrap-servers: ${KAFAKA_SERVER:kafka-server:9092}
+  topics: zs_objects_r2p3
 # Spring configuration
 spring:
   application:
     name: RuoYi-Vue-Plus
+
   threads:
    # enable virtual threads (JDK 21 only)
     virtual:
@@ -270,7 +275,7 @@ websocket:
--- # warm-flow workflow configuration
warm-flow:
  # whether to enable the workflow engine, default true
-  enabled: true
+  enabled: false
  # whether to enable the designer UI
  ui: true
  # default Authorization; separate multiple tokens with commas

+ 7 - 0
ruoyi-common/ruoyi-common-oss/src/main/java/org/dromara/common/oss/core/OssClient.java

@@ -4,6 +4,7 @@ import cn.hutool.core.io.FileUtil;
 import cn.hutool.core.io.IoUtil;
 import cn.hutool.core.io.file.FileReader;
 import cn.hutool.core.io.file.PathUtil;
+import cn.hutool.core.io.resource.Resource;
 import cn.hutool.core.util.IdUtil;
 import org.apache.commons.io.FilenameUtils;
 import org.dromara.common.core.constant.Constants;
@@ -191,6 +192,12 @@ public class OssClient {
      * @throws OssException 如果上传失败,抛出自定义异常
      */
     public UploadResult upload(Path filePath, String key, String md5Digest, String contentType) {
+
+        if (isLocalEnv()) {
+            FileUtils.mkdir(getLocalEnvFile(key));
+            FileUtil.copyFile(filePath.toFile(), getLocalEnvFile(key), StandardCopyOption.REPLACE_EXISTING);
+            return UploadResult.builder().url(getUrl() + StringUtils.SLASH + key).filename(key).build();
+        }
         try {
             // 构建上传请求对象
             FileUpload fileUpload = transferManager.uploadFile(

+ 2 - 1
ruoyi-modules/ruoyi-system/src/main/java/org/dromara/system/controller/system/SysOssController.java

@@ -66,7 +66,7 @@ public class SysOssController extends BaseController {
      *
      * @param file 文件
      */
-    @SaCheckPermission("system:oss:upload")
+//    @SaCheckPermission("system:oss:upload")
     @Log(title = "OSS对象存储", businessType = BusinessType.INSERT)
     @PostMapping(value = "/upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
     public R<SysOssUploadVo> upload(@RequestPart("file") MultipartFile file) {
@@ -76,6 +76,7 @@ public class SysOssController extends BaseController {
         SysOssVo oss = ossService.upload(file);
         SysOssUploadVo uploadVo = new SysOssUploadVo();
         uploadVo.setUrl(oss.getUrl());
+        uploadVo.setPurl(oss.getFileName());
         uploadVo.setFileName(oss.getOriginalName());
         uploadVo.setOssId(oss.getOssId().toString());
         return R.ok(uploadVo);

+ 5 - 0
ruoyi-modules/ruoyi-system/src/main/java/org/dromara/system/domain/TblEvent.java

@@ -33,6 +33,11 @@ public class TblEvent extends BaseEntity {
     private String addr;
 
     /**
+     * Status
+     */
+    private String status;
+
+    /**
      * Content
      */
     private String content;

+ 5 - 0
ruoyi-modules/ruoyi-system/src/main/java/org/dromara/system/domain/bo/TblEventBo.java

@@ -37,6 +37,11 @@ public class TblEventBo extends BaseEntity {
     private String content;
 
     /**
+     * Status
+     */
+    private String status;
+
+    /**
      * Level
      */
     private String level;

+ 5 - 0
ruoyi-modules/ruoyi-system/src/main/java/org/dromara/system/domain/vo/SysOssUploadVo.java

@@ -16,6 +16,11 @@ public class SysOssUploadVo {
     private String url;
 
     /**
+     * Platform-relative address
+     */
+    private String purl;
+
+    /**
      * File name
      */
     private String fileName;

+ 5 - 0
ruoyi-modules/ruoyi-system/src/main/java/org/dromara/system/domain/vo/TblEventVo.java

@@ -41,6 +41,11 @@ public class TblEventVo extends TblEvent implements Serializable {
     private String addr;
 
     /**
+     * Status
+     */
+    @ExcelProperty(value = "状态")
+    private String status;
+    /**
      * Content
      */
     @ExcelProperty(value = "内容")