
Kafka feature changes
Equipment tree feature changes

dongjh 1 year ago
commit 14e8e2f52e
18 changed files with 265 additions and 129 deletions
  1. jeecg-module-system/jeecg-system-start/pom.xml (+7 -0)
  2. jeecg-module-system/jeecg-system-start/src/main/resources/application-dev.yml (+39 -37)
  3. module_ems/pom.xml (+1 -0)
  4. module_ems/src/main/java/org/jeecg/modules/dataCurrent/entity/DataCurrent.java (+2 -2)
  5. module_ems/src/main/java/org/jeecg/modules/dataDemand/entity/DataDemand.java (+2 -2)
  6. module_ems/src/main/java/org/jeecg/modules/dataElectricity/entity/DataElectricity.java (+2 -2)
  7. module_ems/src/main/java/org/jeecg/modules/dataElectricityDay/entity/DataElectricityDay.java (+2 -2)
  8. module_ems/src/main/java/org/jeecg/modules/dataPower/entity/DataPower.java (+2 -2)
  9. module_ems/src/main/java/org/jeecg/modules/dataVoltage/entity/DataVoltage.java (+2 -2)
  10. module_ems/src/main/java/org/jeecg/modules/kafka/consumer/KafkaConsumers.java (+113 -0)
  11. module_ems/src/main/java/org/jeecg/modules/kafka/consumer/TestConsumer.java (+22 -24)
  12. module_ems/src/main/java/org/jeecg/modules/kafka/controller/TestController.java (+23 -19)
  13. module_ems/src/main/java/org/jeecg/modules/kafka/producer/TestProducer.java (+36 -36)
  14. module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/controller/TpmEquipmentTreeController.java (+1 -0)
  15. module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/entity/TpmEquipmentTree.java (+4 -0)
  16. module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/mapper/xml/TpmEquipmentTreeMapper.xml (+2 -1)
  17. module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/service/impl/TpmEquipmentTreeServiceImpl.java (+1 -0)
  18. module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/vo/TpmEquipmentTreeNodeVO.java (+4 -0)

+ 7 - 0
jeecg-module-system/jeecg-system-start/pom.xml

@@ -19,6 +19,13 @@
             <groupId>org.jeecgframework.boot</groupId>
             <artifactId>module_ems</artifactId>
         </dependency>
+
+        <!-- Kafka dependency -->
+        <dependency>
+            <groupId>org.springframework.kafka</groupId>
+            <artifactId>spring-kafka</artifactId>
+            <version>2.8.0</version>
+        </dependency>
     </dependencies>
 
     <build>

+ 39 - 37
jeecg-module-system/jeecg-system-start/src/main/resources/application-dev.yml

@@ -36,42 +36,44 @@ spring:
             enable: true
             required: true
  ## Kafka configuration
-#  kafka:
-#      bootstrap-servers: 192.168.2.154:9092,192.168.2.176:9092,192.168.2.114:9092
-#      producer:
-#        # Number of times a message is resent after an error.
-#        retries: 0
-#        # When multiple messages are sent to the same partition, the producer puts them in one batch. This sets the amount of memory, in bytes, available to a batch.
-#        batch-size: 16384
-#        # Size of the producer's memory buffer.
-#        buffer-memory: 33554432
-#        # Key serializer
-#        key-serializer: org.apache.kafka.common.serialization.StringSerializer
-#        # Value serializer
-#        value-serializer: org.apache.kafka.common.serialization.StringSerializer
-#        # acks=0: the producer does not wait for any response from the server before considering a message written.
-#        # acks=1: the producer gets a success response from the server as soon as the partition leader receives the message.
-#        # acks=all: the producer gets a success response only after all replicating nodes have received the message.
-#        acks: 0
-#      consumer:
-#        # Auto-commit interval; in Spring Boot 2.x the value is a Duration and must follow the expected format, e.g. 1S, 1M, 2H, 5D
-#        auto-commit-interval: 1S
-#        # What the consumer does when reading a partition with no committed offset, or whose offset is invalid:
-#        # latest (default): with an invalid offset, the consumer starts from the newest records (those produced after the consumer started)
-#        # earliest: with an invalid offset, the consumer reads the partition from the beginning
-#        auto-offset-reset: earliest
-#        # Whether to auto-commit offsets (default true); to avoid duplicates and data loss, set it to false and commit offsets manually
-#        enable-auto-commit: false
-#        # Key deserializer
-#        key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
-#        # Value deserializer
-#        value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
-#      listener:
-#        # Number of threads running in the listener container.
-#        concurrency: 5
-#        # The listener is responsible for acks; each call commits immediately
-#        ack-mode: manual_immediate
-#        missing-topics-fatal: false
+  kafka:
+      bootstrap-servers: 127.0.0.1:9092
+      producer:
+        # Number of times a message is resent after an error.
+        retries: 1
+        # When multiple messages are sent to the same partition, the producer puts them in one batch. This sets the amount of memory, in bytes, available to a batch.
+        batch-size: 16384
+        # Size of the producer's memory buffer.
+        buffer-memory: 33554432
+        # Key serializer
+        key-serializer: org.apache.kafka.common.serialization.StringSerializer
+        # Value serializer
+        value-serializer: org.apache.kafka.common.serialization.StringSerializer
+        # acks=0: the producer does not wait for any response from the server before considering a message written.
+        # acks=1: the producer gets a success response from the server as soon as the partition leader receives the message.
+        # acks=all: the producer gets a success response only after all replicating nodes have received the message.
+        acks: 0
+      consumer:
+        group-id: thing___property
+        # Auto-commit interval; in Spring Boot 2.x the value is a Duration and must follow the expected format, e.g. 1S, 1M, 2H, 5D
+        auto-commit-interval: 1S
+        # What the consumer does when reading a partition with no committed offset, or whose offset is invalid:
+        # latest (default): with an invalid offset, the consumer starts from the newest records (those produced after the consumer started)
+        # earliest: with an invalid offset, the consumer reads the partition from the beginning
+        auto-offset-reset: latest
+        # Whether to auto-commit offsets (default true); to avoid duplicates and data loss, set it to false and commit offsets manually
+        enable-auto-commit: false
+        # Key deserializer
+        key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+        # Value deserializer
+        value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
+      listener:
+        # Number of threads running in the listener container.
+        concurrency: 5
+        # The listener is responsible for acks; each call commits immediately
+        ack-mode: manual_immediate
+        missing-topics-fatal: false
+      topic-patterns: "thing___.*.___property"  # a wildcard pattern used to match multiple topics
   ## quartz定时任务,采用数据库方式
   quartz:
     job-store-type: jdbc
@@ -168,7 +170,7 @@ spring:
       datasource:
         master:
           type: com.alibaba.druid.pool.DruidDataSource
-          url: jdbc:mysql://152.136.206.27:3306/ems?characterEncoding=UTF-8&useUnicode=true&useSSL=false&tinyInt1isBit=false&allowPublicKeyRetrieval=true&serverTimezone=Asia/Shanghai
+          url: jdbc:mysql://152.136.206.27:3306/phm?characterEncoding=UTF-8&useUnicode=true&useSSL=false&tinyInt1isBit=false&allowPublicKeyRetrieval=true&serverTimezone=Asia/Shanghai
           username: itdm-boot
           password: itdm-boot@2023
           driver-class-name: com.mysql.cj.jdbc.Driver
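
Note that topic-patterns is not one of Spring Kafka's standard spring.kafka.* properties; it appears to be a custom key that this commit resolves through SpEL in TestConsumer (further down in this diff), so the topic regex can be changed in configuration without touching code. A minimal sketch of that wiring, mirroring the listener added below:

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.springframework.kafka.annotation.KafkaListener;
    import org.springframework.stereotype.Component;

    @Component
    public class PatternListenerSketch {
        // The SpEL expression reads spring.kafka.topic-patterns at startup and
        // subscribes to every topic matching the regex, e.g. thing___<equipmentcode>___property.
        @KafkaListener(topicPattern = "#{'${spring.kafka.topic-patterns}'}")
        public void onMessage(ConsumerRecord<?, ?> record) {
            System.out.println(record.topic() + " -> " + record.value());
        }
    }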

+ 1 - 0
module_ems/pom.xml

@@ -16,6 +16,7 @@
             <groupId>org.jeecgframework.boot</groupId>
             <artifactId>module_tpm</artifactId>
         </dependency>
+
     </dependencies>
 
 </project>

+ 2 - 2
module_ems/src/main/java/org/jeecg/modules/dataCurrent/entity/DataCurrent.java

@@ -53,8 +53,8 @@ public class DataCurrent implements Serializable {
     @ApiModelProperty(value = "Device name")
     private java.lang.String equipmentname;
 	/** Time */
-	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd")
-	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd")
+	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd HH:mm:ss")
+	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd HH:mm:ss")
     @DateTimeFormat(pattern="yyyy-MM-dd")
     @ApiModelProperty(value = "Time")
     private java.util.Date logtime;

+ 2 - 2
module_ems/src/main/java/org/jeecg/modules/dataDemand/entity/DataDemand.java

@@ -53,8 +53,8 @@ public class DataDemand implements Serializable {
     @ApiModelProperty(value = "Device name")
     private java.lang.String equipmentname;
 	/** Time */
-	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd")
-	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd")
+	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd HH:mm:ss")
+	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd HH:mm:ss")
     @DateTimeFormat(pattern="yyyy-MM-dd")
     @ApiModelProperty(value = "Time")
     private java.util.Date logtime;

+ 2 - 2
module_ems/src/main/java/org/jeecg/modules/dataElectricity/entity/DataElectricity.java

@@ -56,8 +56,8 @@ public class DataElectricity implements Serializable {
     @ApiModelProperty(value = "Device name")
     private java.lang.String equipmentname;
 	/** Time */
-	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd")
-	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd")
+	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd HH:mm:ss")
+	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd HH:mm:ss")
     @DateTimeFormat(pattern="yyyy-MM-dd")
     @ApiModelProperty(value = "Time")
     private java.util.Date logtime;

+ 2 - 2
module_ems/src/main/java/org/jeecg/modules/dataElectricityDay/entity/DataElectricityDay.java

@@ -53,8 +53,8 @@ public class DataElectricityDay implements Serializable {
     @ApiModelProperty(value = "Device name")
     private java.lang.String equipmentname;
 	/** Calculation time */
-	@Excel(name = "Calculation time", width = 15, format = "yyyy-MM-dd")
-	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd")
+	@Excel(name = "Calculation time", width = 15, format = "yyyy-MM-dd HH:mm:ss")
+	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd HH:mm:ss")
     @DateTimeFormat(pattern="yyyy-MM-dd")
     @ApiModelProperty(value = "Calculation time")
     private java.util.Date logtime;

+ 2 - 2
module_ems/src/main/java/org/jeecg/modules/dataPower/entity/DataPower.java

@@ -53,8 +53,8 @@ public class DataPower implements Serializable {
     @ApiModelProperty(value = "Device name")
     private java.lang.String equipmentname;
 	/** Time */
-	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd")
-	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd")
+	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd HH:mm:ss")
+	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd HH:mm:ss")
     @DateTimeFormat(pattern="yyyy-MM-dd")
     @ApiModelProperty(value = "Time")
     private java.util.Date logtime;

+ 2 - 2
module_ems/src/main/java/org/jeecg/modules/dataVoltage/entity/DataVoltage.java

@@ -53,8 +53,8 @@ public class DataVoltage implements Serializable {
     @ApiModelProperty(value = "Device name")
     private java.lang.String equipmentname;
 	/** Time */
-	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd")
-	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd")
+	@Excel(name = "Time", width = 15, format = "yyyy-MM-dd HH:mm:ss")
+	@JsonFormat(timezone = "GMT+8",pattern = "yyyy-MM-dd HH:mm:ss")
     @DateTimeFormat(pattern="yyyy-MM-dd")
     @ApiModelProperty(value = "Time")
     private java.util.Date logtime;

+ 113 - 0
module_ems/src/main/java/org/jeecg/modules/kafka/consumer/KafkaConsumers.java

@@ -0,0 +1,113 @@
+package org.jeecg.modules.kafka.consumer;
+
+//import com.google.common.base.Splitter;
+//import lombok.extern.slf4j.Slf4j;
+//import org.apache.kafka.clients.consumer.ConsumerRecord;
+//import org.apache.kafka.clients.consumer.ConsumerRecords;
+//import org.apache.kafka.clients.consumer.KafkaConsumer;
+//import org.apache.kafka.common.serialization.StringDeserializer;
+//import org.jeecg.modules.tpmEquipment.entity.TpmEquipment;
+//import org.jeecg.modules.tpmEquipment.service.ITpmEquipmentService;
+//import org.springframework.beans.factory.InitializingBean;
+//import org.springframework.beans.factory.annotation.Autowired;
+//import org.springframework.stereotype.Service;
+//
+//import java.time.Duration;
+//import java.util.List;
+//import java.util.Objects;
+//import java.util.Properties;
+//import java.util.stream.Collectors;
+//
+// This code is not used for now; there is a simpler approach, namely TestConsumer's @KafkaListener(topicPattern = "thing___.*.___property")
+//
+///**
+// * Because the topics are dynamic, the consumer is reworked here
+// */
+//@Service
+//@Slf4j
+public class KafkaConsumers {
+//public class KafkaConsumers implements InitializingBean {
+//
+//    @Autowired
+//    private ITpmEquipmentService equipmentService;
+//
+//    /**
+//     * The consumer instance
+//     */
+//    private static KafkaConsumer<String, String> consumer;
+//    /**
+//     * topic
+//     */
+//    private List<String> topicList;
+//
+//    public String getNewTopic() {
+//        // Fetch the equipment information from the database
+//        List<TpmEquipment> equipments = equipmentService.list();
+////        String topicList = equipments.stream().map(equipment -> "/sys/" + equipment.getSpec() + "/" +
+////                                                equipment.getEquipmentcode() + "/thing/event/property/post").
+////                                    collect(Collectors.joining(","));
+//        String topicList = equipments.stream().map(equipment -> equipment.getEquipmentcode()).
+//                collect(Collectors.joining(","));
+//        return topicList;
+//    }
+//
+//    /**
+//     * Initialize the consumer (the configuration is hard-coded for quick testing; please use the configuration file instead)
+//     *
+//     * @param topicList
+//     * @return
+//     */
+//    public KafkaConsumer<String, String> getInitConsumer(List<String> topicList) {
+//        // Configuration properties
+//        Properties props = new Properties();
+//        // Kafka broker address
+//        props.put("bootstrap.servers", "127.0.0.1:9092");
+//        // A consumer group must be specified
+//        props.put("group.id", "grout.test");
+//        // Set the key and value deserializer classes
+//        props.put("key.deserializer", StringDeserializer.class);
+//        props.put("value.deserializer", StringDeserializer.class);
+//        // Create the consumer instance
+//        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
+//        // Subscribe to the topics
+//        consumer.subscribe(topicList);
+//        return consumer;
+//    }
+//
+//    /**
+//     * Start the consumer thread
+//     * Handle exceptions yourself according to your needs
+//     */
+//    @Override
+//    public void afterPropertiesSet() {
+//        // Initialize the topic list
+//        topicList = Splitter.on(",").splitToList(Objects.requireNonNull(getNewTopic()));
+//        if (org.apache.commons.collections.CollectionUtils.isNotEmpty(topicList)) {
+//            consumer = getInitConsumer(topicList);
+//            // Start a consumer thread
+//            new Thread(() -> {
+//                while (true) {
+//                    // Simulate fetching the latest topics from a configuration source (comma-separated string)
+//                    final List<String> newTopic = Splitter.on(",").splitToList(Objects.requireNonNull(getNewTopic()));
+//                    // If the topics have changed
+//                    if (!topicList.equals(newTopic)) {
+//                        log.info("topics changed: newTopic:{}, oldTopic:{}-------------------------", newTopic, topicList);
+//                        // method one: re-subscribe to the topics:
+//                        topicList = newTopic;
+//                        consumer.subscribe(newTopic);
+//                        // method two: close the old consumer and initialize a new one
+//                        //consumer.close();
+//                        //topicList = newTopic;
+//                        //consumer = getInitConsumer(newTopic);
+//                        continue;
+//                    }
+//                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
+//                    for (ConsumerRecord<String, String> record : records) {
+//                        System.out.println("key:" + record.key() + "" + ",value:" + record.value());
+//                    }
+//                }
+//            }).start();
+//        }
+//    }
+}
+

+ 22 - 24
module_ems/src/main/java/org/jeecg/modules/kafka/consumer/TestConsumer.java

@@ -1,28 +1,26 @@
 package org.jeecg.modules.kafka.consumer;
 
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.springframework.kafka.annotation.KafkaListener;
+import org.springframework.stereotype.Component;
 
-//import org.apache.kafka.clients.consumer.ConsumerRecord;
-//import org.jeecg.kafka.producer.TestProducer;
-//import org.springframework.kafka.annotation.KafkaListener;
-//import org.springframework.stereotype.Component;
+@Component
+public class TestConsumer {
 
-//@Component
-//public class TestConsumer {
-//
-//    /**
-//     * Specifies one consumer group and one topic.
-//     */
-//    @KafkaListener(topics = TestProducer.TOPIC_TEST,groupId = TestProducer.GROUP_TEST)
-//    public void simpleConsumer(ConsumerRecord<String, Object> record) {
-//        System.out.println("Entered the simpleConsumer method");
-//        System.out.printf(
-//                "partition = %d, offset = %d, key = %s, value = %s, message creation timestamp = %d%n",
-//                record.partition(),
-//                record.offset(),
-//                record.key(),
-//                record.value(),
-//                record.timestamp()
-//        );
-//    }
-//
-//}
+    /**
+     * Specifies one consumer group and one topic.
+     * topicPattern has a caveat: topics created while this program is running are not picked up; the program has to be restarted before they are consumed
+     */
+//    @KafkaListener(topicPattern = "thing___.*.___property")
+    @KafkaListener(topicPattern = "#{'${spring.kafka.topic-patterns}'}")
+    public void simpleConsumer(ConsumerRecord<?, ?> record) {
+        System.out.println("Entered the simpleConsumer method");
+        System.out.printf("topic = %s, offset = %d, key = %s, value = %s, message creation timestamp = %d%n \n",
+                record.topic(),
+                record.offset(),
+                record.key(),
+                record.value(),
+                record.timestamp()
+        );
+    }
+}
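
The restart caveat noted in the comment above could, if needed, be worked around without bouncing the whole application by restarting the listener containers so the pattern subscription is re-evaluated against the current topic list. A rough sketch, not part of this commit, using Spring Kafka's KafkaListenerEndpointRegistry and assuming scheduling is enabled via @EnableScheduling:

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
    import org.springframework.scheduling.annotation.Scheduled;
    import org.springframework.stereotype.Component;

    @Component
    public class ListenerRefresherSketch {

        @Autowired
        private KafkaListenerEndpointRegistry registry;

        // Periodically restart all listener containers so newly created topics
        // matching thing___.*.___property start being consumed without a full
        // application restart. The 5-minute interval is only an example value.
        @Scheduled(fixedDelay = 300000)
        public void refresh() {
            registry.getListenerContainers().forEach(container -> {
                container.stop();
                container.start();
            });
        }
    }

Depending on client configuration, lowering the consumer's metadata.max.age.ms can also make a regex subscription notice new topics sooner, since the pattern is re-checked whenever topic metadata is refreshed.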

+ 23 - 19
module_ems/src/main/java/org/jeecg/modules/kafka/controller/TestController.java

@@ -1,22 +1,26 @@
 package org.jeecg.modules.kafka.controller;
 
 
-//import org.jeecg.kafka.producer.TestProducer;
-//import org.springframework.web.bind.annotation.GetMapping;
-//import org.springframework.web.bind.annotation.RequestMapping;
-//import org.springframework.web.bind.annotation.RestController;
-//
-//import javax.annotation.Resource;
-//
-//@RestController
-//@RequestMapping("/kafka/test")
-//public class TestController {
-//
-//    @Resource
-//    private TestProducer testProducer;
-//
-//    @GetMapping("/send")
-//    public void sendMsg(){
-//        testProducer.send("------------test message-----------");
-//    }
-//}
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.jeecg.modules.kafka.producer.TestProducer;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import javax.annotation.Resource;
+
+@Api(tags="kafka")
+@RestController
+@RequestMapping("/kafka/test")
+public class TestController {
+
+    @Resource
+    private TestProducer testProducer;
+
+    @ApiOperation(value="Send message", notes="Send message")
+    @GetMapping("/send")
+    public void sendMsg(String topicname, String obj){
+        testProducer.send(topicname, obj);
+    }
+}
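
For reference, the new endpoint can be exercised with a plain GET request such as /kafka/test/send?topicname=thing___DEV001___property&obj=hello (DEV001 is only an illustrative equipment code); a topic named this way also matches the thing___.*.___property pattern consumed by the listener above.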

+ 36 - 36
module_ems/src/main/java/org/jeecg/modules/kafka/producer/TestProducer.java

@@ -1,44 +1,44 @@
 package org.jeecg.modules.kafka.producer;
 
-//import com.alibaba.fastjson.JSON;
-//import lombok.extern.slf4j.Slf4j;
-//import org.jetbrains.annotations.NotNull;
-//import org.springframework.kafka.core.KafkaTemplate;
-//import org.springframework.kafka.support.SendResult;
-//import org.springframework.stereotype.Component;
-//import org.springframework.util.concurrent.ListenableFuture;
-//import org.springframework.util.concurrent.ListenableFutureCallback;
+import com.alibaba.fastjson.JSON;
+import lombok.extern.slf4j.Slf4j;
+import org.jetbrains.annotations.NotNull;
+import org.springframework.kafka.core.KafkaTemplate;
+import org.springframework.kafka.support.SendResult;
+import org.springframework.stereotype.Component;
+import org.springframework.util.concurrent.ListenableFuture;
+import org.springframework.util.concurrent.ListenableFutureCallback;
 
 import javax.annotation.Resource;
 
-//@Component
-//@Slf4j
-//public class TestProducer {
-//
-//    @Resource
-//    private KafkaTemplate<String, Object> kafkaTemplate;
-//
-//    // Custom topic
+@Component
+@Slf4j
+public class TestProducer {
+
+    @Resource
+    private KafkaTemplate<String, Object> kafkaTemplate;
+
+    // Custom topic
 //    public static final String TOPIC_TEST = "visible";
-//
 //    public static final String GROUP_TEST = "grout.test";
-//    public void send(Object obj) {
-//        String obj2String = JSON.toJSONString(obj);
-//        log.info("Message to be sent: {}", obj2String);
-//        // Send the message
-//        ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send("TOPIC_TEST", obj);
-//        future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
-//            @Override
-//            public void onFailure(@NotNull Throwable throwable) {
-//                // Handle a failed send
-//                log.info(TOPIC_TEST + " - producer failed to send the message: " + throwable.getMessage());
-//            }
 //
-//            @Override
-//            public void onSuccess(SendResult<String, Object> stringObjectSendResult) {
-//                // Handle a successful send
-//                log.info(TOPIC_TEST + " - producer sent the message successfully: " + stringObjectSendResult.toString());
-//            }
-//        });
-//    }
-//}
+    public void send(String topicname, Object obj) {
+        String obj2String = JSON.toJSONString(obj);
+        log.info("Message to be sent: {}", obj2String);
+        // Send the message
+        ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(topicname, obj);
+        future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
+            @Override
+            public void onFailure(@NotNull Throwable throwable) {
+                // Handle a failed send
+                log.info(topicname + " - producer failed to send the message: " + throwable.getMessage());
+            }
+
+            @Override
+            public void onSuccess(SendResult<String, Object> stringObjectSendResult) {
+                // Handle a successful send
+                log.info(topicname + " - producer sent the message successfully: " + stringObjectSendResult.toString());
+            }
+        });
+    }
+}
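
One thing worth noting in send(): obj2String is only logged, while the original obj is what gets handed to kafkaTemplate.send(). With the StringSerializer configured in application-dev.yml this works as long as callers pass a String (as TestController does), but a non-String payload would fail to serialize. A small sketch of a variant method, not part of this commit, that always sends the serialized JSON instead:

    // Hypothetical additional method on TestProducer: sends the JSON string so the
    // payload always matches the StringSerializer configured for the producer.
    public void sendAsJson(String topicname, Object obj) {
        String obj2String = JSON.toJSONString(obj);
        log.info("Message to be sent: {}", obj2String);
        kafkaTemplate.send(topicname, obj2String);
    }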

+ 1 - 0
module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/controller/TpmEquipmentTreeController.java

@@ -44,6 +44,7 @@ public class TpmEquipmentTreeController extends JeecgController<TpmEquipmentTree
 	 public Result<List<TpmEquipmentTreeNodeVO>> treeList(TpmEquipmentTree tpmEquipmentTree){
 		 if(tpmEquipmentTree.getName()!=null && !"".equals(tpmEquipmentTree.getName())){
 			 QueryWrapper<TpmEquipmentTree> queryWrapper = new QueryWrapper<>();
+			 queryWrapper.orderByAsc("sortorder");
 			 queryWrapper.like(tpmEquipmentTree.getName() != null && !tpmEquipmentTree.getName().equals("name"),"name",tpmEquipmentTree.getName());
 			 List<TpmEquipmentTree> list = tpmEquipmentTreeService.list(queryWrapper);
 			 List<TpmEquipmentTreeNodeVO> list1 = TpmEquipmentTreeConvert.INSTANCE.convert(list);

+ 4 - 0
module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/entity/TpmEquipmentTree.java

@@ -64,5 +64,9 @@ public class TpmEquipmentTree implements Serializable {
 	@Dict(dicCode = "yn")
     @ApiModelProperty(value = "Whether the node has children")
     private java.lang.String hasChild;
+    /** Sort order */
+    @Excel(name = "Sort order")
+    @ApiModelProperty(value = "Sort order")
+    private java.lang.Integer sortorder;
 
 }

+ 2 - 1
module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/mapper/xml/TpmEquipmentTreeMapper.xml

@@ -12,11 +12,12 @@
 		<result property="updateBy" column="update_by"/>
 		<result property="updateTime" column="update_time"/>
 		<result property="parentName" column="parent_name"/>
+		<result property="sortorder" column="sortorder"/>
 	</resultMap>
 
	<!-- Get the equipment tree together with the equipment list -->
 	<select id="selectTreeAndEquipList" parameterType="org.jeecg.modules.tpmEquipmentTree.entity.TpmEquipmentTree" resultMap="EquipmentTreeResult">
-		select d.id, d.parentid, d.name
+		select d.id, d.parentid, d.name, d.sortorder
 		from tpm_equipment_tree d
 		union
 		select e.id as equipmenttreeid, e.equipmenttreeid as parentid, e.equipmentname as name

+ 1 - 0
module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/service/impl/TpmEquipmentTreeServiceImpl.java

@@ -40,6 +40,7 @@ public class TpmEquipmentTreeServiceImpl extends ServiceImpl<TpmEquipmentTreeMap
         TpmEquipmentTree equipmentTree = new TpmEquipmentTree();
         equipmentTree.setParentid(pid);
         QueryWrapper<TpmEquipmentTree> queryWrapper = new QueryWrapper(equipmentTree);
+        queryWrapper.orderByAsc("sortorder");
         List<TpmEquipmentTree> list = tpmEquipmentTreeMapper.selectList(queryWrapper);
 
         if(list!=null && list.size()>0){

+ 4 - 0
module_tpm/src/main/java/org/jeecg/modules/tpmEquipmentTree/vo/TpmEquipmentTreeNodeVO.java

@@ -33,6 +33,10 @@ public class TpmEquipmentTreeNodeVO implements Serializable {
     @Dict(dicCode = "yn")
     @ApiModelProperty(value = "Whether the node has children")
     private java.lang.String hasChild;
+    /** Sort order */
+    @Excel(name = "Sort order")
+    @ApiModelProperty(value = "Sort order")
+    private java.lang.Integer sortorder;
 
     /**
      *