diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + 
https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + 
net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + 
org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + 
+ + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + 
net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + 
com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" 
+ + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + 
+ org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ 
b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver 
class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + 
} catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = 
rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + 
javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + 
+ private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + 
if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != 
null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git 
a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + 
+ org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package 
com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: 
Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + 
public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = 
clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + 
org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java 
b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering 
MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + 
throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < 
cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import 
io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import 
com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new 
SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + 
com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + 
org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + 
+public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int 
result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection 
conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java 
b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- 
/dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass 
= WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable 
cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + 
+ + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ 
+Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + 
Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch 
(Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = 
rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import 
io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import 
com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || 
datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at 
end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + 
https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka 
Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } 
catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import 
java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + 
try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new 
byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + 
+public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + 
slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + 
org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param 
sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, 
NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + 
out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git 
a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if 
(devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", 
"Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public 
String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean 
kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + 
dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static 
void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); 
+ assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, 
rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new 
NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static 
final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, 
devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = 
provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String 
logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } 
+ + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + 
http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start 
server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java 
b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); 
+ if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + 
socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = 
{}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class 
Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + 
kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) 
throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + 
e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + 
rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 
0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + 
b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + 
super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + 
+ } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String 
getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + 
public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + 
mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + 
maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package 
com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String 
sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new 
ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git 
a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java 
b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else 
if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + 
return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void 
setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + 
public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = 
uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + 
this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + 
bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + 
logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java 
b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); 
+ if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + 
socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = 
{}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class 
Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void 
setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + 
manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ 
b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver 
class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + 
} catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = 
rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import 
io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import 
com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || 
datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at 
end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public 
void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String 
uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ 
/** Well-sensor reading: a raw value string and its report time. */
public class Well {

    private String value;   // raw reading
    private String uptime;  // report time

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getUptime() {
        return uptime;
    }

    public void setUptime(String uptime) {
        this.uptime = uptime;
    }
}
/**
 * Consumer constructor.
 *
 * @param topic the Kafka topic to subscribe to
 */
public Consumer(String topic) {
    super("KafkaConsumerExample", false);
    Properties props = initProperties();
    consumer = new KafkaConsumer(props);
    this.topic = topic;
    // Subscribe to the single configured topic.
    consumer.subscribe(Collections.singletonList(this.topic));
}

/**
 * Builds the Kafka consumer configuration, taking each value from
 * KafkaProperties with a hard-coded fallback default.
 *
 * @return the assembled consumer Properties
 */
public static Properties initProperties() {
    Properties props = new Properties();
    KafkaProperties kafkaProc = KafkaProperties.getInstance();

    // Broker connection address
    props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
    // Group id
    props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer"));
    // Whether offsets are committed automatically
    props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true"));
    // Auto-commit interval
    props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS, "1000"));
    // Session timeout
    props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000"));
    // Key / value deserializers
    props.put(KEY_DESERIALIZER,
            kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
    props.put(VALUE_DESERIALIZER,
            kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
    // Security protocol (SASL_PLAINTEXT or PLAINTEXT)
    props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
    // Kerberos service and domain names
    props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
    props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));

    // Fix: this diagnostic dump was logged at ERROR level with a "---" prefix;
    // it is informational output, not an error condition.
    LOG.info("Kafka consumer properties: {}", props);
    return props;
}

/**
 * Polls the subscribed topic once and logs every record received.
 * Called repeatedly by the ShutdownableThread loop.
 */
public void doWork() {
    // Poll with the configured max wait (ms).
    ConsumerRecords records = consumer.poll(waitTime);
    for (ConsumerRecord record : records) {
        LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value()
                + ") at offset " + record.offset());
    }
}

public static void main(String[] args) {
    if (LoginUtil.isSecurityModel()) {
        try {
            LOG.info("Securitymode start.");

            // NOTE: in secure mode the machine-account principal/keytab must be
            // replaced with the ones you applied for.
            LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE);
        } catch (IOException e) {
            LOG.error("Security prepare failure.");
            LOG.error("The IOException occured.", e);
            return;
        }
        LOG.info("Security prepare success.");
    }

    Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC);
    consumerThread.start();

    // Let the consumer run for 60s, then shut it down (adjust as needed).
    try {
        Thread.sleep(60000);
    } catch (InterruptedException e) {
        // Fix: log the interruption as a warning with the throwable attached,
        // and restore the interrupt flag instead of swallowing it.
        LOG.warn("Interrupted while waiting for the consumer.", e);
        Thread.currentThread().interrupt();
    } finally {
        consumerThread.shutdown();
        consumerThread.consumer.close();
    }
}
om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git 
a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + 
System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch 
(SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t 
= clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import 
io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import 
com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat 
sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private 
String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null 
+++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of 
file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float 
getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void 
setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = 
"auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords 
records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import 
com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 
08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + 
+ datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + 
break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + 
http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start 
server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java 
b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); 
+ if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + 
socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = 
{}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class 
Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void 
setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends 
ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, 
kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + 
consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// 
props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + 
List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + 
locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; 
+ /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is 
" + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc 
+ javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { 
+ + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + 
if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != 
null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git 
a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup 
= new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = 
LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, 
flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + 
public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + 
this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public 
Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ 
+package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + 
Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); 
+ } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer 
consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object 
data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + 
datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import 
java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + 
* 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String 
content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + 
+ + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import 
com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB 
connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + 
PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch 
(Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = 
LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import 
java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", 
standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = 
devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void 
setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private 
String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java 
b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 
100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 
+ * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + 
LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public 
class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = 
message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new 
Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package 
com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties 
getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = 
LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/pom.xml b/pom.xml new file mode 
100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + 
+ + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} 
public class AccessDBUtils {

    // UCanAccess JDBC URL pointing at the weather-station Access database.
    private static final String dbURL = "jdbc:ucanaccess://"
            + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb";

    // Register the UCanAccess JDBC driver once at class-load time.
    static {
        try {
            Class.forName("net.ucanaccess.jdbc.UcanaccessDriver");
        } catch (ClassNotFoundException cnfex) {
            System.out.println("Problem in loading or registering MS Access JDBC driver");
            cnfex.printStackTrace();
        }
    }

    /**
     * Opens a connection to the Access database.
     *
     * @return a live connection, or {@code null} when it cannot be established
     *         (callers must handle the null case)
     */
    public static Connection getConn() {
        try {
            return DriverManager.getConnection(dbURL);
        } catch (Exception e) {
            System.out.println("AccessDB connection fail");
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Closes result set, statement and connection in that order.
     * Every close is attempted even when an earlier one fails — this replaces
     * the original triple-nested try/finally with the same guarantee.
     */
    public static void close(Connection con, PreparedStatement ps, ResultSet rs) {
        closeQuietly(rs);
        closeQuietly(ps);
        closeQuietly(con);
    }

    // One close attempt that never propagates; null resources are ignored.
    private static void closeQuietly(AutoCloseable resource) {
        if (resource != null) {
            try {
                resource.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return 
list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = 
LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch 
(InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The 
server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + 
package com.casic.yizhuang.json;

import java.util.List;

/**
 * Payload body ("mBody") of a device message: a batch of readings plus the
 * upload time and, for some devices, a battery level.
 */
public class MBody {

    private String bType;   // business data type tag, e.g. "WellData", "TempHumiData"
    private Integer cell;   // battery level; may be null when the device does not report it
    private List datas;     // raw reading objects for this upload
    private String logTime; // upload timestamp, format yyyyMMddHHmmss

    public String getbType() {
        return bType;
    }

    public void setbType(String bType) {
        this.bType = bType;
    }

    public Integer getCell() {
        return cell;
    }

    public void setCell(Integer cell) {
        this.cell = cell;
    }

    public List getDatas() {
        return datas;
    }

    public void setDatas(List datas) {
        this.datas = datas;
    }

    public String getLogTime() {
        return logTime;
    }

    public void setLogTime(String logTime) {
        this.logTime = logTime;
    }
}
boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float 
getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public 
Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 
+ + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + 
props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 
+ if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); 
+ wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + 
System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps 
= new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null 
== rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + 
} + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = 
temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + 
package com.casic.yizhuang;

import com.casic.yizhuang.core.Server;
import com.casic.yizhuang.kafka.KafkaClient;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Process entry point: starts the Quartz scheduler, then the Netty TCP
 * server, then blocks forever consuming the big-data Kafka topic.
 */
public class Main {

    private static final Logger logger = LoggerFactory.getLogger(Main.class);

    public static void main(String[] args) throws Exception {

        // Initialize the log output path from the working directory;
        // presumably read back as ${log.base} by the logging config — verify.
        String path=System.getProperty("user.dir");
        System.setProperty("log.base",path);
        System.out.println(path);

        logger.info("Start scheduler");
        try {
            new Scheduler().start();
        } catch (SchedulerException e) {
            e.printStackTrace();
        }

        // The TCP server runs on its own thread so startup can continue.
        System.out.println("Start server");
        logger.info("Start server");
        new Thread(new Server()).start();

        System.out.println("Start Kafka Consume");
        logger.info("Start Kafka Consume");

        // NOTE(review): this publishes a leftover test message to the data
        // topic on every startup — confirm whether it can be removed.
        Producer.send("怎么回事儿");
        // Blocks forever: the consumer loop never returns.
        new KafkaClient().kafkaDataConsumer();
    }

}
} catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for 
(int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import 
io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void 
initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void 
channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), 
"Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java 
b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java 
b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return 
h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
+ +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + 
props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + 
Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// 
props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode 
= ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + 
case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = 
"TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == 
rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new 
KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float 
totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + 
ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + 
com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" 
+ + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = 
AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = 
AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package 
com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, 
Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + 
ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String 
getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) 
{ + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git 
// ===== src/main/java/com/casic/yizhuang/json/device/Well.java =====
package com.casic.yizhuang.json.device;

/**
 * JSON payload for a manhole-cover ("Well") device status.
 * KafkaClient writes value "00" when the reported status is 0.
 */
public class Well {
    private String value;  // status code string; only "00" is written by the visible callers
    private String uptime; // upload timestamp string, yyyyMMddHHmmss

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getUptime() {
        return uptime;
    }

    public void setUptime(String uptime) {
        this.uptime = uptime;
    }
}

// ===== src/main/java/com/casic/yizhuang/kafka/Consumer.java =====
package com.casic.yizhuang.kafka;

import com.casic.yizhuang.util.LoginUtil;
import kafka.utils.ShutdownableThread;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Collections;
import java.util.Properties;

/**
 * Example Kafka consumer thread: subscribes to one topic and logs every record
 * it receives until {@code shutdown()} is called.
 */
public class Consumer extends ShutdownableThread {
    private static final Logger LOG = LoggerFactory.getLogger(Consumer.class);

    private final KafkaConsumer<String, String> consumer;

    private final String topic;

    // Maximum wait time of a single poll request (ms).
    private final int waitTime = 1000;

    // Broker connection address.
    private final static String BOOTSTRAP_SERVER = "bootstrap.servers";

    // Consumer group id.
    private final static String GROUP_ID = "group.id";

    // Deserializer class for the record value.
    private final static String VALUE_DESERIALIZER = "value.deserializer";

    // Deserializer class for the record key.
    private final static String KEY_DESERIALIZER = "key.deserializer";

    // Protocol type: SASL_PLAINTEXT or PLAINTEXT.
    private final static String SECURITY_PROTOCOL = "security.protocol";

    // Kerberos service name.
    private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name";

    // Kerberos domain name.
    private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name";

    // Whether offsets are committed automatically.
    private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit";

    // Interval (ms) between automatic offset commits.
    private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms";

    // Session timeout (ms).
    private final static String SESSION_TIMEOUT_MS = "session.timeout.ms";

    /**
     * Keytab file name of the machine-machine account requested by the user.
     */
    private static final String USER_KEYTAB_FILE = "user.keytab";

    /**
     * Principal name of the machine-machine account requested by the user.
     */
    private static final String USER_PRINCIPAL = "kafkauser";

    /**
     * Consumer constructor.
     *
     * @param topic name of the topic to subscribe to
     */
    public Consumer(String topic) {
        super("KafkaConsumerExample", false);
        Properties props = initProperties();
        consumer = new KafkaConsumer<>(props);
        this.topic = topic;
        // Subscribe to the topic.
        consumer.subscribe(Collections.singletonList(this.topic));
    }

    /**
     * Builds the consumer configuration from KafkaProperties, with fallback
     * defaults for every key.
     *
     * @return consumer {@link Properties}
     */
    public static Properties initProperties() {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();

        // Broker connection address.
        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
        // Consumer group id.
        props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer"));
        // Whether offsets are committed automatically.
        props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true"));
        // Interval (ms) between automatic offset commits.
        props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS, "1000"));
        // Session timeout (ms).
        props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000"));
        // Deserializer class for the record key.
        props.put(KEY_DESERIALIZER,
            kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        // Deserializer class for the record value.
        props.put(VALUE_DESERIALIZER,
            kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        // Security protocol type.
        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
        // Kerberos service name.
        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
        // Kerberos domain name.
        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));

        // BUGFIX: this configuration dump was logged at ERROR level; it is diagnostic output.
        LOG.debug("Consumer properties: {}", props);
        return props;
    }

    /**
     * Per-iteration work: poll once and log each received record.
     */
    public void doWork() {
        // Poll for records, waiting up to waitTime ms.
        ConsumerRecords<String, String> records = consumer.poll(waitTime);
        for (ConsumerRecord<String, String> record : records) {
            LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value()
                + ") at offset " + record.offset());
        }
    }

    public static void main(String[] args) {
        if (LoginUtil.isSecurityModel()) {
            try {
                LOG.info("Securitymode start.");

                // NOTE: in security mode this must be the user's own machine-machine account.
                LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE);
            } catch (IOException e) {
                LOG.error("Security prepare failure.");
                LOG.error("The IOException occured.", e);
                return;
            }
            LOG.info("Security prepare success.");
        }

        Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC);
        consumerThread.start();

        // Let the consumer run for 60 s, then shut it down (demo behaviour).
        try {
            Thread.sleep(60000);
        } catch (InterruptedException e) {
            // BUGFIX: restore the interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
            LOG.info("The InterruptedException occured : {}.", e);
        } finally {
            consumerThread.shutdown();
            consumerThread.consumer.close();
        }
    }

}
// ===== src/main/java/com/casic/yizhuang/kafka/KafkaClient.java =====
package com.casic.yizhuang.kafka;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.casic.yizhuang.json.BigDataMessage;
import com.casic.yizhuang.json.MBody;
import com.casic.yizhuang.json.Message;
import com.casic.yizhuang.json.Status;
import com.casic.yizhuang.json.device.Locator;
import com.casic.yizhuang.json.device.TempHumi;
import com.casic.yizhuang.json.device.WasteGas;
import com.casic.yizhuang.json.device.Well;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.netty.Client;
import com.casic.yizhuang.util.Common;
import com.casic.yizhuang.util.KafkaUtils;
import com.casic.yizhuang.util.LoginUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * Consumes raw device messages from the big-data platform's Kafka topic,
 * converts them to the internal Message envelope per device type, and forwards
 * them over the netty Client (flow meters are persisted to MySQL instead).
 */
public class KafkaClient {
    private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class);

    private KafkaConsumer<String, String> consumer;

    /**
     * Blocking consume loop: performs security preparation if required,
     * subscribes to {@link KafkaProperties#REVICE_DATA_TOPIC} and dispatches
     * every record. Never returns under normal operation.
     *
     * @throws Exception if security preparation fails
     */
    public void kafkaDataConsumer() throws Exception {
        logger.info("Securitymode start.");
        // NOTE: in security mode this must be the user's own machine-machine account.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        Properties props = KafkaUtils.consumerInitProperties();
        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                String msg = record.value();
                logger.info("kafka接收数据-----" + msg);
                // BUGFIX: was `return`, which terminated the whole consumer on the
                // first ChangFeng / Status-less message; skip the record instead.
                if (msg.contains("ChangFeng") || !msg.contains("Status")) {
                    continue;
                }
                try {
                    handleMessage(msg);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Parses one raw JSON message and dispatches it by device type.
     */
    private void handleMessage(String msg) throws Exception {
        BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class);
        logger.info(message.getDevID());

        String devId = message.getDevID();
        // Strip '-', ':' and spaces: "2020-03-16 08:47:13" -> "20200316084713".
        String logTime = message.getLogTime().replaceAll("-|:| ", "");

        // Status is itself a JSON array string of {Key, Value} pairs.
        JSONArray statuses = JSONArray.parseArray(message.getStatus());
        // assumes Status.getValue() is a Float — TODO confirm against the Status class
        Map<String, Float> statusMap = new HashMap<>();
        for (Object data : statuses) {
            Status status = JSON.toJavaObject((JSONObject) data, Status.class);
            statusMap.put(status.getKey(), status.getValue());
        }

        // Look up the well/point code for this device id.
        List<WellInfo> wellInfoList = new DAO<WellInfo>().query(WellInfo.class, Common.SELECT_WELLCODE, devId);
        String wellcode = wellInfoList.isEmpty() ? "" : wellInfoList.get(0).getWellcode();

        MBody mBody = new MBody();
        mBody.setLogTime(logTime);
        List<Object> datas = new ArrayList<>();

        // BUGFIX: was an unconditional statusMap.get("Power").intValue(), which
        // NPE'd for every device type that does not report "Power".
        Float power = statusMap.get("Power");
        Integer cell = (power == null) ? null : power.intValue();

        Message m = new Message();

        switch (message.getDevType()) {
            case "HarmfulGas": // harmful / waste gas sensor
                mBody.setbType("WasteGasData");

                WasteGas wasteGas = new WasteGas();
                wasteGas.setH2S(statusMap.get("H2S")); // BUGFIX: was set twice; duplicate removed
                wasteGas.setCO(statusMap.get("CO"));
                wasteGas.setO2(statusMap.get("O2"));
                wasteGas.setCH4(statusMap.get("CH4"));
                wasteGas.setUptime(logTime);

                datas.add(wasteGas);
                // BUGFIX: was missing — WasteGasData messages were forwarded without their payload.
                mBody.setDatas(datas);

                forward(m, "WasteGas", devId, mBody);
                break;
            case "AirTempHumi": // temperature & humidity
                mBody.setbType("TempHumiData");
                mBody.setCell(cell);

                TempHumi tempHumi = new TempHumi();
                tempHumi.setTemperature(statusMap.get("Temp"));
                tempHumi.setHumidity(statusMap.get("Humi"));
                tempHumi.setUptime(logTime);

                datas.add(tempHumi);
                mBody.setDatas(datas);

                forward(m, "TempHumi", devId, mBody);
                break;
            case "ManholeCover":
                mBody.setbType("WellData");

                Well well = new Well();
                if (statusMap.get("Status") == 0) {
                    well.setValue("00");
                    well.setUptime(logTime);
                } else {
                    // Only status 0 is forwarded; any other state is dropped.
                    break;
                }

                datas.add(well);
                mBody.setDatas(datas);

                forward(m, "Well", devId, mBody);
                break;
            case "Location":
                mBody.setbType("LocatorData");

                Locator locator = new Locator();
                locator.setLongitude(statusMap.get("Lon"));
                locator.setLatitude(statusMap.get("Lat"));
                locator.setUptime(logTime);

                datas.add(locator);
                mBody.setDatas(datas);

                forward(m, "Locator", devId, mBody);
                break;
            case "Flow": // flow meter: persisted to MySQL, not forwarded downstream
                Flowmeter flowmeter = new Flowmeter();
                flowmeter.setInstantFlow(statusMap.get("InsFlow"));
                flowmeter.setTotalFlow(statusMap.get("TotFlow"));
                flowmeter.setFlowVelocity(statusMap.get("Speed"));
                flowmeter.setWaterLevel(statusMap.get("Level"));
                flowmeter.setTemperature(statusMap.get("Temp"));

                new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode,
                        flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(),
                        flowmeter.getTemperature(), flowmeter.getInstantFlow(),
                        flowmeter.getTotalFlow(), logTime);
                break;
            case "WaterQuality":
                // KaiNa water-quality device: forwarded verbatim apart from the key rename.
                new Client().send(msg.replace("DevType", "devType") + "\r\n");
                break;
            default:
                break;
        }
    }

    // Fills the common Message envelope and forwards it over the netty client.
    private void forward(Message m, String devType, String devId, MBody mBody) throws Exception {
        m.setMType("Data");
        m.setDevType(devType);
        m.setDevCode(devId);
        m.setMBody(mBody);
        m.setTs(0L);
        m.setKafkaDataFlag(true);

        String strJson = JSON.toJSONString(m);
        System.out.println(strJson);
        new Client().send(strJson + "\r\n");
    }
}

// ===== src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java =====
package com.casic.yizhuang.kafka;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * Singleton holding the Kafka topic names, security account constants and the
 * configuration loaded from server/producer/consumer/client .properties files.
 */
public final class KafkaProperties {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class);

    // Topic names; in security mode an admin must grant the current user access.
    public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204";

    public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204";

    public final static String ALARM_TOPIC = "MSGQUEUE_8287";

    /**
     * Keytab file name of the machine-machine account requested by the user.
     */
    public static final String USER_KEYTAB_FILE = "user.keytab";

    /**
     * Principal name of the machine-machine account requested by the user.
     */
    public static final String USER_PRINCIPAL = "kafkauser";

    // Default configuration directory (historical hard-coded deployment path);
    // can be overridden with -Dyizhuang.config.dir=<dir>.
    private static final String DEFAULT_CONFIG_DIR =
            "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\";

    private static Properties serverProps = new Properties();

    private static Properties producerProps = new Properties();

    private static Properties consumerProps = new Properties();

    private static Properties clientProps = new Properties();

    private static KafkaProperties instance = null;

    private KafkaProperties() {
        String filePath = System.getProperty("yizhuang.config.dir", DEFAULT_CONFIG_DIR);
        if (!filePath.endsWith("\\") && !filePath.endsWith("/")) {
            filePath = filePath + File.separator;
        }
        try {
            loadIfExists(producerProps, filePath + "producer.properties");
            // BUGFIX: the existence check used to test producer.properties while
            // loading consumer.properties, so consumer config was loaded (or skipped)
            // based on the wrong file.
            loadIfExists(consumerProps, filePath + "consumer.properties");
            loadIfExists(serverProps, filePath + "server.properties");
            loadIfExists(clientProps, filePath + "client.properties");
        } catch (IOException e) {
            LOG.info("The Exception occured.", e);
        }
    }

    // Loads the file into target when present. BUGFIX: the original opened a
    // FileInputStream per file and never closed it; try-with-resources fixes the leak.
    private static void loadIfExists(Properties target, String path) throws IOException {
        File file = new File(path);
        if (file.exists()) {
            try (InputStream in = new FileInputStream(file)) {
                target.load(in);
            }
        }
    }

    public synchronized static KafkaProperties getInstance() {
        if (null == instance) {
            instance = new KafkaProperties();
        }
        return instance;
    }

    /**
     * Returns the configured value for a key, falling back to the default.
     *
     * @param key properties key (null is tolerated and logged)
     * @param defValue value returned when the key is absent
     * @return configured or default value
     */
    public String getValues(String key, String defValue) {
        String rtValue = null;

        if (null == key) {
            LOG.error("key is null");
        } else {
            rtValue = getPropertiesValue(key);
        }

        if (null == rtValue) {
            LOG.warn("KafkaProperties.getValues return null, key is " + key);
            rtValue = defValue;
        }

        LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue);

        return rtValue;
    }

    // Search order: server -> producer -> consumer -> client; first hit wins.
    private String getPropertiesValue(String key) {
        String rtValue = serverProps.getProperty(key);

        if (null == rtValue) {
            rtValue = producerProps.getProperty(key);
        }

        if (null == rtValue) {
            rtValue = consumerProps.getProperty(key);
        }

        if (null == rtValue) {
            rtValue = clientProps.getProperty(key);
        }

        return rtValue;
    }
}
// ===== src/main/java/com/casic/yizhuang/kafka/Producer.java =====
package com.casic.yizhuang.kafka;

import com.casic.yizhuang.util.KafkaUtils;
import com.casic.yizhuang.util.LoginUtil;
import kafka.utils.ShutdownableThread;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * One-shot Kafka producer: {@link #send(String)} creates a producer, publishes
 * a single record to {@link KafkaProperties#SEND_DATA_TOPIC} synchronously and
 * closes the producer again.
 */
public class Producer extends Thread {

    private static final Logger LOG = LoggerFactory.getLogger(Producer.class);

    /**
     * Publishes one message synchronously.
     *
     * @param content record value; the record key is always the empty string
     * @throws IOException if security preparation fails
     */
    public static void send(String content) throws IOException {
        // NOTE: in security mode this must be the user's own machine-machine account.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        Properties props = KafkaUtils.producerInitProperties();
        // A fresh producer per call (original behaviour); local instead of a shared
        // static field so concurrent callers cannot close each other's producer.
        KafkaProducer<String, String> producer = new KafkaProducer<>(props);
        LOG.info("producer start.");
        ProducerRecord<String, String> record =
                new ProducerRecord<>(KafkaProperties.SEND_DATA_TOPIC, "", content);
        try {
            LOG.info("kafka发送数据-------" + content);
            // Synchronous send: block until the broker acknowledges.
            producer.send(record).get();
        } catch (InterruptedException ie) {
            // BUGFIX: restore the interrupt status instead of swallowing it,
            // and log the failure at ERROR rather than INFO.
            Thread.currentThread().interrupt();
            LOG.error("The InterruptedException occured : {}.", ie);
        } catch (ExecutionException ee) {
            LOG.error("The ExecutionException occured : {}.", ee);
        } finally {
            producer.close();
        }

    }

}
// ===== src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java =====
package com.casic.yizhuang.kafka;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

import java.util.Map;

/**
 * Partitioner that interprets the record key as an integer and uses
 * key % partitionCount; non-numeric keys all land on partition 0.
 */
public class SimplePartitioner implements Partitioner {

    @Override
    public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
        String partitionKey = (String) key;
        int numPartitions = cluster.partitionsForTopic(topic).size();

        try {
            // Partition is selected directly from the numeric key.
            return Integer.parseInt(partitionKey) % numPartitions;
        } catch (NumberFormatException ignored) {
            // Unparseable keys are all assigned to partition 0.
            return 0;
        }
    }

    @Override
    public void close() {

    }

    @Override
    public void configure(Map map) {

    }
}

// ===== src/main/java/com/casic/yizhuang/model/Flowmeter.java =====
package com.casic.yizhuang.model;

/**
 * Flow-meter reading: water level, velocity, temperature and both
 * instantaneous and cumulative flow.
 */
public class Flowmeter {

    private Float waterLevel;   // water level
    private Float flowVelocity; // flow velocity
    private Float temperature;  // temperature
    private Float instantFlow;  // instantaneous flow
    private Float totalFlow;    // cumulative flow

    public Float getWaterLevel() {
        return waterLevel;
    }

    public void setWaterLevel(Float waterLevel) {
        this.waterLevel = waterLevel;
    }

    public Float getFlowVelocity() {
        return flowVelocity;
    }

    public void setFlowVelocity(Float flowVelocity) {
        this.flowVelocity = flowVelocity;
    }

    public Float getTemperature() {
        return temperature;
    }

    public void setTemperature(Float temperature) {
        this.temperature = temperature;
    }

    public Float getInstantFlow() {
        return instantFlow;
    }

    public void setInstantFlow(Float instantFlow) {
        this.instantFlow = instantFlow;
    }

    public Float getTotalFlow() {
        return totalFlow;
    }

    public void setTotalFlow(Float totalFlow) {
        this.totalFlow = totalFlow;
    }
}
a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + 
public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + 
// ===== src/main/java/com/casic/yizhuang/Main.java =====
package com.casic.yizhuang;

import com.casic.yizhuang.core.Server;
import com.casic.yizhuang.kafka.KafkaClient;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Application entry point: configures the log directory, starts the quartz
 * scheduler, the netty server thread and finally the blocking Kafka consumer.
 */
public class Main {

    private static final Logger logger = LoggerFactory.getLogger(Main.class);

    public static void main(String[] args) throws Exception {

        // Initialise the log base path from the working directory.
        String path = System.getProperty("user.dir");
        System.setProperty("log.base", path);
        System.out.println(path);

        logger.info("Start scheduler");
        try {
            new Scheduler().start();
        } catch (SchedulerException e) {
            e.printStackTrace();
        }

        System.out.println("Start server");
        logger.info("Start server");
        new Thread(new Server()).start();

        System.out.println("Start Kafka Consume");
        logger.info("Start Kafka Consume");

        Producer.send("怎么回事儿");
        new KafkaClient().kafkaDataConsumer();
    }

}

// ===== src/main/java/com/casic/yizhuang/access/AccessDBUtils.java =====
package com.casic.yizhuang.access;

import java.sql.*;

/**
 * Minimal JDBC helper for the local MS Access database (UCanAccess driver):
 * driver registration, connection creation and resource cleanup.
 */
public class AccessDBUtils {

    private static final String dbURL = "jdbc:ucanaccess://" +
            //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb";
            "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb";

    // Register the UCanAccess JDBC driver once at class-load time.
    static {
        try {
            Class.forName("net.ucanaccess.jdbc.UcanaccessDriver");
        } catch (ClassNotFoundException cnfex) {
            System.out.println("Problem in loading or registering MS Access JDBC driver");
            cnfex.printStackTrace();
        }
    }

    /**
     * Opens a connection to the Access database.
     *
     * @return the connection, or null when it cannot be opened
     */
    public static Connection getConn() {
        try {
            return DriverManager.getConnection(dbURL);
        } catch (Exception e) {
            System.out.println("AccessDB connection fail");
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Closes result set, statement and connection; each close is attempted
     * even when an earlier one fails.
     */
    public static void close(Connection con, PreparedStatement ps, ResultSet rs) {
        try {
            if (rs != null) {
                rs.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        try {
            if (ps != null) {
                ps.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        try {
            if (con != null) {
                con.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
// ===== src/main/java/com/casic/yizhuang/access/DBUtils.java =====
package com.casic.yizhuang.access;

import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Generic JDBC helpers over {@link AccessDBUtils}: parameterized update,
 * map-based select and reflection-based select into a bean class.
 */
public class DBUtils {
    /**
     * Executes an INSERT/DELETE/UPDATE statement.
     *
     * @param sql    SQL with ? placeholders
     * @param params positional parameters, may be null or empty
     * @return true when at least one row was affected
     * @throws SQLException when the connection cannot be obtained or the statement fails
     */
    public static boolean update(String sql, List<Object> params) throws SQLException {
        int result = -1;
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) {
                // BUGFIX: was `assert conn != null`, a no-op without -ea that
                // deferred the failure to an NPE on the next line.
                throw new SQLException("Failed to obtain database connection");
            }
            ps = conn.prepareStatement(sql);
            int index = 1;
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            result = ps.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace();
            // BUGFIX: rollback was called unconditionally on a (possibly null)
            // auto-commit connection; only roll back when a transaction is open.
            if (conn != null) {
                try {
                    if (!conn.getAutoCommit()) {
                        conn.rollback();
                    }
                } catch (SQLException e1) {
                    e1.printStackTrace();
                }
            }
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, null);
        }
        return result > 0;
    }

    /**
     * Runs a query and returns each row as a column-name → value map.
     * NULL column values are replaced with the empty string (original behaviour).
     *
     * @param sql    SQL with ? placeholders
     * @param params positional parameters, may be null or empty
     * @return list of row maps, empty when there are no rows
     * @throws SQLException when the connection cannot be obtained or the query fails
     */
    public static List<Map<String, Object>> select(String sql, List<Object> params) throws SQLException {
        List<Map<String, Object>> list = new ArrayList<>();
        int index = 1;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) {
                throw new SQLException("Failed to obtain database connection");
            }
            ps = conn.prepareStatement(sql);
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            rs = ps.executeQuery();
            ResultSetMetaData metaData = rs.getMetaData();
            int colCount = metaData.getColumnCount();
            while (rs.next()) {
                Map<String, Object> row = new HashMap<>();
                for (int i = 0; i < colCount; i++) {
                    String colName = metaData.getColumnName(i + 1);
                    Object colValue = rs.getObject(colName);
                    if (colValue == null) {
                        colValue = "";
                    }
                    row.put(colName, colValue);
                }
                list.add(row);
            }
        } catch (SQLException e) {
            e.printStackTrace();
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, rs);
        }
        return list;
    }

    /**
     * Runs a query and maps each row onto a new instance of {@code clazz} via
     * reflection; column names must match declared field names exactly.
     * NULL column values are replaced with the empty string (original behaviour).
     *
     * @param sql    SQL with ? placeholders
     * @param params positional parameters, may be null or empty
     * @param clazz  bean class with a no-arg constructor
     * @return list of populated beans
     */
    public static <T> List<T> select(String sql, List<Object> params,
            Class<T> clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException {
        List<T> list = new ArrayList<>();
        int index = 1;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) {
                throw new SQLException("Failed to obtain database connection");
            }
            ps = conn.prepareStatement(sql);
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            rs = ps.executeQuery();
            ResultSetMetaData metaData = rs.getMetaData();
            int colCount = metaData.getColumnCount();
            T t;
            while (rs.next()) {
                // newInstance() kept (not getDeclaredConstructor().newInstance())
                // to preserve the declared InstantiationException signature.
                t = clazz.newInstance();
                for (int i = 0; i < colCount; i++) {
                    String colName = metaData.getColumnName(i + 1);
                    Object colValue = rs.getObject(colName);
                    if (colValue == null) {
                        colValue = "";
                    }
                    Field field = clazz.getDeclaredField(colName);
                    field.setAccessible(true); // allow writing private bean fields
                    field.set(t, colValue);
                }
                list.add(t);
            }
        } catch (SQLException e) {
            e.printStackTrace();
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, rs);
        }
        return list;
    }

}
// ===== src/main/java/com/casic/yizhuang/core/Decoder.java =====
package com.casic.yizhuang.core;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.Charset;
import java.util.List;

/**
 * Decodes an inbound ByteBuf into a String using the platform default charset,
 * logging each frame as it arrives.
 */
public class Decoder extends MessageToMessageDecoder<ByteBuf> {

    private static final Logger logger = LoggerFactory.getLogger(Decoder.class);

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) {
        String text = msg.toString(Charset.defaultCharset());
        logger.info("receive : " + text);
        out.add(text);
    }

}

// ===== src/main/java/com/casic/yizhuang/core/Server.java =====
package com.casic.yizhuang.core;

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.LineBasedFrameDecoder;
import io.netty.handler.codec.string.StringDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Netty TCP server on port 11620: line-framed, string-decoded, handled by
 * {@link ServerHandler}. Blocks in run() until the channel closes.
 */
public class Server implements Runnable {
    private static final Logger logger = LoggerFactory.getLogger(Server.class);

    private int port = 11620;

    public void run() {
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        logger.info("server bind port = {}", port);

        ServerBootstrap bootstrap = new ServerBootstrap();
        bootstrap.group(bossGroup, workerGroup)
                .channel(NioServerSocketChannel.class)
                .option(ChannelOption.SO_BACKLOG, 1024)
                .childHandler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel socketChannel) {
                        // Frame by newline, decode to String, then business handler.
                        socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024));
                        socketChannel.pipeline().addLast(new StringDecoder());
                        socketChannel.pipeline().addLast(new ServerHandler());
                    }
                });

        try {
            ChannelFuture future = bootstrap.bind(port).sync();
            future.channel().closeFuture().sync();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            workerGroup.shutdownGracefully();
            bossGroup.shutdownGracefully();
        }
    }
}

// ===== src/main/java/com/casic/yizhuang/core/ServerHandler.java =====
package com.casic.yizhuang.core;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.casic.yizhuang.json.FlowmeterMessage;
import com.casic.yizhuang.json.MBody;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.StandardData;
import com.casic.yizhuang.model.StandardDataUtils;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.util.Common;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Handles line-framed flow-meter JSON messages: normalizes the device code,
 * looks up the well code, forwards each reading to Kafka as StandardData and
 * inserts it into the flow-meter table.
 */
public class ServerHandler extends ChannelInboundHandlerAdapter {

    private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class);

    @Override
    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
        super.channelRegistered(ctx);
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        super.channelActive(ctx);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        super.channelRead(ctx, msg);
        logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg);
        String json = (String) msg;

        FlowmeterMessage message = JSON.parseObject(json, FlowmeterMessage.class);
        String devcode = normalizeDevcode(message.getDevCode());

        // Resolve the well/point code for this device.
        List<WellInfo> wellInfoList = new DAO<WellInfo>().query(WellInfo.class, Common.SELECT_WELLCODE, devcode);
        String wellcode = wellInfoList.isEmpty() ? "" : wellInfoList.get(0).getWellcode();

        // Extract the payload.
        MBody mBody = message.getMBody();
        List<Object> datas = mBody.getDatas();
        if (datas == null || datas.size() <= 0) {
            logger.error("The mBody is empty!");
            return;
        }

        // Upload time: kept as string for Kafka, parsed to Date for the DB insert.
        String logtime = mBody.getLogTime();
        Date logTime = new SimpleDateFormat("yyyyMMddHHmmss").parse(logtime);

        for (Object data : datas) {
            Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class);
            StandardData standardData =
                    new StandardData(devcode, "Flow", standardStatusBuilder(flowmeter), logtime);
            // Forward to the big-data platform.
            Producer.send(JSON.toJSONString(standardData));
            new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode,
                    flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(),
                    flowmeter.getTemperature(), flowmeter.getInstantFlow(),
                    flowmeter.getTotalFlow(), logTime);
        }
    }

    // Pads historical short device codes onto the full 13-char scheme.
    private static String normalizeDevcode(String devcode) {
        if (devcode.length() == 1) {
            return "712019121200" + devcode;
        }
        if (devcode.length() == 2) {
            return "71201912120" + devcode;
        }
        return devcode;
    }

    // Builds the StandardData status JSON; field order matters downstream.
    private String standardStatusBuilder(Flowmeter flowmeter) {
        List<Map<String, Object>> appendList =
                StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level");
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp"));
        appendList.addAll(StandardDataUtils.appendListBuilder("", "Power"));
        return JSON.toJSONString(appendList);
    }


    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        super.exceptionCaught(ctx, cause);
        cause.printStackTrace();
        ctx.close();
    }
}

// ===== src/main/java/com/casic/yizhuang/json/BigDataMessage.java =====
package com.casic.yizhuang.json;

/**
 * Raw message envelope received from the big-data platform's Kafka topic.
 * Status carries a nested JSON array string of key/value pairs.
 */
public class BigDataMessage {

    private String devID;
    private String devType;
    private String provider;
    private String status;
    private String logTime; // "yyyy-MM-dd HH:mm:ss" as produced upstream — TODO confirm

    public String getDevID() {
        return devID;
    }

    public void setDevID(String devID) {
        this.devID = devID;
    }

    public String getDevType() {
        return devType;
    }

    public void setDevType(String devType) {
        this.devType = devType;
    }

    public String getProvider() {
        return provider;
    }

    public void setProvider(String provider) {
        this.provider = provider;
    }

    public String getLogTime() {
        return logTime;
    }

    public void setLogTime(String logTime) {
        this.logTime = logTime;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }
}
b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class 
Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void 
setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends 
ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, 
kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + 
consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// 
props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + 
List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + 
locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; 
+ /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is 
" + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + 
LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = 
waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + 
this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 
3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + 
http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java 
b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import 
java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * 
@param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext 
ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); 
+ bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + 
String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + 
appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void 
setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return 
kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = 
latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + 
+ public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + 
private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + 
props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import 
com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, 
KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + 
wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + 
case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); 
+ + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // 
producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git 
a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public 
Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String 
getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 
0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central 
+ Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git 
a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; 
+import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 
通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); 
+ + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + 
e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + 
FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + 
private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private 
/**
 * One key/value reading from the platform's "Status" JSON array,
 * e.g. {"Key": "Temp", "Value": 7.7}.
 */
public class Status {

    private String key;  // reading name, e.g. "Temp", "Humi", "Power"
    private Float value; // numeric reading; null when absent in the payload

    public String getKey() {
        return key;
    }

    public void setKey(String name) {
        this.key = name;
    }

    public Float getValue() {
        return value;
    }

    public void setValue(Float reading) {
        this.value = reading;
    }
}
/**
 * Temperature/humidity reading forwarded as the payload of a
 * "TempHumiData" message body.
 */
public class TempHumi {

    private Float temperature; // degrees; unit as reported by the device
    private Float humidity;    // relative humidity as reported
    private String uptime;     // acquisition timestamp as formatted text

    public Float getTemperature() {
        return temperature;
    }

    public void setTemperature(Float temp) {
        this.temperature = temp;
    }

    public Float getHumidity() {
        return humidity;
    }

    public void setHumidity(Float humi) {
        this.humidity = humi;
    }

    public String getUptime() {
        return uptime;
    }

    public void setUptime(String time) {
        this.uptime = time;
    }
}
Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 
+ + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + 
props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 
+ if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); 
+ wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + 
System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps 
/**
 * Loads producer/consumer/server/client property files from the fixed
 * configuration directory; missing files are silently skipped.
 * NOTE(review): the directory is a hard-coded absolute Windows path —
 * confirm whether it should come from configuration.
 */
private KafkaProperties()
{
    String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\";
    try
    {
        loadIfPresent(producerProps, filePath + "producer.properties");
        // FIX: the original checked producer.properties for existence but then
        // loaded consumer.properties, so consumer settings failed to load
        // whenever producer.properties was absent.
        loadIfPresent(consumerProps, filePath + "consumer.properties");
        loadIfPresent(serverProps, filePath + "server.properties");
        loadIfPresent(clientProps, filePath + "client.properties");
    }
    catch (IOException e)
    {
        LOG.info("The Exception occured.", e);
    }
}

/**
 * Loads {@code path} into {@code props} when the file exists.
 * FIX: the original leaked each FileInputStream; try-with-resources closes it.
 */
private static void loadIfPresent(Properties props, String path) throws IOException
{
    File file = new File(path);
    if (file.exists())
    {
        try (FileInputStream in = new FileInputStream(file))
        {
            props.load(in);
        }
    }
}
== rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + 
} + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = 
/**
 * Rain-gauge (hyetometer) reading.
 */
public class Hyetometer {

    private Float realtimeData;   // 上一分钟的实时雨量 — rainfall for the previous minute
    private Float cumulativeData; // 日累计雨量 — cumulative rainfall for the day

    public Float getRealtimeData() {
        return realtimeData;
    }

    public void setRealtimeData(Float lastMinuteRain) {
        this.realtimeData = lastMinuteRain;
    }

    public Float getCumulativeData() {
        return cumulativeData;
    }

    public void setCumulativeData(Float dailyTotalRain) {
        this.cumulativeData = dailyTotalRain;
    }
}
devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + 
org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package 
com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: 
Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + 
public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = 
clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String 
standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + 
+ public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private 
List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void 
setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = 
"user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + 
record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import 
// ==== src/main/java/com/casic/yizhuang/kafka/KafkaClient.java ====
// NOTE(review): this chunk is a mangled diff paste — generic type parameters and part of
// the import list were stripped by the extraction. Imports and generics below are
// reconstructed from usage; confirm package paths against the original repository.
package com.casic.yizhuang.kafka;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.casic.yizhuang.core.Client;
import com.casic.yizhuang.json.BigDataMessage;
import com.casic.yizhuang.json.MBody;
import com.casic.yizhuang.json.Message;
import com.casic.yizhuang.json.Status;
import com.casic.yizhuang.model.*;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.util.Common;
import com.casic.yizhuang.util.KafkaUtils;
import com.casic.yizhuang.util.LoginUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * Kafka consumer that reads device telemetry from {@code KafkaProperties.REVICE_DATA_TOPIC},
 * normalizes each record according to its device type, and either forwards the resulting
 * JSON to the downstream TCP endpoint ({@link Client}) or persists it to MySQL ({@link DAO}).
 */
public class KafkaClient {

    private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class);

    private KafkaConsumer<String, String> consumer;

    /**
     * Blocking consume loop: performs the (optional) security login, subscribes to the
     * receive topic and processes records forever. A bad record is logged and skipped;
     * it no longer aborts the whole loop.
     *
     * @throws Exception when the security preparation or consumer construction fails
     */
    public void kafkaDataConsumer() throws Exception {
        logger.info("Securitymode start.");
        // Security mode requires the manually-configured machine-machine account.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        Properties props = KafkaUtils.consumerInitProperties();
        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                String msg = record.value();
                logger.info("kafka接收数据-----" + msg);
                // BUG FIX: this used `return`, so the first ChangFeng / status-less message
                // terminated the consumer permanently. `continue` skips just that record.
                if (msg.contains("ChangFeng") || !msg.contains("Status")) {
                    continue;
                }
                try {
                    handleMessage(msg);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    // Parses one raw JSON message and dispatches on its DevType.
    private void handleMessage(String msg) throws Exception {
        BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class);
        logger.info(message.getDevID());

        String devId = message.getDevID();
        String logTime = message.getLogTime().replaceAll("-|:| ", "");

        // "Status" is itself a JSON array of {Key, Value} pairs; flatten it to a map.
        JSONArray statuses = JSONArray.parseArray(message.getStatus());
        Map<String, Float> statusMap = new HashMap<>();
        for (Object data : statuses) {
            Status status = JSON.toJavaObject((JSONObject) data, Status.class);
            statusMap.put(status.getKey(), status.getValue());
        }

        // Resolve the well code for this device (empty string when unknown).
        List<WellInfo> wellInfoList = new DAO<WellInfo>().query(WellInfo.class, Common.SELECT_WELLCODE, devId);
        String wellcode = wellInfoList.isEmpty() ? "" : wellInfoList.get(0).getWellcode();

        MBody mBody = new MBody();
        List<Object> datas = new ArrayList<>();
        // BUG FIX: guard against messages without a "Power" entry (was an unconditional NPE).
        Float power = statusMap.get("Power");
        Integer cell = (power == null) ? null : power.intValue();
        mBody.setLogTime(logTime);

        switch (message.getDevType()) {
            case "HarmfulGas": // harmful-gas sensor
                mBody.setbType("WasteGasData");
                WasteGas wasteGas = new WasteGas();
                wasteGas.setH2S(statusMap.get("H2S"));
                wasteGas.setCO(statusMap.get("CO"));
                wasteGas.setO2(statusMap.get("O2"));
                wasteGas.setCH4(statusMap.get("CH4"));
                wasteGas.setUptime(logTime);
                datas.add(wasteGas);
                // BUG FIX: datas was never attached to mBody in this branch, so the
                // forwarded WasteGas message carried no payload. (A duplicate
                // setH2S call was also removed.)
                mBody.setDatas(datas);
                forward("WasteGas", devId, mBody);
                break;
            case "AirTempHumi": // temperature / humidity sensor
                mBody.setbType("TempHumiData");
                mBody.setCell(cell);
                TempHumi tempHumi = new TempHumi();
                tempHumi.setTemperature(statusMap.get("Temp"));
                tempHumi.setHumidity(statusMap.get("Humi"));
                tempHumi.setUptime(logTime);
                datas.add(tempHumi);
                mBody.setDatas(datas);
                forward("TempHumi", devId, mBody);
                break;
            case "ManholeCover":
                mBody.setbType("WellData");
                // Only Status == 0 is forwarded, as value "00" — TODO confirm semantics.
                if (statusMap.get("Status") == 0) {
                    Well well = new Well();
                    well.setValue("00");
                    well.setUptime(logTime);
                    datas.add(well);
                    mBody.setDatas(datas);
                    forward("Well", devId, mBody);
                }
                break;
            case "Location":
                mBody.setbType("LocatorData");
                Locator locator = new Locator();
                locator.setLongitude(statusMap.get("Lon"));
                locator.setLatitude(statusMap.get("Lat"));
                locator.setUptime(logTime);
                datas.add(locator);
                mBody.setDatas(datas);
                forward("Locator", devId, mBody);
                break;
            case "Flow": // flow meter: persisted to MySQL, not forwarded
                Flowmeter flowmeter = new Flowmeter();
                flowmeter.setInstantFlow(statusMap.get("InsFlow"));
                flowmeter.setTotalFlow(statusMap.get("TotFlow"));
                flowmeter.setFlowVelocity(statusMap.get("Speed"));
                flowmeter.setWaterLevel(statusMap.get("Level"));
                flowmeter.setTemperature(statusMap.get("Temp"));
                new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode,
                        flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(),
                        flowmeter.getTemperature(), flowmeter.getInstantFlow(),
                        flowmeter.getTotalFlow(), logTime);
                break;
            case "WaterQuality":
                // KaiNa water-quality device: forwarded verbatim with the key renamed.
                new Client().send(msg.replace("DevType", "devType") + "\r\n");
                break;
            default:
                break;
        }
    }

    // Wraps the payload in the standard Message envelope and sends it downstream.
    private void forward(String devType, String devId, MBody mBody) throws Exception {
        Message m = new Message();
        m.setMType("Data");
        m.setDevType(devType);
        m.setDevCode(devId);
        m.setMBody(mBody);
        m.setTs(0L);
        m.setKafkaDataFlag(true);
        String strJson = JSON.toJSONString(m);
        System.out.println(strJson);
        new Client().send(strJson + "\r\n");
    }
}
// ==== src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java ====
package com.casic.yizhuang.kafka;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

/**
 * Singleton holder for the Kafka-related property files (producer / consumer /
 * server / client). {@link #getValues(String, String)} looks a key up across the
 * four files in a fixed fallback order.
 */
public final class KafkaProperties {

    private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class);

    // Topic names; in security mode an administrator must grant the current user access.
    public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204";

    public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204";

    public final static String ALARM_TOPIC = "MSGQUEUE_8287";

    /** Keytab file name of the self-applied machine-machine account. */
    public static final String USER_KEYTAB_FILE = "user.keytab";

    /** Principal name of the self-applied machine-machine account. */
    public static final String USER_PRINCIPAL = "kafkauser";

    private static Properties serverProps = new Properties();

    private static Properties producerProps = new Properties();

    private static Properties consumerProps = new Properties();

    private static Properties clientProps = new Properties();

    private static KafkaProperties instance = null;

    private KafkaProperties() {
        // TODO(review): hard-coded absolute Windows path — move to configuration.
        String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\";
        try {
            File proFile = new File(filePath + "producer.properties");
            if (proFile.exists()) {
                producerProps.load(new FileInputStream(filePath + "producer.properties"));
            }

            // BUG FIX: existence was checked on "producer.properties", so
            // consumer.properties was silently skipped whenever producer.properties was
            // absent, and an IOException was possible when only it existed.
            File conFile = new File(filePath + "consumer.properties");
            if (conFile.exists()) {
                consumerProps.load(new FileInputStream(filePath + "consumer.properties"));
            }

            File serFile = new File(filePath + "server.properties");
            if (serFile.exists()) {
                serverProps.load(new FileInputStream(filePath + "server.properties"));
            }

            File cliFile = new File(filePath + "client.properties");
            if (cliFile.exists()) {
                clientProps.load(new FileInputStream(filePath + "client.properties"));
            }
        } catch (IOException e) {
            LOG.info("The Exception occured.", e);
        }
    }

    /** Lazily creates and returns the singleton; synchronized for thread safety. */
    public synchronized static KafkaProperties getInstance() {
        if (null == instance) {
            instance = new KafkaProperties();
        }
        return instance;
    }

    /**
     * Returns the value for {@code key}, falling back to {@code defValue} when the key
     * is absent from all four property files (or {@code key} is null).
     *
     * @param key      property key to look up
     * @param defValue value returned when the key cannot be resolved
     */
    public String getValues(String key, String defValue) {
        String rtValue = null;

        if (null == key) {
            LOG.error("key is null");
        } else {
            rtValue = getPropertiesValue(key);
        }

        if (null == rtValue) {
            LOG.warn("KafkaProperties.getValues return null, key is " + key);
            rtValue = defValue;
        }

        LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue);

        return rtValue;
    }

    // Lookup order: server -> producer -> consumer -> client properties.
    private String getPropertiesValue(String key) {
        String rtValue = serverProps.getProperty(key);

        if (null == rtValue) {
            rtValue = producerProps.getProperty(key);
        }

        if (null == rtValue) {
            rtValue = consumerProps.getProperty(key);
        }

        if (null == rtValue) {
            rtValue = clientProps.getProperty(key);
        }

        return rtValue;
    }
}
// ==== src/main/java/com/casic/yizhuang/kafka/Producer.java ====
// NOTE(review): generic type parameters were stripped by the extraction; reconstructed
// from usage. The unused `kafka.utils.ShutdownableThread` import was removed.
package com.casic.yizhuang.kafka;

import com.casic.yizhuang.util.KafkaUtils;
import com.casic.yizhuang.util.LoginUtil;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * One-shot Kafka publisher: sends {@code content} synchronously to
 * {@code KafkaProperties.SEND_DATA_TOPIC}.
 * NOTE(review): a new KafkaProducer is created and closed on every call, which is
 * expensive; kept as-is to preserve behavior, but worth pooling.
 */
public class Producer extends Thread {

    private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class);

    private static KafkaProducer<String, String> producer;

    /**
     * Sends one message and blocks until the broker acknowledges it.
     *
     * @param content message payload (the record key is the empty string)
     * @throws IOException when the security preparation fails
     */
    public static void send(String content) throws IOException {
        // Security mode requires the manually-configured machine-machine account.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        Properties props = KafkaUtils.producerInitProperties();
        producer = new KafkaProducer<>(props);
        LOG.info("producer start.");
        ProducerRecord<String, String> record =
                new ProducerRecord<>(KafkaProperties.SEND_DATA_TOPIC, "", content);
        try {
            LOG.info("kafka发送数据-------" + content);
            // Synchronous send: block on the returned future.
            producer.send(record).get();
        } catch (InterruptedException ie) {
            // BUG FIX: restore the interrupt flag instead of swallowing it, and log
            // failures at error level (was info).
            Thread.currentThread().interrupt();
            LOG.error("The InterruptedException occured : {}.", ie);
        } catch (ExecutionException ee) {
            LOG.error("The ExecutionException occured : {}.", ee);
        } finally {
            producer.close();
        }
    }
}

// ==== src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java ====
package com.casic.yizhuang.kafka;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

import java.util.Map;

/**
 * Routes records whose key is a decimal string to partition
 * {@code parseInt(key) % numPartitions}; non-numeric keys all land on partition 0.
 */
public class SimplePartitioner implements Partitioner {

    @Override
    public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
        String partitionKey = (String) key;
        int numPartitions = cluster.partitionsForTopic(topic).size();
        try {
            // The key itself selects the partition.
            return Integer.parseInt(partitionKey) % numPartitions;
        } catch (NumberFormatException ne) {
            // Unparseable keys fall back to partition 0.
            return 0;
        }
    }

    @Override
    public void close() {
        // No resources to release.
    }

    @Override
    public void configure(Map<String, ?> map) {
        // No configuration needed.
    }
}

// ==== src/main/java/com/casic/yizhuang/model/Flowmeter.java ====
package com.casic.yizhuang.model;

/** Flow-meter reading: water level, velocity, temperature, instant and total flow. */
public class Flowmeter {

    private Float waterLevel;   // water level
    private Float flowVelocity; // flow velocity
    private Float temperature;  // temperature
    private Float instantFlow;  // instantaneous flow
    private Float totalFlow;    // cumulative flow

    public Float getWaterLevel() {
        return waterLevel;
    }

    public void setWaterLevel(Float waterLevel) {
        this.waterLevel = waterLevel;
    }

    public Float getFlowVelocity() {
        return flowVelocity;
    }

    public void setFlowVelocity(Float flowVelocity) {
        this.flowVelocity = flowVelocity;
    }

    public Float getTemperature() {
        return temperature;
    }

    public void setTemperature(Float temperature) {
        this.temperature = temperature;
    }

    public Float getInstantFlow() {
        return instantFlow;
    }

    public void setInstantFlow(Float instantFlow) {
        this.instantFlow = instantFlow;
    }

    public Float getTotalFlow() {
        return totalFlow;
    }

    public void setTotalFlow(Float totalFlow) {
        this.totalFlow = totalFlow;
    }
}

// ==== src/main/java/com/casic/yizhuang/model/Hyetometer.java ====
package com.casic.yizhuang.model;

/** Rain-gauge reading: previous-minute realtime rainfall and daily cumulative rainfall. */
public class Hyetometer {

    private Float realtimeData;   // realtime rainfall of the previous minute
    private Float cumulativeData; // cumulative rainfall for the day

    public Float getRealtimeData() {
        return realtimeData;
    }

    public void setRealtimeData(Float realtimeData) {
        this.realtimeData = realtimeData;
    }

    public Float getCumulativeData() {
        return cumulativeData;
    }

    public void setCumulativeData(Float cumulativeData) {
        this.cumulativeData = cumulativeData;
    }
}
// ==== src/main/java/com/casic/yizhuang/model/StandardData.java ====
package com.casic.yizhuang.model;

/**
 * Normalized telemetry record forwarded to the big-data platform. The provider is
 * fixed to "Provider-ChangFeng" by the constructor; ProviderData is never populated
 * by the visible code.
 */
public class StandardData {

    private String DevID;
    private String DevType;
    private String Provider;
    private String Status;
    private String LogTime;
    private String ProviderData;

    public StandardData(String DevID, String DevType, String Status, String LogTime) {
        this.DevID = DevID;
        this.DevType = DevType;
        this.Provider = "Provider-ChangFeng";
        this.Status = Status;
        this.LogTime = LogTime;
    }

    public String getDevID() {
        return DevID;
    }

    public void setDevID(String devID) {
        DevID = devID;
    }

    public String getDevType() {
        return DevType;
    }

    public void setDevType(String devType) {
        DevType = devType;
    }

    public String getProvider() {
        return Provider;
    }

    public void setProvider(String provider) {
        Provider = provider;
    }

    public String getStatus() {
        return Status;
    }

    public void setStatus(String status) {
        Status = status;
    }

    public String getLogTime() {
        return LogTime;
    }

    public void setLogTime(String logTime) {
        LogTime = logTime;
    }
}

// ==== src/main/java/com/casic/yizhuang/model/StandardDataUtils.java ====
// NOTE(review): the fastjson imports present in the original are unused here and were
// left out of this restyle's illustrative listing; generic parameters reconstructed.
package com.casic.yizhuang.model;

import java.text.SimpleDateFormat;
import java.util.*;

/** Small helpers for building the standardized status list and alarm text. */
public class StandardDataUtils {

    /**
     * Builds a single-entry list containing one {"Key": key, "Value": value} map —
     * the building block of the "Status" JSON array.
     */
    public static List<Map<String, Object>> appendListBuilder(String value, String key) {
        Map<String, Object> entry = new HashMap<>();
        entry.put("Key", key);
        entry.put("Value", value);
        List<Map<String, Object>> entries = new ArrayList<>();
        entries.add(entry);
        return entries;
    }

    /** Formats a human-readable alarm sentence stamped with the current time. */
    public static String alarmDataBuilder(String devcode, String value, String alarmContent) {
        String now = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
        return "在" + now + "," + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value;
    }
}

// ==== src/main/java/com/casic/yizhuang/model/WellInfo.java ====
package com.casic.yizhuang.model;

/** Maps the well-code lookup result (single column: wellcode). */
public class WellInfo {

    private String wellcode; // well identifier

    public String getWellcode() {
        return wellcode;
    }

    public void setWellcode(String wellcode) {
        this.wellcode = wellcode;
    }
}

// ==== src/main/java/com/casic/yizhuang/mysql/DAO.java ====
// NOTE(review): generic parameters reconstructed from the `new DAO<>()` call sites.
package com.casic.yizhuang.mysql;

import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Minimal reflection-based MySQL DAO. Result columns are matched to fields of the
 * target class by column label.
 * NOTE(review): connection/statement/result-set are instance fields, so a DAO
 * instance is NOT safe for concurrent use — callers create a fresh DAO per call.
 */
public class DAO<T> {

    ResultSet res = null;
    Connection conn = null;
    PreparedStatement pre = null;

    // Binds positional parameters (1-based) onto the prepared statement.
    private void setParameter(Object... parameter) {
        for (int i = 0; i < parameter.length; i++) {
            try {
                pre.setObject(i + 1, parameter[i]);
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Runs {@code sql} with the given parameters and maps every row onto a new
     * instance of {@code cls}. Returns an empty list on error.
     */
    public List<T> query(Class<T> cls, String sql, String... parameter) {
        List<T> rows = new ArrayList<>();
        try {
            conn = DBUtil.getConnection();
            pre = conn.prepareStatement(sql);
            setParameter(parameter);
            res = pre.executeQuery();
            while (res.next()) {
                T row = cls.newInstance(); // one bean per result row
                setData(cls, row);         // populate fields from the current row
                rows.add(row);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            DBUtil.closeAll(res, pre, conn);
        }
        return rows;
    }

    /**
     * Executes an INSERT/UPDATE/DELETE statement.
     *
     * @return number of affected rows (0 on error)
     */
    public int Update(String sql, Object... parameter) {
        int affected = 0;
        try {
            conn = DBUtil.getConnection();
            pre = conn.prepareStatement(sql);
            setParameter(parameter);
            affected = pre.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            DBUtil.closeAll(res, pre, conn);
        }
        return affected;
    }

    // Copies the current ResultSet row into obj: each column label must name a
    // declared field of cls (set reflectively, private fields included).
    private void setData(Class<T> cls, T obj) {
        try {
            ResultSetMetaData rsmd = res.getMetaData();
            int columnCount = rsmd.getColumnCount();
            for (int i = 1; i <= columnCount; i++) {
                String label = rsmd.getColumnLabel(i);
                Field field = cls.getDeclaredField(label);
                field.setAccessible(true);
                field.set(obj, res.getObject(i));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
// ==== src/main/java/com/casic/yizhuang/mysql/DBUtil.java ====
package com.casic.yizhuang.mysql;

import java.sql.*;
import java.util.ResourceBundle;

/**
 * MySQL connection helper. Connection settings are read once, at class load, from
 * the classpath resource bundle "jdbc" (jdbc.properties: driverClass, url,
 * username, password).
 */
public class DBUtil {

    private static String driverClass;
    private static String url;
    private static String username;
    private static String password;

    // Runs once when the class is loaded: read the jdbc bundle and register the driver.
    static {
        ResourceBundle rb = ResourceBundle.getBundle("jdbc"); // properties file name
        driverClass = rb.getString("driverClass");
        url = rb.getString("url");
        username = rb.getString("username");
        password = rb.getString("password");
        try {
            Class.forName(driverClass);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    /** Opens a new connection using the configured url/credentials. */
    public static Connection getConnection() throws Exception {
        return DriverManager.getConnection(url, username, password);
    }

    /** Closes result set, statement and connection in that order; each independently. */
    public static void closeAll(ResultSet rs, PreparedStatement pre, Connection conn) {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (pre != null) {
            try {
                pre.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}

// NOTE(review): the remainder of this span was a second, duplicated copy of pom.xml
// whose XML markup was stripped by the extraction. It is content-identical to the copy
// at the head of this dump and is unreconstructable/redundant, so it is omitted here.
// NOTE(review): this span began with the tail of the duplicated, markup-stripped
// pom.xml (omitted — see the note in the DBUtil section) followed by
// src/META-INF/MANIFEST.MF, which declares:
//   Manifest-Version: 1.0
//   Main-Class: com.casic.yizhuang.Main

// ==== src/main/java/com/casic/yizhuang/Main.java ====
package com.casic.yizhuang;

import com.casic.yizhuang.core.Server;
import com.casic.yizhuang.kafka.KafkaClient;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Application entry point: starts the Quartz scheduler, the Netty TCP server (on its
 * own thread) and finally the blocking Kafka consumer loop.
 */
public class Main {

    private static final Logger logger = LoggerFactory.getLogger(Main.class);

    public static void main(String[] args) throws Exception {
        // Log output is rooted at the working directory via the log.base property.
        String path = System.getProperty("user.dir");
        System.setProperty("log.base", path);
        System.out.println(path);

        logger.info("Start scheduler");
        try {
            new Scheduler().start();
        } catch (SchedulerException e) {
            e.printStackTrace();
        }

        System.out.println("Start server");
        logger.info("Start server");
        new Thread(new Server()).start();

        System.out.println("Start Kafka Consume");
        logger.info("Start Kafka Consume");

        // NOTE(review): looks like a leftover debug message published to Kafka on every
        // startup — confirm whether it is intentional before removing.
        Producer.send("怎么回事儿");
        new KafkaClient().kafkaDataConsumer(); // blocks forever
    }
}

// ==== src/main/java/com/casic/yizhuang/access/AccessDBUtils.java ====
package com.casic.yizhuang.access;

import java.sql.*;

/** Helper for the UCanAccess (MS Access) database: driver loading, connect, close. */
public class AccessDBUtils {

    // TODO(review): hard-coded absolute Windows path — move to configuration.
    private static final String dbURL = "jdbc:ucanaccess://" +
            //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb";
            "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb";

    // Register the UCanAccess JDBC driver once, at class-load time.
    static {
        try {
            Class.forName("net.ucanaccess.jdbc.UcanaccessDriver");
        } catch (ClassNotFoundException cnfex) {
            System.out.println("Problem in loading or registering MS Access JDBC driver");
            cnfex.printStackTrace();
        }
    }

    /** Opens a connection to the Access file; returns null when the connect fails. */
    public static Connection getConn() {
        try {
            return DriverManager.getConnection(dbURL);
        } catch (Exception e) {
            System.out.println("AccessDB connection fail");
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Closes ResultSet, then PreparedStatement, then Connection. The nested
     * try/finally chain guarantees each close is attempted even if the previous
     * one threw.
     */
    public static void close(Connection con, PreparedStatement ps, ResultSet rs) {
        try {
            if (rs != null)
                rs.close();
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            try {
                if (ps != null)
                    ps.close();
            } catch (SQLException e) {
                e.printStackTrace();
            } finally {
                if (con != null)
                    try {
                        con.close();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
            }
        }
    }
}
// ==== src/main/java/com/casic/yizhuang/access/DBUtils.java ====
// NOTE(review): generic type parameters were stripped by the extraction; reconstructed
// from usage.
package com.casic.yizhuang.access;

import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Generic helpers over the Access database: update, map-based select, bean select. */
public class DBUtils {

    /**
     * Executes an INSERT/UPDATE/DELETE statement.
     *
     * @param sql    statement with ? placeholders
     * @param params positional parameter values (may be null/empty)
     * @return true when at least one row was affected
     * @throws SQLException when the statement fails (re-thrown after logging)
     */
    public static boolean update(String sql, List<Object> params) throws SQLException {
        int result = -1;
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = AccessDBUtils.getConn();
            // BUG FIX: was `assert conn != null`, which is a no-op unless the JVM runs
            // with -ea (and then throws AssertionError). Fail with the declared
            // SQLException instead of a downstream NPE.
            if (conn == null) {
                throw new SQLException("Could not open Access database connection");
            }
            ps = conn.prepareStatement(sql);
            int index = 1;
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            result = ps.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
            // NOTE(review): rollback is ineffective here — the connection is in its
            // default auto-commit mode and no transaction was started. Kept, but guarded.
            if (conn != null) {
                try {
                    conn.rollback();
                } catch (SQLException e1) {
                    e1.printStackTrace();
                }
            }
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, null);
        }
        return result > 0;
    }

    /**
     * Runs a query and returns every row as a column-name -> value map
     * (null column values are replaced with the empty string).
     *
     * @throws SQLException when the query fails (re-thrown after logging)
     */
    public static List<Map<String, Object>> select(String sql, List<Object> params) throws SQLException {
        List<Map<String, Object>> list = new ArrayList<>();
        int index = 1;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) { // BUG FIX: was a no-op `assert`
                throw new SQLException("Could not open Access database connection");
            }
            ps = conn.prepareStatement(sql);
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            rs = ps.executeQuery();
            ResultSetMetaData metaData = rs.getMetaData();
            int col_len = metaData.getColumnCount();
            while (rs.next()) {
                Map<String, Object> map = new HashMap<>();
                for (int i = 0; i < col_len; i++) {
                    String cols_name = metaData.getColumnName(i + 1);
                    Object cols_value = rs.getObject(cols_name);
                    if (cols_value == null) {
                        cols_value = "";
                    }
                    map.put(cols_name, cols_value);
                }
                list.add(map);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, rs);
        }
        return list;
    }

    /**
     * Runs a query and maps every row onto a new instance of {@code clazz} by
     * reflection: each column name must match a declared field. Null column values
     * become the empty string (NOTE(review): this will fail for non-String fields —
     * confirm all mapped fields are Strings).
     */
    public static <T> List<T> select(String sql, List<Object> params,
                                     Class<T> clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException {
        List<T> list = new ArrayList<>();
        int index = 1;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) { // BUG FIX: was a no-op `assert`
                throw new SQLException("Could not open Access database connection");
            }
            ps = conn.prepareStatement(sql);
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            rs = ps.executeQuery();
            ResultSetMetaData metaData = rs.getMetaData();
            int cols_len = metaData.getColumnCount();
            T t;
            while (rs.next()) {
                t = clazz.newInstance(); // one bean per row
                for (int i = 0; i < cols_len; i++) {
                    String cols_name = metaData.getColumnName(i + 1);
                    Object cols_value = rs.getObject(cols_name);
                    if (cols_value == null) {
                        cols_value = "";
                    }
                    Field field = clazz.getDeclaredField(cols_name);
                    field.setAccessible(true); // allow private fields
                    field.set(t, cols_value);
                }
                list.add(t);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, rs);
        }
        return list;
    }
}

// ==== src/main/java/com/casic/yizhuang/core/Decoder.java ====
package com.casic.yizhuang.core;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.Charset;
import java.util.List;

/** Decodes an inbound ByteBuf into a String using the platform default charset. */
public class Decoder extends MessageToMessageDecoder<ByteBuf> {

    private static final Logger logger = LoggerFactory.getLogger(Decoder.class);

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) {
        logger.info("receive : " + msg.toString(Charset.defaultCharset()));
        out.add(msg.toString(Charset.defaultCharset()));
    }
}

// ==== src/main/java/com/casic/yizhuang/core/Server.java ====
package com.casic.yizhuang.core;

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.LineBasedFrameDecoder;
import io.netty.handler.codec.string.StringDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Netty TCP server on port 11620. Pipeline: line framing -> string decode ->
 * {@link ServerHandler}. Blocks until the channel closes.
 */
public class Server implements Runnable {

    private static final Logger logger = LoggerFactory.getLogger(Server.class);

    // TODO(review): hard-coded listen port — consider making it configurable.
    private int port = 11620;

    public void run() {
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        logger.info("server bind port = {}", port);

        ServerBootstrap b = new ServerBootstrap();
        b.group(bossGroup, workerGroup)
                .channel(NioServerSocketChannel.class)
                .option(ChannelOption.SO_BACKLOG, 1024)
                .childHandler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel socketChannel) {
                        socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024));
                        socketChannel.pipeline().addLast(new StringDecoder());
                        socketChannel.pipeline().addLast(new ServerHandler());
                    }
                });

        try {
            ChannelFuture channelFuture = b.bind(port).sync();
            channelFuture.channel().closeFuture().sync();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            workerGroup.shutdownGracefully();
            bossGroup.shutdownGracefully();
        }
    }
}
// ==== src/main/java/com/casic/yizhuang/core/ServerHandler.java ====
// NOTE(review): generic type parameters were stripped by the extraction; reconstructed
// from usage.
package com.casic.yizhuang.core;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.casic.yizhuang.json.FlowmeterMessage;
import com.casic.yizhuang.json.MBody;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.StandardData;
import com.casic.yizhuang.model.StandardDataUtils;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.util.Common;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Handles one framed line from a flow-meter device: parses the JSON, normalizes the
 * device code, forwards a StandardData record to Kafka and inserts each reading into
 * the flow-meter table.
 */
public class ServerHandler extends ChannelInboundHandlerAdapter {

    private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class);

    @Override
    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
        super.channelRegistered(ctx);
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        super.channelActive(ctx);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        super.channelRead(ctx, msg);
        logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg);
        String body = (String) msg;

        FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class);
        // Short device codes are zero-padded into the full 13-digit identifier.
        String devcode = normalizeDevcode(message.getDevCode());

        // Look up the well code for this device (empty string when unknown).
        List<WellInfo> wells = new DAO<WellInfo>().query(WellInfo.class, Common.SELECT_WELLCODE, devcode);
        String wellcode = wells.isEmpty() ? "" : wells.get(0).getWellcode();

        MBody mBody = message.getMBody();
        List<Object> datas = (mBody == null) ? null : mBody.getDatas();
        if (datas == null || datas.size() <= 0) {
            logger.error("The mBody is empty!");
            return;
        }

        // Upload time arrives as yyyyMMddHHmmss; kept as a string for Kafka and
        // parsed to a Date for the database insert.
        String logtime = mBody.getLogTime();
        Date logTime = new SimpleDateFormat("yyyyMMddHHmmss").parse(logtime);

        for (Object data : datas) {
            Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class);
            StandardData standardData =
                    new StandardData(devcode, "Flow", standardStatusBuilder(flowmeter), logtime);
            // Forward to the big-data platform, then persist locally.
            Producer.send(JSON.toJSONString(standardData));
            new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode,
                    flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(),
                    flowmeter.getTemperature(), flowmeter.getInstantFlow(),
                    flowmeter.getTotalFlow(), logTime);
        }
    }

    // Pads 1- and 2-character device codes with the fixed prefix; longer codes pass through.
    private String normalizeDevcode(String devcode) {
        switch (devcode.length()) {
            case 1:
                return "712019121200" + devcode;
            case 2:
                return "71201912120" + devcode;
            default:
                return devcode;
        }
    }

    // Serializes the reading into the standard {Key, Value} status array
    // (order: Level, InsFlow, TotFlow, Speed, Temp, Power).
    private String standardStatusBuilder(Flowmeter flowmeter) {
        List<Map<String, Object>> appendList =
                StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level");
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp"));
        appendList.addAll(StandardDataUtils.appendListBuilder("", "Power"));
        return JSON.toJSONString(appendList);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        super.exceptionCaught(ctx, cause);
        cause.printStackTrace();
        ctx.close();
    }
}

// ==== src/main/java/com/casic/yizhuang/json/BigDataMessage.java ====
package com.casic.yizhuang.json;

/** Raw telemetry envelope received from Kafka (Status holds a nested JSON array). */
public class BigDataMessage {

    private String devID;
    private String devType;
    private String provider;
    private String status;
    private String logTime;

    public String getDevID() {
        return devID;
    }

    public void setDevID(String devID) {
        this.devID = devID;
    }

    public String getDevType() {
        return devType;
    }

    public void setDevType(String devType) {
        this.devType = devType;
    }

    public String getProvider() {
        return provider;
    }

    public void setProvider(String provider) {
        this.provider = provider;
    }

    public String getLogTime() {
        return logTime;
    }

    public void setLogTime(String logTime) {
        this.logTime = logTime;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }
}
+ public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private 
List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void 
setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = 
"user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + 
record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import 
org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + 
logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = 
JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java 
b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if 
(cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + 
//如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float 
getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public 
static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... 
parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle 
rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- 
/dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + 
https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git 
a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; 
+import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 
通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); 
+ + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + 
e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + 
FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + 
private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private 
boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float 
getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public 
Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 
+ + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + 
props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 
+ if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); 
+ wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + 
System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps 
= new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null 
== rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + 
} + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = 
temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = 
devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + 
org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String 
path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != 
null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = 
rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + 
.channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + 
@Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String 
/**
 * Envelope of a message received from the big-data platform: device identity,
 * a JSON-encoded status array and the upload timestamp.
 */
public class BigDataMessage {

    private String devID;
    private String devType;
    private String provider;
    private String status;
    private String logTime;

    public String getDevID() { return devID; }

    public void setDevID(String devID) { this.devID = devID; }

    public String getDevType() { return devType; }

    public void setDevType(String devType) { this.devType = devType; }

    public String getProvider() { return provider; }

    public void setProvider(String provider) { this.provider = provider; }

    public String getStatus() { return status; }

    public void setStatus(String status) { this.status = status; }

    public String getLogTime() { return logTime; }

    public void setLogTime(String logTime) { this.logTime = logTime; }
}
public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java 
/**
 * One temperature/humidity reading forwarded to the local platform.
 */
public class TempHumi {

    private Float temperature;
    private Float humidity;
    private String uptime;

    public Float getTemperature() { return temperature; }

    public void setTemperature(Float temperature) { this.temperature = temperature; }

    public Float getHumidity() { return humidity; }

    public void setHumidity(Float humidity) { this.humidity = humidity; }

    public String getUptime() { return uptime; }

    public void setUptime(String uptime) { this.uptime = uptime; }
}
/**
 * One manhole-cover (well) status reading forwarded to the local platform.
 */
public class Well {

    private String value;
    private String uptime;

    public String getValue() { return value; }

    public void setValue(String value) { this.value = value; }

    public String getUptime() { return uptime; }

    public void setUptime(String uptime) { this.uptime = uptime; }
}
org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static 
Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer 
consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", 
"10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } 
+ + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + 
m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = 
LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = 
null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, 
KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 
+ private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String 
DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + 
alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + 
mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + 
maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package 
com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String 
sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new 
ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git 
a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java 
b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else 
if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + 
return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void 
setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + 
/**
 * One {Key, Value} entry from the platform's Status array, e.g.
 * {"Key": "Temp", "Value": 7.7}.
 */
public class Status {

    private String key;  // reading name, e.g. "Temp", "Humi", "Power"
    private Float value; // reading value

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public Float getValue() {
        return value;
    }

    public void setValue(Float value) {
        this.value = value;
    }
}
/**
 * Temperature/humidity reading forwarded to the platform as part of a
 * "TempHumiData" message body.
 */
public class TempHumi {

    private Float temperature; // degrees (unit as reported by the device)
    private Float humidity;    // relative humidity
    private String uptime;     // collapsed "yyyyMMddHHmmss" timestamp

    public Float getTemperature() {
        return temperature;
    }

    public void setTemperature(Float temperature) {
        this.temperature = temperature;
    }

    public Float getHumidity() {
        return humidity;
    }

    public void setHumidity(Float humidity) {
        this.humidity = humidity;
    }

    public String getUptime() {
        return uptime;
    }

    public void setUptime(String uptime) {
        this.uptime = uptime;
    }
}
/**
 * Manhole-cover state reading forwarded to the platform as part of a
 * "WellData" message body; {@code value} is a status code string ("00" =
 * cover opened, per the producing code in KafkaClient).
 */
public class Well {

    private String value;  // cover status code
    private String uptime; // collapsed "yyyyMMddHHmmss" timestamp

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getUptime() {
        return uptime;
    }

    public void setUptime(String uptime) {
        this.uptime = uptime;
    }
}
"value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, 
"org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; 
+import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = 
KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + 
wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter 
flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties 
clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if 
(null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java 
b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public 
/**
 * Rain-gauge reading: the previous minute's rainfall plus the running daily
 * accumulation.
 */
public class Hyetometer {

    private Float realtimeData;   // rainfall for the last minute
    private Float cumulativeData; // accumulated rainfall for the day

    public Float getRealtimeData() {
        return realtimeData;
    }

    public void setRealtimeData(Float realtimeData) {
        this.realtimeData = realtimeData;
    }

    public Float getCumulativeData() {
        return cumulativeData;
    }

    public void setCumulativeData(Float cumulativeData) {
        this.cumulativeData = cumulativeData;
    }
}
/**
 * Small helpers for building platform-standard payload fragments and alarm
 * texts.
 */
public class StandardDataUtils {

    /**
     * Wraps one reading as a single-entry list of {"Key": key, "Value": value}
     * maps, the shape the platform expects for a Status array element.
     *
     * @param value reading value (already stringified)
     * @param key   reading name
     * @return a mutable one-element list holding the pair
     */
    public static List<Map<String, String>> appendListBuilder(String value, String key) {
        Map<String, String> entry = new HashMap<>();
        entry.put("Key", key);
        entry.put("Value", value);

        List<Map<String, String>> entries = new ArrayList<>();
        entries.add(entry);
        return entries;
    }

    /**
     * Builds the human-readable alarm sentence for a device reading, stamped
     * with the current wall-clock time.
     *
     * @param devcode      device code the alarm concerns
     * @param value        offending reading value
     * @param alarmContent alarm description
     * @return the formatted alarm message
     */
    public static String alarmDataBuilder(String devcode, String value, String alarmContent) {
        String now = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
        return "在" + now + "," + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value;
    }

}
mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + 
org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void 
main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) 
{ + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } 
+ rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 
index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + 
b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + 
super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + 
+ } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String 
getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + 
public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float 
h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import 
org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + 
consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException 
occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new 
Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + 
statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + 
m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final 
class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String 
getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { 
+ LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + 
+public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public 
class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd 
HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + 
org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 
8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param 
sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, 
NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + 
out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git 
a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if 
(devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", 
"Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public 
String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean 
kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + 
} + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } 
+ + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 
消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + 
kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; 
+import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + 
Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + 
wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter 
flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties 
clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if 
(null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java 
b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public 
void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void 
setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file 
mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class 
Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j 
+ slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = 
LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 
这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + 
for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java 
b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind 
port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void 
channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), 
flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String 
logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void 
setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = 
value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class 
WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import 
org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + 
consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException 
occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new 
Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + 
statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + 
m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final 
class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String 
getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { 
+ LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + 
+public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public 
class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd 
HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class 
Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = 
"auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + 
Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + 
com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: 
com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch 
(ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != 
null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while 
(rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import 
io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import 
com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + 
String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public 
class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java 
/**
 * Shared message body carried inside device envelopes: block type, battery
 * level, upload time, and the list of per-device data records.
 */
public class MBody {

    private String bType;    // business block type, e.g. "WellData"
    private Integer cell;    // battery level — presumably percent; TODO confirm with device docs
    private List datas;      // device data POJOs (raw List preserved from original)
    private String logTime;  // upload timestamp string

    public String getbType() { return bType; }

    public void setbType(String bType) { this.bType = bType; }

    public Integer getCell() { return cell; }

    public void setCell(Integer cell) { this.cell = cell; }

    public List getDatas() { return datas; }

    public void setDatas(List datas) { this.datas = datas; }

    public String getLogTime() { return logTime; }

    public void setLogTime(String logTime) { this.logTime = logTime; }
}
/**
 * One {Key, Value} entry from a big-data platform status array,
 * e.g. {"Key": "Temp", "Value": 7.7}.
 */
public class Status {

    private String key;   // measurement name, e.g. "Temp", "Power"
    private Float value;  // measurement value

    public String getKey() { return key; }

    public void setKey(String key) { this.key = key; }

    public Float getValue() { return value; }

    public void setValue(Float value) { this.value = value; }
}
/**
 * Waste-gas sensor reading: gas concentrations, battery power, liquid-level
 * switch state, and upload time.
 *
 * NOTE(review): getters are named getH2S/getCO/... over fields h2s/co/...;
 * bean-introspection frameworks may derive unexpected property names from
 * this capitalization — confirm against the serialized JSON if it matters.
 */
public class WasteGas {

    private Float h2s;             // H2S concentration
    private Float co;              // CO concentration
    private Float o2;              // O2 concentration
    private Float ch4;             // CH4 concentration
    private Float power;           // battery power
    private Boolean liquidSwitch;  // liquid-level switch state
    private String uptime;         // upload time string

    public Float getH2S() { return h2s; }

    public void setH2S(Float h2s) { this.h2s = h2s; }

    public Float getCO() { return co; }

    public void setCO(Float co) { this.co = co; }

    public Float getO2() { return o2; }

    public void setO2(Float o2) { this.o2 = o2; }

    public Float getCH4() { return ch4; }

    public void setCH4(Float ch4) { this.ch4 = ch4; }

    public Float getPower() { return power; }

    public void setPower(Float power) { this.power = power; }

    public Boolean getLiquidSwitch() { return liquidSwitch; }

    public void setLiquidSwitch(Boolean liquidSwitch) { this.liquidSwitch = liquidSwitch; }

    public String getUptime() { return uptime; }

    public void setUptime(String uptime) { this.uptime = uptime; }
}
String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = 
"enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return 
props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import 
com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = 
"{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + 
mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), 
flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new 
File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + 
+ @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ 
/**
 * Standardized record forwarded to the big-data platform. The constructor
 * always stamps the provider as "Provider-ChangFeng".
 */
public class StandardData {

    private String DevID;         // device identifier
    private String DevType;       // device type, e.g. "Flow"
    private String Provider;      // fixed to "Provider-ChangFeng" by the constructor
    private String Status;        // JSON array of {Key, Value} entries
    private String LogTime;       // upload time string
    private String ProviderData;  // never set here — presumably filled downstream; TODO confirm

    /**
     * @param DevID   device identifier
     * @param DevType device type
     * @param Status  serialized status JSON
     * @param LogTime upload time
     */
    public StandardData(String DevID, String DevType, String Status, String LogTime) {
        this.DevID = DevID;
        this.DevType = DevType;
        this.Provider = "Provider-ChangFeng";
        this.Status = Status;
        this.LogTime = LogTime;
    }

    public String getDevID() { return DevID; }

    public void setDevID(String devID) { DevID = devID; }

    public String getDevType() { return DevType; }

    public void setDevType(String devType) { DevType = devType; }

    public String getProvider() { return Provider; }

    public void setProvider(String provider) { Provider = provider; }

    public String getStatus() { return Status; }

    public void setStatus(String status) { Status = status; }

    public String getLogTime() { return LogTime; }

    public void setLogTime(String logTime) { LogTime = logTime; }
}
/**
 * Helpers for building the standardized status payloads and alarm texts sent
 * to the big-data platform.
 */
public class StandardDataUtils {

    /**
     * Wraps one (key, value) pair as a single-entry list of {"Key": key,
     * "Value": value} maps, ready to be concatenated into a status array.
     *
     * @param value measurement value, already rendered as a string
     * @param key   measurement name, e.g. "Level"
     */
    public static List<Map<String, Object>> appendListBuilder(String value, String key) {
        Map<String, Object> entry = new HashMap<>();
        entry.put("Key", key);
        entry.put("Value", value);
        List<Map<String, Object>> result = new ArrayList<>();
        result.add(entry);
        return result;
    }

    /**
     * Renders a human-readable alarm sentence (in Chinese, per the platform's
     * convention) with the current wall-clock time.
     */
    public static String alarmDataBuilder(String devcode, String value, String alarmContent) {
        String timestamp = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
        return "在" + timestamp + "," + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value;
    }

}
//给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import 
java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + 
+ ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends 
ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; +import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String 
realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new 
StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + 
// Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, 
+ kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git 
a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is 
null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + 
builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + 
kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) 
throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + 
e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + 
rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 
0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@
package com.casic.yizhuang.core;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.Charset;
import java.util.List;

/**
 * Inbound decoder that turns a raw {@link ByteBuf} into a {@link String}
 * using the JVM default charset and passes it down the pipeline.
 */
public class Decoder extends MessageToMessageDecoder<ByteBuf> {

    private static final Logger logger = LoggerFactory.getLogger(Decoder.class);

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) {
        // Stringify the buffer once (the original called msg.toString(...)
        // twice: once for the log line and once for the decoded output).
        String text = msg.toString(Charset.defaultCharset());
        logger.info("receive : " + text);
        out.add(text);
    }

}
diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@
package com.casic.yizhuang.core;

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.LineBasedFrameDecoder;
import io.netty.handler.codec.string.StringDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Netty TCP server that accepts line-delimited JSON from flowmeter devices
 * and hands each line to {@link ServerHandler}.
 */
public class Server implements Runnable {
    private static final Logger logger = LoggerFactory.getLogger(Server.class);

    // Listening port for device connections.
    private int port = 11620;

    public void run() {
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        logger.info("server bind port = {}", port);

        ServerBootstrap b = new ServerBootstrap();
        b.group(bossGroup, workerGroup)
                .channel(NioServerSocketChannel.class)
                .option(ChannelOption.SO_BACKLOG, 1024)
                .childHandler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel socketChannel) {
                        // Frame by newline, decode bytes to String, then run business logic.
                        socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024));
                        socketChannel.pipeline().addLast(new StringDecoder());
                        socketChannel.pipeline().addLast(new ServerHandler());
                    }
                });

        try {
            ChannelFuture channelFuture = b.bind(port).sync();
            channelFuture.channel().closeFuture().sync();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag so the enclosing thread can
            // observe the interruption (the original only printed the trace).
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } finally {
            workerGroup.shutdownGracefully();
            bossGroup.shutdownGracefully();
        }
    }
}
diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@
package com.casic.yizhuang.core;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.casic.yizhuang.json.FlowmeterMessage;
import com.casic.yizhuang.json.MBody;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.StandardData;
import com.casic.yizhuang.model.StandardDataUtils;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.util.Common;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Business handler for flowmeter JSON lines: resolves the well code for the
 * device, persists each sample to MySQL and forwards a standardized copy to
 * the big-data platform via Kafka.
 */
public class ServerHandler extends ChannelInboundHandlerAdapter {

    private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class);

    @Override
    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
        super.channelRegistered(ctx);
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        super.channelActive(ctx);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        super.channelRead(ctx, msg);
        logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg);
        String body = (String) msg;

        FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class);
        // Short device codes ("1", "12") are left-padded to the full 13-char code.
        // (The original used an if/else-if with a dead empty else branch.)
        String devcode = normalizeDevcode(message.getDevCode());

        // Resolve the well (point) code for this device; empty when unknown.
        List<WellInfo> wellInfoList =
                new DAO<WellInfo>().query(WellInfo.class, Common.SELECT_WELLCODE, devcode);
        String wellcode = wellInfoList.isEmpty() ? "" : wellInfoList.get(0).getWellcode();

        MBody mBody = message.getMBody();
        List<Object> datas = mBody.getDatas();
        if (datas == null || datas.isEmpty()) {
            logger.error("The mBody is empty!");
            return;
        }

        // Upload time, formatted as yyyyMMddHHmmss by the device.
        String logtime = mBody.getLogTime();
        Date logTime = new SimpleDateFormat("yyyyMMddHHmmss").parse(logtime);

        // Insert every sample into the flow table and forward it to Kafka.
        for (Object data : datas) {
            Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class);
            String standardStatus = standardStatusBuilder(flowmeter);
            StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime);
            // Forward to the big-data platform.
            Producer.send(JSON.toJSONString(standardData));
            new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode,
                    flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(),
                    flowmeter.getTemperature(), flowmeter.getInstantFlow(),
                    flowmeter.getTotalFlow(), logTime);
        }
    }

    // Pads a 1- or 2-character device code into the canonical 13-character
    // code; longer codes are assumed already canonical and returned unchanged.
    private String normalizeDevcode(String devcode) {
        if (devcode.length() == 1) {
            return "712019121200" + devcode;
        }
        if (devcode.length() == 2) {
            return "71201912120" + devcode;
        }
        return devcode;
    }

    // Builds the standardized status JSON ([{Key,Value},...]) for one sample.
    private String standardStatusBuilder(Flowmeter flowmeter) {
        List<Map<String, String>> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level");
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp"));
        appendList.addAll(StandardDataUtils.appendListBuilder("", "Power"));
        return JSON.toJSONString(appendList);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        super.exceptionCaught(ctx, cause);
        cause.printStackTrace();
        ctx.close();
    }
}
diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@
package com.casic.yizhuang.json;

/** Envelope of a message received from the big-data platform. */
public class BigDataMessage {

    private String devID;    // device id
    private String devType;  // device type, e.g. "AirTempHumi"
    private String provider; // data provider name
    private String status;   // JSON array of {Key, Value} status entries
    private String logTime;  // acquisition time, "yyyy-MM-dd HH:mm:ss"

    public String getDevID() { return devID; }

    public void setDevID(String devID) { this.devID = devID; }

    public String getDevType() { return devType; }

    public void setDevType(String devType) { this.devType = devType; }

    public String getProvider() { return provider; }

    public void setProvider(String provider) { this.provider = provider; }

    public String getLogTime() { return logTime; }

    public void setLogTime(String logTime) { this.logTime = logTime; }

    public String getStatus() { return status; }

    public void setStatus(String status) { this.status = status; }
}
diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@
package com.casic.yizhuang.json;

/** Message uploaded by a flowmeter device over the TCP line protocol. */
public class FlowmeterMessage {

    private String mType;   // message type
    private String devType; // device type
    private String devCode; // device code
    private MBody mBody;    // payload (samples + upload time)
    private String ts;      // timestamp as sent by the device

    public String getMType() { return mType; }

    public void setMType(String mType) { this.mType = mType; }

    public String getDevType() { return devType; }

    public void setDevType(String devType) { this.devType = devType; }

    public String getDevCode() { return devCode; }

    public void setDevCode(String devCode) { this.devCode = devCode; }

    public String getTs() { return ts; }

    public void setTs(String ts) { this.ts = ts; }

    public MBody getMBody() { return mBody; }

    public void setMBody(MBody mBody) { this.mBody = mBody; }
}
diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@
package com.casic.yizhuang.json;

import java.util.List;

/** Message body: the data samples plus their upload time. */
public class MBody {

    private String bType;       // body type, e.g. "WasteGasData"
    private Integer cell;       // battery level, when the device reports one
    private List<Object> datas; // sample objects (device-type specific)
    private String logTime;     // upload time

    public String getbType() { return bType; }

    public void setbType(String bType) { this.bType = bType; }

    public String getLogTime() { return logTime; }

    public void setLogTime(String logTime) { this.logTime = logTime; }

    public List<Object> getDatas() { return datas; }

    public void setDatas(List<Object> datas) { this.datas = datas; }

    public Integer getCell() { return cell; }

    public void setCell(Integer cell) { this.cell = cell; }
}
diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@
package com.casic.yizhuang.json;

/** Outbound message forwarded to the downstream platform. */
public class Message {

    private String mType;
    private String devType;
    private String devCode;
    private MBody mBody;
    private boolean kafkaDataFlag; // marks data that originated from Kafka
    private Long ts;

    // Water-quality devices do not use this flag for now.
    public boolean getKafkaDataFlag() { return kafkaDataFlag; }

    public void setKafkaDataFlag(boolean kafkaDataFlag) { this.kafkaDataFlag = kafkaDataFlag; }

    public String getMType() { return mType; }

    public void setMType(String mType) { this.mType = mType; }

    public String getDevType() { return devType; }

    public void setDevType(String devType) { this.devType = devType; }

    public String getDevCode() { return devCode; }

    public void setDevCode(String devCode) { this.devCode = devCode; }

    public Long getTs() { return ts; }

    public void setTs(Long ts) { this.ts = ts; }

    public MBody getMBody() { return mBody; }

    public void setMBody(MBody mBody) { this.mBody = mBody; }
}
diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@
package com.casic.yizhuang.json;

/** One {Key, Value} entry of a big-data status array. */
public class Status {
    private String key;
    private Float value;

    public String getKey() { return key; }

    public void setKey(String key) { this.key = key; }

    public Float getValue() { return value; }

    public void setValue(Float value) { this.value = value; }
}
diff --git
a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@
package com.casic.yizhuang.json.device;

/** GPS locator sample. */
public class Locator {
    private Float longitude;
    private Float latitude;
    private String uptime; // sample time

    public Float getLongitude() { return longitude; }

    public void setLongitude(Float longitude) { this.longitude = longitude; }

    public Float getLatitude() { return latitude; }

    public void setLatitude(Float latitude) { this.latitude = latitude; }

    public String getUptime() { return uptime; }

    public void setUptime(String uptime) { this.uptime = uptime; }
}
diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@
package com.casic.yizhuang.json.device;

/** Temperature / humidity sample. */
public class TempHumi {
    private Float temperature;
    private Float humidity;
    private String uptime; // sample time

    public Float getTemperature() { return temperature; }

    public void setTemperature(Float temperature) { this.temperature = temperature; }

    public Float getHumidity() { return humidity; }

    public void setHumidity(Float humidity) { this.humidity = humidity; }

    public String getUptime() { return uptime; }

    public void setUptime(String uptime) { this.uptime = uptime; }
}
diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@
package com.casic.yizhuang.json.device;

/** Harmful-gas sample (concentrations plus battery and liquid switch). */
public class WasteGas {
    private Float h2s;
    private Float co;
    private Float o2;
    private Float ch4;
    private Float power;
    private Boolean liquidSwitch;
    private String uptime; // sample time

    public Float getH2S() { return h2s; }

    public void setH2S(Float h2s) { this.h2s = h2s; }

    public Float getCO() { return co; }

    public void setCO(Float co) { this.co = co; }

    public Float getO2() { return o2; }

    public void setO2(Float o2) { this.o2 = o2; }

    public Float getCH4() { return ch4; }

    public void setCH4(Float ch4) { this.ch4 = ch4; }

    public Float getPower() { return power; }

    public void setPower(Float power) { this.power = power; }

    public Boolean getLiquidSwitch() { return liquidSwitch; }

    public void setLiquidSwitch(Boolean liquidSwitch) { this.liquidSwitch = liquidSwitch; }

    public String getUptime() { return uptime; }

    public void setUptime(String uptime) { this.uptime = uptime; }
}
diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@
package com.casic.yizhuang.json.device;

/** Manhole-cover state sample. */
public class Well {
    private String value;  // "00" = normal (see KafkaClient ManholeCover branch)
    private String uptime; // sample time

    public String getValue() { return value; }

    public void setValue(String value) { this.value = value; }

    public String getUptime() { return uptime; }

    public void setUptime(String uptime) { this.uptime = uptime; }
}
diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@
package com.casic.yizhuang.kafka;

import com.casic.yizhuang.util.LoginUtil;
import kafka.utils.ShutdownableThread;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Collections;
import java.util.Properties;

/**
 * Example consumer thread: subscribes to {@link KafkaProperties#REVICE_DATA_TOPIC}
 * and logs every record it receives.
 */
public class Consumer extends ShutdownableThread {
    private static final Logger LOG = LoggerFactory.getLogger(Consumer.class);

    private final KafkaConsumer<String, String> consumer;

    private final String topic;

    // Maximum wait time of a single poll (ms).
    private final int waitTime = 1000;

    // Property keys (see Kafka consumer configuration).
    private final static String BOOTSTRAP_SERVER = "bootstrap.servers";
    private final static String GROUP_ID = "group.id";
    private final static String VALUE_DESERIALIZER = "value.deserializer";
    private final static String KEY_DESERIALIZER = "key.deserializer";
    // Security protocol: SASL_PLAINTEXT or PLAINTEXT.
    private final static String SECURITY_PROTOCOL = "security.protocol";
    private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name";
    private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name";
    private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit";
    private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms";
    private final static String SESSION_TIMEOUT_MS = "session.timeout.ms";

    /** Keytab file name of the user's machine-machine account. */
    private static final String USER_KEYTAB_FILE = "user.keytab";

    /** Principal name of the user's machine-machine account. */
    private static final String USER_PRINCIPAL = "kafkauser";

    /**
     * @param topic name of the topic to subscribe to
     */
    public Consumer(String topic) {
        super("KafkaConsumerExample", false);
        Properties props = initProperties();
        consumer = new KafkaConsumer<>(props);
        this.topic = topic;
        consumer.subscribe(Collections.singletonList(this.topic));
    }

    /** Builds the consumer configuration, falling back to the defaults below. */
    public static Properties initProperties() {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();

        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
        props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer"));
        props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true"));
        props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS, "1000"));
        props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000"));
        props.put(KEY_DESERIALIZER,
                kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        props.put(VALUE_DESERIALIZER,
                kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));

        // NOTE(review): was LOG.error in the original although this is not an
        // error condition; kept the message text, lowered the level.
        LOG.info("-------------" + props.toString());
        return props;
    }

    /** One unit of work: poll once and log every received record. */
    public void doWork() {
        ConsumerRecords<String, String> records = consumer.poll(waitTime);
        for (ConsumerRecord<String, String> record : records) {
            LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value()
                    + ") at offset " + record.offset());
        }
    }

    public static void main(String[] args) {
        if (LoginUtil.isSecurityModel()) {
            try {
                LOG.info("Securitymode start.");
                // NOTE: in security mode, replace with your own machine-machine account.
                LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE);
            } catch (IOException e) {
                LOG.error("Security prepare failure.");
                LOG.error("The IOException occured.", e);
                return;
            }
            LOG.info("Security prepare success.");
        }

        Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC);
        consumerThread.start();

        // Stop the consumer after 60 s; adjust as needed for real runs.
        try {
            Thread.sleep(60000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            LOG.info("The InterruptedException occured : {}.", e);
        } finally {
            consumerThread.shutdown();
            consumerThread.consumer.close();
        }
    }

}
diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@
package com.casic.yizhuang.kafka;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.casic.yizhuang.json.BigDataMessage;
import com.casic.yizhuang.json.MBody;
import com.casic.yizhuang.json.Message;
import com.casic.yizhuang.json.Status;
import com.casic.yizhuang.json.device.Locator;
import com.casic.yizhuang.json.device.TempHumi;
import com.casic.yizhuang.json.device.WasteGas;
import com.casic.yizhuang.json.device.Well;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.netty.Client;
import com.casic.yizhuang.util.Common;
import com.casic.yizhuang.util.KafkaUtils;
import com.casic.yizhuang.util.LoginUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * Consumes big-data platform records, converts them per device type and
 * either forwards them over the Netty {@link Client} or stores them in MySQL.
 */
public class KafkaClient {
    private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class);

    private KafkaConsumer<String, String> consumer;

    /**
     * Endless consume loop. Never returns normally; throws only on fatal
     * security-preparation / consumer errors.
     */
    public void kafkaDataConsumer() throws Exception {
        logger.info("Securitymode start.");
        // NOTE: in security mode, replace with your own machine-machine account.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        Properties props = KafkaUtils.consumerInitProperties();
        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                String msg = record.value();
                logger.info("kafka接收数据-----" + msg);
                // FIX: the original did `return` here, which terminated the
                // whole consumer on the first ChangFeng / status-less message.
                // Skip only the offending record instead.
                if (msg.contains("ChangFeng") || !msg.contains("Status")) {
                    continue;
                }

                try {
                    BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class);
                    logger.info(message.getDevID());

                    String devId = message.getDevID();
                    // Strip separators: "2020-03-16 08:47:13" -> "20200316084713".
                    String logTime = message.getLogTime().replaceAll("-|:| ", "");

                    // Parse the status JSON array into a Key -> Value map.
                    JSONArray statuses = JSONArray.parseArray(message.getStatus());
                    Map<String, Float> statusMap = new HashMap<>();
                    for (Object data : statuses) {
                        Status status = JSON.toJavaObject((JSONObject) data, Status.class);
                        statusMap.put(status.getKey(), status.getValue());
                    }

                    List<WellInfo> wellInfoList =
                            new DAO<WellInfo>().query(WellInfo.class, Common.SELECT_WELLCODE, devId);
                    String wellcode = wellInfoList.isEmpty() ? "" : wellInfoList.get(0).getWellcode();

                    MBody mBody = new MBody();
                    List<Object> datas = new ArrayList<>();

                    // FIX: guard against a missing "Power" entry — the original
                    // unboxed the map value unconditionally and could NPE.
                    Float power = statusMap.get("Power");
                    Integer cell = (power == null) ? null : power.intValue();
                    mBody.setLogTime(logTime);

                    Message m = new Message();

                    switch (message.getDevType()) {
                        case "HarmfulGas": // harmful-gas sensor
                            mBody.setbType("WasteGasData");

                            WasteGas wasteGas = new WasteGas();
                            wasteGas.setH2S(statusMap.get("H2S"));
                            wasteGas.setCO(statusMap.get("CO"));
                            wasteGas.setO2(statusMap.get("O2"));
                            wasteGas.setCH4(statusMap.get("CH4"));
                            wasteGas.setUptime(logTime);
                            datas.add(wasteGas);
                            // FIX: the original never attached `datas` to mBody in
                            // this branch, so the forwarded message had no payload.
                            mBody.setDatas(datas);

                            forward(m, "WasteGas", devId, mBody);
                            break;
                        case "AirTempHumi": // temperature / humidity
                            mBody.setbType("TempHumiData");
                            mBody.setCell(cell);

                            TempHumi tempHumi = new TempHumi();
                            tempHumi.setTemperature(statusMap.get("Temp"));
                            tempHumi.setHumidity(statusMap.get("Humi"));
                            tempHumi.setUptime(logTime);
                            datas.add(tempHumi);
                            mBody.setDatas(datas);

                            forward(m, "TempHumi", devId, mBody);
                            break;
                        case "ManholeCover":
                            mBody.setbType("WellData");

                            // Only a "Status == 0" reading is forwarded ("00" = normal);
                            // anything else (including a missing entry) is dropped.
                            Float coverStatus = statusMap.get("Status");
                            if (coverStatus == null || coverStatus != 0) {
                                break;
                            }
                            Well well = new Well();
                            well.setValue("00");
                            well.setUptime(logTime);
                            datas.add(well);
                            mBody.setDatas(datas);

                            forward(m, "Well", devId, mBody);
                            break;
                        case "Location":
                            mBody.setbType("LocatorData");

                            Locator locator = new Locator();
                            locator.setLongitude(statusMap.get("Lon"));
                            locator.setLatitude(statusMap.get("Lat"));
                            locator.setUptime(logTime);
                            datas.add(locator);
                            mBody.setDatas(datas);

                            forward(m, "Locator", devId, mBody);
                            break;
                        case "Flow": // flow monitor — stored locally, not forwarded
                            Flowmeter flowmeter = new Flowmeter();
                            flowmeter.setInstantFlow(statusMap.get("InsFlow"));
                            flowmeter.setTotalFlow(statusMap.get("TotFlow"));
                            flowmeter.setFlowVelocity(statusMap.get("Speed"));
                            flowmeter.setWaterLevel(statusMap.get("Level"));
                            flowmeter.setTemperature(statusMap.get("Temp"));

                            new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode,
                                    flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(),
                                    flowmeter.getTemperature(), flowmeter.getInstantFlow(),
                                    flowmeter.getTotalFlow(), logTime);
                            break;
                        case "WaterQuality":
                            // KaiNa water-quality devices are relayed with a renamed key.
                            msg = msg.replace("DevType", "devType");
                            new Client().send(msg + "\r\n");
                            break;
                        default:
                            break;
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    // Fills the common envelope fields, serializes and forwards one message.
    private void forward(Message m, String devType, String devId, MBody mBody) throws Exception {
        m.setMType("Data");
        m.setDevType(devType);
        m.setDevCode(devId);
        m.setMBody(mBody);
        m.setTs(0L);
        m.setKafkaDataFlag(true);

        String strJson = JSON.toJSONString(m);
        System.out.println(strJson);
        new Client().send(strJson + "\r\n");
    }

}
diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@
package com.casic.yizhuang.kafka;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

/**
 * Singleton lookup over the server / producer / consumer / client property
 * files; {@link #getValues} searches them in that order.
 */
public final class KafkaProperties {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class);

    // Topic names; in security mode the admin must grant this user access.
    public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204";

    public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204";

    public final static String ALARM_TOPIC = "MSGQUEUE_8287";

    /** Keytab file name of the user's machine-machine account. */
    public static final String USER_KEYTAB_FILE = "user.keytab";

    /** Principal name of the user's machine-machine account. */
    public static final String USER_PRINCIPAL = "kafkauser";

    private static Properties serverProps = new Properties();

    private static Properties producerProps = new Properties();

    private static Properties consumerProps = new Properties();

    private static Properties clientProps = new Properties();

    private static KafkaProperties instance = null;

    private KafkaProperties() {
        // NOTE(review): hard-coded Windows path — consider externalizing.
        String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\";
        try {
            loadIfExists(producerProps, filePath + "producer.properties");
            // FIX: the original tested producer.properties for existence here
            // but then loaded consumer.properties, so consumer settings were
            // silently skipped whenever producer.properties was absent.
            loadIfExists(consumerProps, filePath + "consumer.properties");
            loadIfExists(serverProps, filePath + "server.properties");
            loadIfExists(clientProps, filePath + "client.properties");
        } catch (IOException e) {
            LOG.info("The Exception occured.", e);
        }
    }

    // Loads `path` into `target` when the file exists; the original leaked
    // every FileInputStream it opened.
    private static void loadIfExists(Properties target, String path) throws IOException {
        File file = new File(path);
        if (file.exists()) {
            try (FileInputStream in = new FileInputStream(file)) {
                target.load(in);
            }
        }
    }

    public synchronized static KafkaProperties getInstance() {
        if (null == instance) {
            instance = new KafkaProperties();
        }
        return instance;
    }

    /**
     * Looks up a configuration value.
     *
     * @param key property key
     * @param defValue value returned when the key is absent everywhere
     * @return the configured value, or {@code defValue}
     */
    public String getValues(String key, String defValue) {
        String rtValue = null;

        if (null == key) {
            LOG.error("key is null");
        } else {
            rtValue = getPropertiesValue(key);
        }

        if (null == rtValue) {
            LOG.warn("KafkaProperties.getValues return null, key is " + key);
            rtValue = defValue;
        }

        LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue);

        return rtValue;
    }

    // Searches server -> producer -> consumer -> client properties, in order.
    private String getPropertiesValue(String key) {
        String rtValue = serverProps.getProperty(key);

        if (null == rtValue) {
            rtValue = producerProps.getProperty(key);
        }

        if (null == rtValue) {
            rtValue = consumerProps.getProperty(key);
        }

        if (null == rtValue) {
            rtValue = clientProps.getProperty(key);
        }

        return rtValue;
    }
}
diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@
package com.casic.yizhuang.kafka;

import com.casic.yizhuang.util.KafkaUtils;
import com.casic.yizhuang.util.LoginUtil;
import kafka.utils.ShutdownableThread;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

/** Synchronous publisher to {@link KafkaProperties#SEND_DATA_TOPIC}. */
public class Producer extends Thread {

    private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class);

    private static KafkaProducer<String, String> producer;

    /**
     * Publishes {@code content} and waits for the broker acknowledgement.
     * NOTE(review): a producer is created and closed on every call, which is
     * expensive; kept to preserve the existing contract.
     *
     * @throws IOException when security preparation fails
     */
    public static void send(String content) throws IOException {
        // NOTE: in security mode, replace with your own machine-machine account.
        if (LoginUtil.isSecurityModel()) {
            LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE);
        }
        Properties props = KafkaUtils.producerInitProperties();
        producer = new KafkaProducer<>(props);
        LOG.info("producer start.");
        ProducerRecord<String, String> record =
                new ProducerRecord<>(KafkaProperties.SEND_DATA_TOPIC, "", content);
        try {
            LOG.info("kafka发送数据-------" + content);
            // Synchronous send: block until the broker acknowledges.
            producer.send(record).get();
        } catch (InterruptedException ie) {
            // FIX: restore the interrupt flag for the caller.
            Thread.currentThread().interrupt();
            LOG.info("The InterruptedException occured : {}.", ie);
        } catch (ExecutionException ee) {
            LOG.info("The ExecutionException occured : {}.", ee);
        } finally {
            producer.close();
        }
    }

}
diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@
package com.casic.yizhuang.kafka;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

import java.util.Map;

/**
 * Partitioner that interprets the record key as an integer and maps it to
 * {@code key % numPartitions}; non-numeric keys all land on partition 0.
 */
public class SimplePartitioner implements Partitioner {

    @Override
    public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
        String partitionKey = (String) key;
        int numPartitions = cluster.partitionsForTopic(topic).size();

        try {
            return Integer.parseInt(partitionKey) % numPartitions;
        } catch (NumberFormatException ne) {
            // Unparsable keys are all assigned to partition 0.
            return 0;
        }
    }

    @Override
    public void close() {
    }

    @Override
    public void configure(Map<String, ?> map) {
    }
}
diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@
package com.casic.yizhuang.model;
+public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public 
/**
 * Canonical message envelope forwarded to the big-data platform.
 * Field names keep their PascalCase spelling because they are serialised
 * as-is (via the getters) by fastjson.
 */
public class StandardData {

    private String DevID;        // device code
    private String DevType;      // device type, e.g. "RainFall"
    private String Provider;     // fixed data-provider tag
    private String Status;       // JSON-encoded key/value status list
    private String LogTime;      // reading timestamp
    private String ProviderData; // optional provider-specific payload

    public StandardData(String DevID, String DevType, String Status, String LogTime) {
        this.DevID = DevID;
        this.DevType = DevType;
        // Provider is constant for this integration.
        this.Provider = "Provider-ChangFeng";
        this.Status = Status;
        this.LogTime = LogTime;
    }

    public String getDevID() {
        return DevID;
    }

    public void setDevID(String devID) {
        DevID = devID;
    }

    public String getDevType() {
        return DevType;
    }

    public void setDevType(String devType) {
        DevType = devType;
    }

    public String getProvider() {
        return Provider;
    }

    public void setProvider(String provider) {
        Provider = provider;
    }

    public String getStatus() {
        return Status;
    }

    public void setStatus(String status) {
        Status = status;
    }

    public String getLogTime() {
        return LogTime;
    }

    public void setLogTime(String logTime) {
        LogTime = logTime;
    }

    // FIX: ProviderData previously had no accessors, so the field was dead
    // weight that could never be set or serialised. Null values are not
    // emitted by fastjson by default, so existing output is unchanged.
    public String getProviderData() {
        return ProviderData;
    }

    public void setProviderData(String providerData) {
        ProviderData = providerData;
    }
}
/**
 * Lookup result carrying the well code a device is bound to
 * (populated from the "wellcode" column alias).
 */
public class WellInfo {

    // Well identifier
    private String wellcode;

    public String getWellcode() {
        return wellcode;
    }

    public void setWellcode(String wellcode) {
        this.wellcode = wellcode;
    }
}
    // Populates a JavaBean from the current ResultSet row via reflection:
    // each result-set column label must match a declared field name on cls.
    // NOTE(review): generic type parameters appear to have been lost in
    // extraction (likely Class<T> cls) — confirm against the original source.
    private void setData(Class cls, T obj){
        /**
         * Uses ResultSetMetaData to enumerate the row's columns, then writes
         * each column value into the matching declared field via Field.set.
         */
        try {
            ResultSetMetaData rsmd = res.getMetaData();
            int col = rsmd.getColumnCount();
            for (int i=1; i<=col; i++){
                String DBField = rsmd.getColumnLabel(i);
                Field field = cls.getDeclaredField(DBField);
                field.setAccessible(true); // allow writing private fields
                field.set(obj, res.getObject(i));
            }
        } catch (Exception e) {
            // Best-effort: a missing field or access failure leaves obj partially set.
            e.printStackTrace();
        }
    }
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class 
public class Common {

    // Inserts one flow-meter sample; placeholders in order:
    // devcode, well_code, WaterLevel, FlowVelocity, Temperature, InstantFlow, TotalFlow, logtime.
    public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)";

    // Inserts one rain-gauge sample; placeholders in order:
    // devcode, well_code, REALTIMEDATA, CUMULATIVEDATA.
    public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)";

    // Resolves a device code to its well code through the device/well join tables;
    // the result column is aliased "wellcode" to match WellInfo's field name.
    public static final String SELECT_WELLCODE =
            "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?";

}
    /**
     * Builds the consumer configuration for the secured (Kerberos/SASL) Kafka
     * cluster. Entries missing from the KafkaProperties store fall back to the
     * defaults given here.
     *
     * NOTE(review): the JAAS config path below is an absolute Windows path
     * baked into the code — confirm it matches the deployment host.
     */
    public static Properties consumerInitProperties() {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();

        // Broker address list
        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
        // Group id
        props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer"));
        // Whether offsets are committed automatically
        props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true"));
        // Interval between automatic offset commits
        props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000"));
        // Session timeout
        props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000"));
        // Deserializer class for message keys
        props.put(KEY_DESERIALIZER,
            kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        // Deserializer class for message values
        props.put(VALUE_DESERIALIZER,
            kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        // Security protocol (SASL_PLAINTEXT or PLAINTEXT)
        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));

        props.put(SASL_MECHANISM, "GSSAPI");
        // Kerberos service name
        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
        // Kerberos domain name
        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));

        System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf");

        return props;
    }
    /**
     * Builds the producer configuration for the secured (Kerberos/SASL) Kafka
     * cluster; entries missing from the KafkaProperties store fall back to the
     * defaults given here.
     *
     * NOTE(review): the JAAS config path below is an absolute Windows path
     * baked into the code — confirm it matches the deployment host.
     */
    public static Properties producerInitProperties() {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();

        // Broker address list
        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
        // Client ID
        props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer"));
        // Serializer class for message keys
        props.put(KEY_SERIALIZER,
            kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
        // Serializer class for message values
        props.put(VALUE_SERIALIZER,
            kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
        // Security protocol (SASL_PLAINTEXT or PLAINTEXT)
        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
        // Kerberos service name
        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
        // Kerberos domain name
        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));
        // Custom partitioner class (routes by numeric record key)
        props.put(PARTITIONER_NAME,
            kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner"));

        System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf");
        return props;
    }
    /**
     * Regenerates the Kafka client JAAS file for the given principal/keytab
     * and points java.security.auth.login.config at it.
     *
     * NOTE(review): the target path is hard-coded to a Windows directory (the
     * commented-out line shows the previous resources-relative location) —
     * confirm it matches the deployment host.
     *
     * @param principal  Kerberos principal to log in as
     * @param keytabPath absolute path of the keytab file
     * @throws IOException if an existing jaas file cannot be deleted or the new one written
     */
    public static void setJaasFile(String principal, String keytabPath)
        throws IOException {
// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator;
        String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient";
        String jaasPath = filePath + JAAS_POSTFIX;

        // Escape backslashes so the Windows path survives inside the jaas file
        jaasPath = jaasPath.replace("\\", "\\\\");
        // Remove any stale jaas file before rewriting it
        deleteJaasFile(jaasPath);
        writeJaasFile(jaasPath, principal, keytabPath);
        System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath);
    }
System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module 
    /**
     * Performs the Kerberos bootstrap for the Kafka client: sets the krb5.conf
     * location, the Zookeeper server principal, and generates the JAAS file
     * for the given principal/keytab.
     *
     * NOTE(review): paths are resolved against the hard-coded filePath config
     * directory above — confirm it matches the deployment host.
     *
     * @param principal  principal name; "@HADOOP.COM" realm is appended here
     * @param keyTabFile keytab file name inside the config directory
     * @throws IOException if any of the underlying property/file setups fail
     */
    public static void securityPrepare(String principal, String keyTabFile) throws IOException {
// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator;
        String krbFile = filePath + "krb5.conf";
        String userKeyTableFile = filePath + keyTabFile;
        // Escape backslashes for the Windows paths written into config values
        userKeyTableFile = userKeyTableFile.replace("\\", "\\\\");
        krbFile = krbFile.replace("\\", "\\\\");
        principal+="@HADOOP.COM";
        LoginUtil.setKrb5Config(krbFile);
        LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com");
        LoginUtil.setJaasFile(principal, userKeyTableFile);
    }
Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. + if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + 
com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + 
public class Main {

    private static final Logger logger = LoggerFactory.getLogger(Main.class);

    /**
     * Process entry point: starts the Quartz polling job, the Netty TCP
     * server, and the Kafka consumer loop (which blocks this thread).
     */
    public static void main(String[] args) throws Exception {

        // Point the logging base directory at the working directory
        String path=System.getProperty("user.dir");
        System.setProperty("log.base",path);
        System.out.println(path);

        logger.info("Start scheduler");
        try {
            new Scheduler().start();
        } catch (SchedulerException e) {
            e.printStackTrace();
        }

        System.out.println("Start server");
        logger.info("Start server");
        new Thread(new Server()).start();

        System.out.println("Start Kafka Consume");
        logger.info("Start Kafka Consume");

        // NOTE(review): this send looks like leftover debug traffic ("what's
        // going on" in Chinese) published to the data topic — confirm whether
        // it should be removed.
        Producer.send("怎么回事儿");
        new KafkaClient().kafkaDataConsumer();
    }

}
public class AccessDBUtils {

    // UCanAccess JDBC URL of the weather-station Access database.
    private static final String dbURL = "jdbc:ucanaccess://" +
            //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb";
            "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb";

    // Register the UCanAccess driver once when the class is loaded.
    static {
        try {
            Class.forName("net.ucanaccess.jdbc.UcanaccessDriver");
        } catch (ClassNotFoundException cnfex) {
            System.out.println("Problem in loading or registering MS Access JDBC driver");
            cnfex.printStackTrace();
        }
    }

    /**
     * Opens a connection to the Access database.
     *
     * @return a live connection, or null when the connect fails
     *         (NOTE(review): callers must null-check; consider rethrowing instead)
     */
    public static Connection getConn() {
        try {
            return DriverManager.getConnection(dbURL);
        } catch (Exception e) {
            System.out.println("AccessDB connection fail");
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Closes the result set, statement and connection, in that order.
     * Each close is attempted independently, so a failure on one resource
     * never prevents the others from being released — the same guarantee the
     * original expressed with nested try/finally blocks.
     */
    public static void close(Connection con, PreparedStatement ps, ResultSet rs) {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (ps != null) {
            try {
                ps.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (con != null) {
            try {
                con.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection 
package com.casic.yizhuang.core;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.Charset;
import java.util.List;

/**
 * Decodes an inbound {@link ByteBuf} into a {@link String} using the JVM
 * default charset, logging every frame as it arrives.
 */
public class Decoder extends MessageToMessageDecoder<ByteBuf> {

    private static final Logger logger = LoggerFactory.getLogger(Decoder.class);

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) {
        // Render the buffer once, then log and forward the same text.
        String text = msg.toString(Charset.defaultCharset());
        logger.info("receive : " + text);
        out.add(text);
    }

}
b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- 
package com.casic.yizhuang.core;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.casic.yizhuang.json.FlowmeterMessage;
import com.casic.yizhuang.json.MBody;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.StandardData;
import com.casic.yizhuang.model.StandardDataUtils;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.util.Common;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;


/**
 * Business handler for flowmeter TCP frames. Each inbound line is parsed as a
 * {@link FlowmeterMessage}, enriched with the well code looked up via
 * {@code SELECT_WELLCODE}, forwarded to Kafka in the standard format, and
 * persisted to the flowmeter table.
 */
public class ServerHandler extends ChannelInboundHandlerAdapter {

    private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class);

    @Override
    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
        super.channelRegistered(ctx);
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        super.channelActive(ctx);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        super.channelRead(ctx, msg);
        logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg);
        String body = (String) msg;

        FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class);

        // Short device codes are padded up to the full 13-character code.
        // Fix: removed the empty trailing else branch; codes of length >= 3
        // are taken as-is, same as before.
        String devcode = message.getDevCode();
        if (devcode.length() == 1) {
            devcode = "712019121200" + devcode;
        } else if (devcode.length() == 2) {
            devcode = "71201912120" + devcode;
        }

        // Resolve the well code for this device; empty when unknown.
        List<WellInfo> wellInfoList = new DAO<>().query(WellInfo.class, Common.SELECT_WELLCODE, devcode);
        String wellcode = wellInfoList.isEmpty() ? "" : wellInfoList.get(0).getWellcode();

        // Extract the batch of readings; nothing to do for an empty body.
        MBody mBody = message.getMBody();
        List<Object> datas = mBody.getDatas();
        if (datas == null || datas.isEmpty()) { // fix: isEmpty() instead of size() <= 0
            logger.error("The mBody is empty!");
            return;
        }

        // Upload time arrives as yyyyMMddHHmmss.
        String logtime = mBody.getLogTime();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
        Date logTime = sdf.parse(logtime);

        // Forward each reading to the big-data platform, then persist it.
        for (Object data : datas) {
            Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class);
            String standardStatus = standardStatusBuilder(flowmeter);
            StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime);
            Producer.send(JSON.toJSONString(standardData));
            new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode,
                    flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(),
                    flowmeter.getTemperature(), flowmeter.getInstantFlow(),
                    flowmeter.getTotalFlow(), logTime);
        }
    }

    /**
     * Serialises the flowmeter readings into the platform's key/value status
     * JSON. The Power entry is always present but empty (this device reports
     * no battery level).
     */
    private String standardStatusBuilder(Flowmeter flowmeter) {
        List<Map<String, Object>> appendList =
                StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level");
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp"));
        appendList.addAll(StandardDataUtils.appendListBuilder("", "Power"));
        return JSON.toJSONString(appendList);
    }


    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        super.exceptionCaught(ctx, cause);
        cause.printStackTrace();
        ctx.close();
    }
}
cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + 
package com.casic.yizhuang.json;

import java.util.List;

/**
 * Message body of the internal device protocol: a batch of device-specific
 * readings plus the upload time, battery level and body-type discriminator.
 */
public class MBody {

    private String bType;       // body type discriminator, e.g. "TempHumiData"
    private Integer cell;       // battery level; may be null
    private List<Object> datas; // device-specific reading objects
    private String logTime;     // upload time string

    // NB: accessor names getbType/setbType are deliberate and must not change.
    public String getbType() { return bType; }

    public void setbType(String bType) { this.bType = bType; }

    public Integer getCell() { return cell; }

    public void setCell(Integer cell) { this.cell = cell; }

    public List<Object> getDatas() { return datas; }

    public void setDatas(List<Object> datas) { this.datas = datas; }

    public String getLogTime() { return logTime; }

    public void setLogTime(String logTime) { this.logTime = logTime; }
}
public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java 
b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } 
+ + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = 
"key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + 
kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import 
com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + 
consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + 
datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + 
flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + 
+ private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // 
consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b 
--- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float 
getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return 
Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class 
Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = 
"auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + 
Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String 
getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + 
System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module 
module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static 
Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. + if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ 
-0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + 
https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git 
a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; 
+import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 
通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); 
+ + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + 
e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + 
FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + 
private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private 
boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float 
getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public 
Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 
+ + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + 
props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 
+ if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); 
+ wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + 
System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps 
= new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null 
== rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + 
} + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = 
temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = 
devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class 
Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = 
"auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + 
Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String 
getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + 
System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module 
module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static 
Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. + if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/resources/consumer.properties b/src/main/resources/consumer.properties new file mode 
100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + 
org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + 
} catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } 
+ } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name 
= metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import 
io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { 
+ socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws 
Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + 
appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java 
b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java 
b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return 
h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
+ +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + 
props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + 
Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// 
props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode 
= ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + 
case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = 
"TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == 
rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new 
KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float 
totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; 
+ + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java 
b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java
package com.casic.yizhuang.model;

public class WellInfo{
    private String wellcode; // well identifier

    public String getWellcode() {
        return wellcode;
    }

    public void setWellcode(String wellcode) {
        this.wellcode = wellcode;
    }
}
diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java
package com.casic.yizhuang.mysql;

import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;


// Minimal reflection-based JDBC DAO: maps result-set columns onto same-named
// bean fields.
// NOTE(review): res/conn/pre are shared instance state, so a DAO instance is
// not safe for concurrent or re-entrant use — create one per operation.
public class DAO {
    ResultSet res = null;
    Connection conn = null;
    PreparedStatement pre = null;

    // Bind positional parameters onto the prepared statement.
    private void setParameter(Object... parameter) {
        for (int i = 0; i < parameter.length; i++) {
            try {
                pre.setObject(i + 1, parameter[i]);
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    // Run a SELECT and materialize each row as an instance of cls, matching
    // columns to declared fields by label. Errors are printed and an empty
    // (or partial) list is returned.
    public List query(Class cls , String sql, String... parameter){
        List list = new ArrayList<>();
        try {
            conn = DBUtil.getConnection();
            pre = conn.prepareStatement(sql);
            setParameter(parameter); // bind parameter values
            res = pre.executeQuery();

            while(res.next()){
                T obj = cls.newInstance(); // create a fresh instance per row
                setData(cls, obj); // populate its fields from the current row
                list.add(obj);
            }

        } catch (Exception e) {
            e.printStackTrace();
        }finally{
            DBUtil.closeAll(res, pre, conn);
        }
        return list;
    }

    /** Execute an INSERT/UPDATE/DELETE statement. */
    public int Update(String sql, Object... parameter){
        int num = 0 ;
        try {
            conn = DBUtil.getConnection();
            pre = conn.prepareStatement(sql);
            setParameter(parameter); // bind parameter values
            num = pre.executeUpdate();

        } catch (Exception e) {
            e.printStackTrace();
        }finally{
            DBUtil.closeAll(res, pre, conn);
        }
        return num; // number of affected rows
    }



    // Copy the current result-set row into obj: for every column, find the
    // declared field with the same name as the column label and set it via
    // reflection (private fields are made accessible).
    private void setData(Class cls, T obj){
        /**
         * Uses Field.set(Object, Object) to write values, and
         * ResultSetMetaData to discover the column labels of the row.
         */
        try {
            ResultSetMetaData rsmd = res.getMetaData();
            int col = rsmd.getColumnCount();
            for (int i=1; i<=col; i++){
                String DBField = rsmd.getColumnLabel(i);
                Field field = cls.getDeclaredField(DBField);
                field.setAccessible(true); // allow writing private fields
                field.set(obj, res.getObject(i));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}
diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java
package com.casic.yizhuang.mysql;

import java.sql.*;
import java.util.ResourceBundle;


// JDBC connection helper; credentials come from jdbc.properties on the
// classpath.
public class DBUtil {
    private static String driverClass;
    private static String url;
    private static String username;
    private static String password;

    // Static initializer runs once at class load: read the connection
    // settings and register the JDBC driver.
    static{
        // Loads key/value pairs from the properties file (base name "jdbc")
        ResourceBundle rb = ResourceBundle.getBundle("jdbc");
        driverClass = rb.getString("driverClass");
        url = rb.getString("url");
        username = rb.getString("username");
        password = rb.getString("password");
        try {
            Class.forName(driverClass);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }
    // Obtain a new connection.
    public static Connection getConnection() throws Exception{
        return DriverManager.getConnection(url,username,password);
    }
    // Close resources.
    public static void
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
import com.casic.yizhuang.access.DBUtils;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.StandardData;
import com.casic.yizhuang.model.StandardDataUtils;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.util.Common;
import org.quartz.Job;
import org.quartz.JobExecutionContext;

import java.text.SimpleDateFormat;
import java.util.*;

// Scheduled job: pulls the latest rain-gauge reading for each of the two
// known stations from the Access DB, forwards a normalized message to the
// big-data platform and persists the reading to MySQL.
public class ReadAccessJob implements Job {

    @Override
    public void execute(JobExecutionContext jobExecutionContext) {

        String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date());
        System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100));


        try {
            // Two stations, idfac 1 and 2 (int loop variable, so the string
            // concatenation below cannot be injected into).
            for (int i = 1; i <= 2; i++) {

                // Latest row per station ("select top 1 ... order by time desc")
                String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc ";
                List> list = DBUtils.select(sql, null);

                for (Map map : list) {
                    // Map station id to the public device code.
                    String devcode = map.get("idfac").toString();
                    switch (devcode) {
                        case "1":
                            devcode = "16064235";
                            break;
                        case "2":
                            devcode = "16064234";
                            break;
                        default:
                            break;
                    }

                    // NOTE(review): map.get(...).toString() throws NPE when a
                    // column is NULL — confirm e2/e3/time are non-nullable.
                    String realTimeData = map.get("e2").toString();
                    String cumulativeData = map.get("e3").toString();
                    String time = map.get("time").toString();

                    Class wellInfoClass = WellInfo.class;
                    List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode);
                    if (!wellInfoList.isEmpty()) {
                        String wellcode = wellInfoList.get(0).getWellcode();
                        String standardStatus = standardStatusBuilder(realTimeData, cumulativeData);
                        StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time);
                        // Forward to the big-data platform
                        Producer.send(JSON.toJSONString(standardData));
                        new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData);
                    }

                }
            }

        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Build the status JSON array (InsFall/DayFall/Temp/Power entries);
    // the temperature format is not yet known so it is sent empty.
    private String standardStatusBuilder(String realTimeData, String cumulativeData) {
        List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall");
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp"));
        appendList.addAll(StandardDataUtils.appendListBuilder("", "Power"));
        String standardStatus = JSON.toJSONString(appendList);
        return standardStatus;
    }

}

diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java
package com.casic.yizhuang.quartz;

import com.casic.yizhuang.kafka.KafkaProperties;
import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;
import java.util.concurrent.TimeUnit;

// Starts the Quartz scheduler with a single repeating ReadAccessJob.
public class Scheduler {



    // Schedules ReadAccessJob to start immediately and repeat forever at a
    // 5-minute interval.
    public void start() throws SchedulerException {
        SchedulerFactory schedulerFactory = new StdSchedulerFactory();
        org.quartz.Scheduler scheduler = schedulerFactory.getScheduler();
        JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class)
                .withIdentity("job", "group").build();
        Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup")
                .startNow()
                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                        .withIntervalInMinutes(5)
                        //.withIntervalInSeconds(5)
                        .repeatForever()).build();

        scheduler.scheduleJob(jobDetail, trigger);
        scheduler.start();
    }
}
diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java
package com.casic.yizhuang.util;

public class
Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = 
"auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + 
Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String 
getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + 
System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module 
module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static 
Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. + if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/resources/consumer.properties b/src/main/resources/consumer.properties new file mode 
100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties new file mode 100644 index 0000000..5fcbb11 --- /dev/null +++ b/src/main/resources/jdbc.properties @@ -0,0 +1,9 @@ +driverClass = com.mysql.cj.jdbc.Driver +url = jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?serverTimezone=GMT%2B8 +username = root +password = casic203yz2db + +#url = jdbc:mysql://111.198.10.15:11102/smartwell_yizhuang?serverTimezone=GMT%2B8 + +#username = root +#password = Casic203! \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + 
commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import 
com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) 
{ + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 
1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } 
+ list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = 
LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import 
java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", 
standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = 
devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void 
setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private 
String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java 
b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 
100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 
+ * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + 
LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public 
class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = 
message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new 
Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package 
com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties 
getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = 
LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git 
a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + 
standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... 
parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle 
rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java 
b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new 
package com.casic.yizhuang.quartz;

import com.alibaba.fastjson.JSON;
import com.casic.yizhuang.access.DBUtils;
import com.casic.yizhuang.kafka.Producer;
import com.casic.yizhuang.model.Flowmeter;
import com.casic.yizhuang.model.StandardData;
import com.casic.yizhuang.model.StandardDataUtils;
import com.casic.yizhuang.model.WellInfo;
import com.casic.yizhuang.mysql.DAO;
import com.casic.yizhuang.util.Common;
import org.quartz.Job;
import org.quartz.JobExecutionContext;

import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Quartz job that polls the local Access database (table dCurrent) for the
 * two rain-gauge stations, forwards the newest sample of each to the
 * big-data platform via Kafka and mirrors it into MySQL.
 */
public class ReadAccessJob implements Job {

    @Override
    public void execute(JobExecutionContext jobExecutionContext) {
        String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date());
        System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100));

        try {
            // Stations are numbered 1 and 2 in the Access table.
            for (int i = 1; i <= 2; i++) {
                String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc ";
                List<Map<String, Object>> rows = DBUtils.select(sql, null);

                for (Map<String, Object> row : rows) {
                    String devcode = toDevcode(row.get("idfac").toString());
                    String realTimeData = row.get("e2").toString();
                    String cumulativeData = row.get("e3").toString();
                    String time = row.get("time").toString();

                    List<WellInfo> wellInfoList =
                            new DAO<WellInfo>().query(WellInfo.class, Common.SELECT_WELLCODE, devcode);
                    if (wellInfoList.isEmpty()) {
                        // Device is not registered to any well; skip it.
                        continue;
                    }
                    String wellcode = wellInfoList.get(0).getWellcode();
                    String standardStatus = standardStatusBuilder(realTimeData, cumulativeData);
                    StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time);
                    // Forward to the big-data platform.
                    Producer.send(JSON.toJSONString(standardData));
                    new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Map the Access station id (idfac) to the platform device code. */
    private String toDevcode(String idfac) {
        if ("1".equals(idfac)) {
            return "16064235";
        }
        if ("2".equals(idfac)) {
            return "16064234";
        }
        return idfac;
    }

    /**
     * Build the JSON status array sent with each sample.
     * Temperature format is still unknown, so Temp/Power are sent empty.
     * NOTE(review): element type of appendListBuilder's result assumed to be
     * Map&lt;String, Object&gt; — confirm against StandardDataUtils.
     */
    private String standardStatusBuilder(String realTimeData, String cumulativeData) {
        List<Map<String, Object>> appendList =
                StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall");
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall"));
        appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp"));
        appendList.addAll(StandardDataUtils.appendListBuilder("", "Power"));
        return JSON.toJSONString(appendList);
    }
}
.withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + 
package com.casic.yizhuang.util;

import com.casic.yizhuang.kafka.KafkaProperties;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Properties;

/**
 * Factory for the Kafka consumer/producer {@link Properties} used by this
 * application, pre-configured for a Kerberized (SASL_PLAINTEXT) cluster.
 * Defaults are overlaid with values from {@link KafkaProperties}.
 */
public class KafkaUtils {

    // Maximum wait time of a single request in ms (currently unused).
    private final int waitTime = 1000;

    // Broker connection address property key.
    private final static String BOOTSTRAP_SERVER = "bootstrap.servers";

    // Consumer group id property key.
    private final static String GROUP_ID = "group.id";

    // Deserializer class for message values.
    private final static String VALUE_DESERIALIZER = "value.deserializer";

    // Deserializer class for message keys.
    private final static String KEY_DESERIALIZER = "key.deserializer";

    // Protocol type: SASL_PLAINTEXT or PLAINTEXT.
    private final static String SECURITY_PROTOCOL = "security.protocol";

    // SASL mechanism.
    private final static String SASL_MECHANISM = "sasl.mechanism";

    // Kerberos service name.
    private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name";

    // Kerberos domain name.
    private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name";

    // Whether offsets are committed automatically.
    private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit";

    // Interval between automatic offset commits.
    private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms";

    // Consumer session timeout.
    private final static String SESSION_TIMEOUT_MS = "session.timeout.ms";

    // Client id.
    private final static String CLIENT_ID = "client.id";

    // Serializer class for message keys.
    private final static String KEY_SERIALIZER = "key.serializer";

    // Serializer class for message values.
    private final static String VALUE_SERIALIZER = "value.serializer";

    // Partitioner class name.
    private final static String PARTITIONER_NAME = "partitioner.class";

    // Default number of messages to send (currently unused).
    private final static int MESSAGE_NUM = 100;

    // FIX: this absolute path was duplicated as a literal inside both
    // factory methods; it is now defined once.
    // NOTE(review): hard-coded Windows path — should come from configuration.
    private final static String JAAS_CONF_PATH =
            "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf";

    /** Point the JVM at the JAAS login configuration file. */
    private static void applyJaasConfig() {
        System.setProperty("java.security.auth.login.config", JAAS_CONF_PATH);
    }

    /**
     * Build the property set for a {@link KafkaConsumer}.
     *
     * @return consumer properties with Kerberos/SASL settings applied
     */
    public static Properties consumerInitProperties() {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();

        // Broker connection address.
        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
        // Consumer group id.
        props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer"));
        // Commit offsets automatically.
        props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true"));
        // Interval between automatic offset commits.
        props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS, "1000"));
        // Session timeout.
        props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000"));
        // Key deserializer.
        props.put(KEY_DESERIALIZER,
                kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        // Value deserializer.
        props.put(VALUE_DESERIALIZER,
                kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer"));
        // Security protocol.
        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));

        props.put(SASL_MECHANISM, "GSSAPI");
        // Kerberos service name.
        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
        // Kerberos domain name.
        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));

        applyJaasConfig();

        return props;
    }

    /**
     * Build the property set for a Kafka producer.
     *
     * @return producer properties with Kerberos/SASL settings applied
     */
    public static Properties producerInitProperties() {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();

        // Broker connection address.
        props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007"));
        // Client id.
        props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer"));
        // Key serializer.
        props.put(KEY_SERIALIZER,
                kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
        // Value serializer.
        props.put(VALUE_SERIALIZER,
                kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer"));
        // Security protocol: SASL_PLAINTEXT or PLAINTEXT.
        props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT"));
        // Kerberos service name.
        props.put(SASL_KERBEROS_SERVICE_NAME, "kafka");
        // Kerberos domain name.
        props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com"));
        // Partitioner class.
        props.put(PARTITIONER_NAME,
                kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner"));

        applyJaasConfig();
        return props;
    }
}
package com.casic.yizhuang.util;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Properties;

/**
 * Helpers for preparing Kerberos/JAAS security configuration for the
 * Storm, Kafka and ZooKeeper clients: generates the jaas.conf file and
 * sets the related JVM system properties.
 */
public class LoginUtil {
    private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class);

    /**
     * JAAS login-context sections written into the generated jaas.conf.
     */
    public enum Module {
        STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client");

        private String name;

        private Module(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }
    }

    /** Platform line separator used when generating jaas.conf. */
    private static final String LINE_SEPARATOR = System.getProperty("line.separator");

    /** jaas file postfix. */
    private static final String JAAS_POSTFIX = ".jaas.conf";

    /** Whether the running JVM is an IBM JDK (different Krb5 login module). */
    private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM");

    /** IBM JDK login module. */
    private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required";

    /** Oracle/OpenJDK login module. */
    private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required";

    /** Zookeeper quorum principal system-property key. */
    public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal";

    /** java security krb5 file path system-property key. */
    public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";

    /** java security login file path system-property key. */
    public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config";

    /**
     * Generate the jaas.conf file for the given principal/keytab and point
     * the JVM at it via java.security.auth.login.config.
     *
     * @param principal  Kerberos principal to log in as
     * @param keytabPath path of the keytab file
     * @throws IOException if the file cannot be (re)created
     */
    public static void setJaasFile(String principal, String keytabPath)
            throws IOException {
//        String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator;
        String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient";
        String jaasPath = filePath + JAAS_POSTFIX;

        // Escape backslashes of the Windows path.
        jaasPath = jaasPath.replace("\\", "\\\\");
        // Recreate the jaas file from scratch.
        deleteJaasFile(jaasPath);
        writeJaasFile(jaasPath, principal, keytabPath);
        System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath);
    }

    /**
     * Set the zookeeper server principal system property and verify it took.
     *
     * @param zkServerPrincipal principal of the zookeeper quorum
     * @throws IOException if the property could not be set as requested
     */
    public static void setZookeeperServerPrincipal(String zkServerPrincipal)
            throws IOException {
        System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal);
        String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL);
        if (ret == null) {
            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null.");
        }
        if (!ret.equals(zkServerPrincipal)) {
            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + ".");
        }
    }

    /**
     * Set the krb5 configuration file system property and verify it took.
     *
     * @param krb5ConfFile path of krb5.conf
     * @throws IOException if the property could not be set as requested
     */
    public static void setKrb5Config(String krb5ConfFile)
            throws IOException {
        System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile);
        String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF);
        if (ret == null) {
            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null.");
        }
        if (!ret.equals(krb5ConfFile)) {
            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + ".");
        }
    }

    /**
     * Write the jaas file.
     *
     * @throws IOException on any write failure
     */
    private static void writeJaasFile(String jaasPath, String principal, String keytabPath)
            throws IOException {
        // FIX: try-with-resources guarantees the writer is closed on every
        // path (the original used an explicit finally block).
        try (FileWriter writer = new FileWriter(new File(jaasPath))) {
            writer.write(getJaasConfContext(principal, keytabPath));
            writer.flush();
        } catch (IOException e) {
            throw new IOException("Failed to create jaas.conf File");
        }
    }

    /** Delete an existing jaas file, failing loudly if deletion fails. */
    private static void deleteJaasFile(String jaasPath)
            throws IOException {
        File jaasFile = new File(jaasPath);
        if (jaasFile.exists()) {
            if (!jaasFile.delete()) {
                throw new IOException("Failed to delete exists jaas file.");
            }
        }
    }

    /** Concatenate one login-context section per {@link Module}. */
    private static String getJaasConfContext(String principal, String keytabPath) {
        Module[] allModule = Module.values();
        StringBuilder builder = new StringBuilder();
        for (Module modlue : allModule) {
            builder.append(getModuleContext(principal, keytabPath, modlue));
        }
        return builder.toString();
    }

    /** Render one jaas.conf section, using the JDK-specific login module. */
    private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) {
        StringBuilder builder = new StringBuilder();
        if (IS_IBM_JDK) {
            builder.append(module.getName()).append(" {").append(LINE_SEPARATOR);
            builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR);
            builder.append("credsType=both").append(LINE_SEPARATOR);
            builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR);
            builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR);
            builder.append("debug=true;").append(LINE_SEPARATOR);
            builder.append("};").append(LINE_SEPARATOR);
        } else {
            builder.append(module.getName()).append(" {").append(LINE_SEPARATOR);
            builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR);
            builder.append("useKeyTab=true").append(LINE_SEPARATOR);
            builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR);
            builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR);
            builder.append("useTicketCache=false").append(LINE_SEPARATOR);
            builder.append("storeKey=true").append(LINE_SEPARATOR);
            builder.append("debug=true;").append(LINE_SEPARATOR);
            builder.append("};").append(LINE_SEPARATOR);
        }

        return builder.toString();
    }

    // NOTE(review): hard-coded Windows config directory — should come from
    // configuration.
    private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\";

    /**
     * Prepare krb5.conf, the zookeeper principal and the jaas file for the
     * given principal/keytab.
     */
    public static void securityPrepare(String principal, String keyTabFile) throws IOException {
//        String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator;
        String krbFile = filePath + "krb5.conf";
        String userKeyTableFile = filePath + keyTabFile;
        // Escape backslashes of the Windows paths.
        userKeyTableFile = userKeyTableFile.replace("\\", "\\\\");
        krbFile = krbFile.replace("\\", "\\\\");
        principal += "@HADOOP.COM";
        LoginUtil.setKrb5Config(krbFile);
        LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com");
        LoginUtil.setJaasFile(principal, userKeyTableFile);
    }

    /**
     * Check whether the client should run in security mode, as declared by
     * the kafkaSecurityMode file.
     *
     * @return true only when kafka.client.security.mode=yes in that file
     */
    public static Boolean isSecurityModel() {
        Boolean isSecurity = false;

        String krbFilePath = filePath + "kafkaSecurityMode";

        Properties securityProps = new Properties();
        // Missing mode file means non-secure mode.
        if (!isFileExists(krbFilePath)) {
            return isSecurity;
        }
        // FIX: the stream is now closed (it was leaked before).
        try (FileInputStream in = new FileInputStream(krbFilePath)) {
            securityProps.load(in);

            if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) {
                isSecurity = true;
            }
        } catch (Exception e) {
            LOG.info("The Exception occured : {}.", e);
        }

        // BUG FIX: previously this always returned true, ignoring the
        // computed flag and forcing security mode even when the config
        // said otherwise.
        return isSecurity;
    }

    /*
     * Whether a file exists at the given path.
     */
    private static boolean isFileExists(String fileName) {
        File file = new File(fileName);

        return file.exists();
    }
}
+group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties new file mode 100644 index 0000000..5fcbb11 --- /dev/null +++ b/src/main/resources/jdbc.properties @@ -0,0 +1,9 @@ +driverClass = com.mysql.cj.jdbc.Driver +url = jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?serverTimezone=GMT%2B8 +username = root +password = casic203yz2db + +#url = jdbc:mysql://111.198.10.15:11102/smartwell_yizhuang?serverTimezone=GMT%2B8 + +#username = root +#password = Casic203! \ No newline at end of file diff --git a/src/main/resources/kafkaSecurityMode b/src/main/resources/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + 
org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import 
com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return 
package com.casic.yizhuang.access;

import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Thin JDBC helper over the Access database connection provided by
 * {@link AccessDBUtils}: update statements, map-based selects and
 * reflection-based selects into javabeans.
 */
public class DBUtils {
    /**
     * Execute an INSERT/DELETE/UPDATE statement.
     *
     * @param sql    sql with positional placeholders
     * @param params parameter values, may be null/empty
     * @return true when at least one row was affected
     * @throws SQLException when the statement or the connection fails
     */
    public static boolean update(String sql, List<Object> params) throws SQLException {
        int result = -1;
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) {
                // FIX: was `assert conn != null`, a no-op when assertions are
                // disabled (the default), which then surfaced as an NPE.
                throw new SQLException("Could not open Access database connection");
            }
            ps = conn.prepareStatement(sql);
            int index = 1;
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            result = ps.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
            if (conn != null) {
                try {
                    // NOTE(review): the connection is never taken out of
                    // auto-commit mode, so this rollback is effectively a
                    // no-op; kept for parity with the original behaviour.
                    conn.rollback();
                } catch (SQLException e1) {
                    e1.printStackTrace();
                }
            }
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, null);
        }
        return result > 0;
    }

    /**
     * Run a query and return each row as a column-label -> value map.
     * SQL NULLs are replaced by the empty string.
     *
     * @param sql    sql with positional placeholders
     * @param params parameter values, may be null/empty
     * @return list of row maps, empty when no rows match
     */
    public static List<Map<String, Object>> select(String sql, List<Object> params) throws SQLException {
        List<Map<String, Object>> list = new ArrayList<>();
        int index = 1;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) {
                throw new SQLException("Could not open Access database connection");
            }
            ps = conn.prepareStatement(sql);
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            rs = ps.executeQuery();
            ResultSetMetaData metaData = rs.getMetaData();
            int col_len = metaData.getColumnCount();
            while (rs.next()) {
                Map<String, Object> map = new HashMap<>();
                for (int i = 0; i < col_len; i++) {
                    // FIX: use the column LABEL, not the underlying column
                    // name, so `SELECT x AS alias` rows are keyed by alias
                    // as callers expect (JDBC recommendation).
                    String colLabel = metaData.getColumnLabel(i + 1);
                    Object colValue = rs.getObject(i + 1);
                    if (colValue == null) {
                        colValue = "";
                    }
                    map.put(colLabel, colValue);
                }
                list.add(map);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, rs);
        }
        return list;
    }

    /**
     * Run a query and map each row onto a new instance of {@code clazz} via
     * reflection; every column label must match a declared field name.
     *
     * @param sql    sql with positional placeholders
     * @param params parameter values, may be null/empty
     * @param clazz  bean class with a no-arg constructor
     * @return list of populated beans, empty when no rows match
     */
    public static <T> List<T> select(String sql, List<Object> params,
            Class<T> clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException {
        List<T> list = new ArrayList<>();
        int index = 1;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = AccessDBUtils.getConn();
            if (conn == null) {
                throw new SQLException("Could not open Access database connection");
            }
            ps = conn.prepareStatement(sql);
            if (params != null && !params.isEmpty()) {
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
            }
            rs = ps.executeQuery();
            ResultSetMetaData metaData = rs.getMetaData();
            int cols_len = metaData.getColumnCount();
            T t;
            while (rs.next()) {
                // newInstance() is deprecated but kept: replacing it with
                // getDeclaredConstructor().newInstance() would add checked
                // exceptions not covered by this method's signature.
                t = clazz.newInstance();
                for (int i = 0; i < cols_len; i++) {
                    // FIX: match fields against the column LABEL so SQL
                    // aliases (e.g. `well_code AS wellcode`) resolve to the
                    // intended bean field.
                    String colLabel = metaData.getColumnLabel(i + 1);
                    Object colValue = rs.getObject(i + 1);
                    if (colValue == null) {
                        colValue = "";
                    }
                    Field field = clazz.getDeclaredField(colLabel);
                    field.setAccessible(true); // allow writing private bean fields
                    field.set(t, colValue);
                }
                list.add(t);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            AccessDBUtils.close(conn, ps, rs);
        }
        return list;
    }
}
package com.casic.yizhuang.core;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.Charset;
import java.util.List;

/**
 * Decodes an inbound {@link ByteBuf} frame into a {@link String} using the
 * platform default charset, logging every frame it sees.
 */
public class Decoder extends MessageToMessageDecoder<ByteBuf> {

    private static final Logger logger = LoggerFactory.getLogger(Decoder.class);

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) {
        // Decode once and reuse for both the log line and the pipeline output.
        String decoded = msg.toString(Charset.defaultCharset());
        logger.info("receive : " + decoded);
        out.add(decoded);
    }
}
class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
+ +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + 
StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } 
+ + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public 
String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package 
com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java 
b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private 
static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] 
args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = 
message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + 
case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + 
LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class 
Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map 
map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff 
--git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + 
standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... 
parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle 
rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java 
b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new 
StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; +import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String 
wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + 
.withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + 
private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + 
props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is 
null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + 
builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/resources/consumer.properties b/src/main/resources/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com 
+group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties new file mode 100644 index 0000000..5fcbb11 --- /dev/null +++ b/src/main/resources/jdbc.properties @@ -0,0 +1,9 @@ +driverClass = com.mysql.cj.jdbc.Driver +url = jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?serverTimezone=GMT%2B8 +username = root +password = casic203yz2db + +#url = jdbc:mysql://111.198.10.15:11102/smartwell_yizhuang?serverTimezone=GMT%2B8 + +#username = root +#password = Casic203! \ No newline at end of file diff --git a/src/main/resources/kafkaSecurityMode b/src/main/resources/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/src/main/resources/krb5.conf b/src/main/resources/krb5.conf new file mode 100644 index 0000000..003c6c7 --- /dev/null +++ b/src/main/resources/krb5.conf @@ -0,0 +1,48 @@ +[kdcdefaults] +kdc_ports = 192.168.65.19:21732 +kdc_tcp_ports = "" + +[libdefaults] +default_realm = HADOOP.COM +kdc_timeout = 2500 +clockskew = 300 +use_dns_lookup = 0 +udp_preference_limit = 1465 +max_retries = 5 +dns_lookup_kdc = false +dns_lookup_realm = false +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +allow_extend_version = false +default_ccache_name = FILE:/tmp//krb5cc_%{uid} + +[realms] +HADOOP.COM = { +kdc = 192.168.65.19:21732 +kdc = 192.168.65.18:21732 +admin_server = 192.168.65.19:21730 +admin_server = 192.168.65.18:21730 +kpasswd_server = 192.168.65.19:21731 +kpasswd_server = 192.168.65.18:21731 +kpasswd_port = 21731 +kadmind_port = 21730 +kadmind_listen = 192.168.65.19:21730 +kpasswd_listen = 192.168.65.19:21731 +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +acl_file = 
/opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl +dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini +key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM +} + +[domain_realm] +.hadoop.com = HADOOP.COM + +[logging] +kdc = SYSLOG:INFO:DAEMON +admin_server = SYSLOG:INFO:DAEMON +default = SYSLOG:NOTICE:DAEMON diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + 
kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) 
throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + 
e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + 
rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 
0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + 
b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + 
super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + 
+ } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String 
getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + 
public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git 
a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float 
h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import 
org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + 
consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException 
occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new 
Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + 
statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + 
m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final 
class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String 
getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { 
+ LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + 
+public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public 
class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd 
HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... 
parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void 
closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, 
String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; 
+import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String 
standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class 
Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = 
"auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + 
Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String 
getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. + */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + 
System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module 
module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static 
Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. + if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/resources/consumer.properties b/src/main/resources/consumer.properties new file mode 
100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties new file mode 100644 index 0000000..5fcbb11 --- /dev/null +++ b/src/main/resources/jdbc.properties @@ -0,0 +1,9 @@ +driverClass = com.mysql.cj.jdbc.Driver +url = jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?serverTimezone=GMT%2B8 +username = root +password = casic203yz2db + +#url = jdbc:mysql://111.198.10.15:11102/smartwell_yizhuang?serverTimezone=GMT%2B8 + +#username = root +#password = Casic203! \ No newline at end of file diff --git a/src/main/resources/kafkaSecurityMode b/src/main/resources/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/src/main/resources/krb5.conf b/src/main/resources/krb5.conf new file mode 100644 index 0000000..003c6c7 --- /dev/null +++ b/src/main/resources/krb5.conf @@ -0,0 +1,48 @@ +[kdcdefaults] +kdc_ports = 192.168.65.19:21732 +kdc_tcp_ports = "" + +[libdefaults] +default_realm = HADOOP.COM +kdc_timeout = 2500 +clockskew = 300 +use_dns_lookup = 0 +udp_preference_limit = 1465 +max_retries = 5 +dns_lookup_kdc = false +dns_lookup_realm = false +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +allow_extend_version = false +default_ccache_name = FILE:/tmp//krb5cc_%{uid} + +[realms] +HADOOP.COM = { +kdc = 192.168.65.19:21732 +kdc = 192.168.65.18:21732 +admin_server = 192.168.65.19:21730 +admin_server = 192.168.65.18:21730 +kpasswd_server = 192.168.65.19:21731 +kpasswd_server = 192.168.65.18:21731 +kpasswd_port = 21731 +kadmind_port = 21730 +kadmind_listen = 192.168.65.19:21730 +kpasswd_listen = 
192.168.65.19:21731 +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +acl_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl +dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini +key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM +} + +[domain_realm] +.hadoop.com = HADOOP.COM + +[logging] +kdc = SYSLOG:INFO:DAEMON +admin_server = SYSLOG:INFO:DAEMON +default = SYSLOG:NOTICE:DAEMON diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties new file mode 100644 index 0000000..10e767d --- /dev/null +++ b/src/main/resources/log4j.properties @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +kafka.logs.dir=logs + +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.logger.kafka=ERROR, kafkaAppender + +log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaAppender.File=${kafka.logs.dir}/client.log +log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Turn on all our debugging info +#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender +#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender +#log4j.logger.kafka.perf=DEBUG, kafkaAppender +#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender +#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + 
com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ 
b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files (x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() 
{ + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + 
* + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); 
+ if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import 
io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; 
+import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = 
JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + 
public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class 
MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 
0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + 
return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + 
this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private 
static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + 
record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import 
org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = 
JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + 
m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java 
b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + "producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if 
(cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + 
//如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float 
getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java new file mode 100644 index 0000000..28005f5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.model; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; + + +import java.text.SimpleDateFormat; +import java.util.*; + +public class StandardDataUtils { + + public 
static List> appendListBuilder(String value, String key) { + List> standardkeyParmList = new ArrayList<>(); + Map standardkeyParm = new HashMap<>(); + standardkeyParm.put("Key", key); + standardkeyParm.put("Value", value); + standardkeyParmList.add(standardkeyParm); + return standardkeyParmList; + } + + public static String alarmDataBuilder(String devcode, String value, String alarmContent) { + String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "," + + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value; + return standradMsg; + } + +} diff --git a/src/main/java/com/casic/yizhuang/model/WellInfo.java b/src/main/java/com/casic/yizhuang/model/WellInfo.java new file mode 100644 index 0000000..c4b13a5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/WellInfo.java @@ -0,0 +1,13 @@ +package com.casic.yizhuang.model; + +public class WellInfo{ + private String wellcode;//井编号 + + public String getWellcode() { + return wellcode; + } + + public void setWellcode(String wellcode) { + this.wellcode = wellcode; + } +} diff --git a/src/main/java/com/casic/yizhuang/mysql/DAO.java b/src/main/java/com/casic/yizhuang/mysql/DAO.java new file mode 100644 index 0000000..7e168d2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DAO.java @@ -0,0 +1,88 @@ +package com.casic.yizhuang.mysql; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + + +public class DAO { + ResultSet res = null; + Connection conn = null; + PreparedStatement pre = null; + + //给sql语句设置参数值 + private void setParameter(Object... parameter) { + for (int i = 0; i < parameter.length; i++) { + try { + pre.setObject(i + 1, parameter[i]); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } + + //从数据库获取值到ResultSet + public List query(Class cls , String sql, String... 
parameter){ + List list = new ArrayList<>(); + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + res = pre.executeQuery(); + + while(res.next()){ + T obj = cls.newInstance(); //创建这个类的新的实例 + setData(cls, obj); //给obj的属性设置值 + list.add(obj);//添加到list中 + } + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return list; + } + + /**插入/删除**/ + public int Update(String sql, Object... parameter){ + int num = 0 ; + try { + conn = DBUtil.getConnection(); + pre = conn.prepareStatement(sql); + setParameter(parameter); // 设置参数值 + num = pre.executeUpdate(); + + } catch (Exception e) { + e.printStackTrace(); + }finally{ + DBUtil.closeAll(res, pre, conn); + } + return num; // 受影响的行数 + } + + + + //将结果集给javebean对象设置值 , 通过对象属性的set方法设置值 + private void setData(Class cls, T obj){ + /** + * Field 对象属性 类, + * 通过方法 set(Object obj, Object value) ; 给obj设置其value值 + * 通过ResultSet对象 得到ResultSetMetaData接口的对象, 可以获得数据库字段属性 + */ + try { + ResultSetMetaData rsmd = res.getMetaData(); + int col = rsmd.getColumnCount(); + for (int i=1; i<=col; i++){ + String DBField = rsmd.getColumnLabel(i); + Field field = cls.getDeclaredField(DBField); + field.setAccessible(true); //私有可见 + field.set(obj, res.getObject(i)); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/mysql/DBUtil.java b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java new file mode 100644 index 0000000..4016a95 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/mysql/DBUtil.java @@ -0,0 +1,55 @@ +package com.casic.yizhuang.mysql; + +import java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle 
rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + + ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java 
b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new 
StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; +import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String 
wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + 
.withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + 
private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + 
props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is 
null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + 
builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/resources/consumer.properties b/src/main/resources/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com 
+group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties new file mode 100644 index 0000000..5fcbb11 --- /dev/null +++ b/src/main/resources/jdbc.properties @@ -0,0 +1,9 @@ +driverClass = com.mysql.cj.jdbc.Driver +url = jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?serverTimezone=GMT%2B8 +username = root +password = casic203yz2db + +#url = jdbc:mysql://111.198.10.15:11102/smartwell_yizhuang?serverTimezone=GMT%2B8 + +#username = root +#password = Casic203! \ No newline at end of file diff --git a/src/main/resources/kafkaSecurityMode b/src/main/resources/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/src/main/resources/krb5.conf b/src/main/resources/krb5.conf new file mode 100644 index 0000000..003c6c7 --- /dev/null +++ b/src/main/resources/krb5.conf @@ -0,0 +1,48 @@ +[kdcdefaults] +kdc_ports = 192.168.65.19:21732 +kdc_tcp_ports = "" + +[libdefaults] +default_realm = HADOOP.COM +kdc_timeout = 2500 +clockskew = 300 +use_dns_lookup = 0 +udp_preference_limit = 1465 +max_retries = 5 +dns_lookup_kdc = false +dns_lookup_realm = false +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +allow_extend_version = false +default_ccache_name = FILE:/tmp//krb5cc_%{uid} + +[realms] +HADOOP.COM = { +kdc = 192.168.65.19:21732 +kdc = 192.168.65.18:21732 +admin_server = 192.168.65.19:21730 +admin_server = 192.168.65.18:21730 +kpasswd_server = 192.168.65.19:21731 +kpasswd_server = 192.168.65.18:21731 +kpasswd_port = 21731 +kadmind_port = 21730 +kadmind_listen = 192.168.65.19:21730 +kpasswd_listen = 192.168.65.19:21731 +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +acl_file = 
/opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl +dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini +key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM +} + +[domain_realm] +.hadoop.com = HADOOP.COM + +[logging] +kdc = SYSLOG:INFO:DAEMON +admin_server = SYSLOG:INFO:DAEMON +default = SYSLOG:NOTICE:DAEMON diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties new file mode 100644 index 0000000..10e767d --- /dev/null +++ b/src/main/resources/log4j.properties @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +kafka.logs.dir=logs + +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.logger.kafka=ERROR, kafkaAppender + +log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaAppender.File=${kafka.logs.dir}/client.log +log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Turn on all our debugging info +#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender +#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender +#log4j.logger.kafka.perf=DEBUG, kafkaAppender +#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender +#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG diff --git a/src/main/resources/producer.properties b/src/main/resources/producer.properties new file mode 100644 index 0000000..5e6446a --- /dev/null +++ b/src/main/resources/producer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +acks = 1 +bootstrap.servers = 192.168.65.16:21007,192.168.65.15:21007,192.168.65.14:21007 +sasl.kerberos.service.name = kafka diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..f88f517 --- /dev/null +++ b/pom.xml @@ -0,0 +1,296 @@ + + + 4.0.0 + + com.casic.yizhuang + yizhuang + 1.0-SNAPSHOT + + 3.6.3-hw-ei-312005 + 2.4.0-hw-ei-312005 + UTF-8 + + + + + + io.netty + netty-all + 4.1.31.Final + + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + mysql + mysql-connector-java + 8.0.16 + + + + + com.alibaba + fastjson + 1.2.58 + + + + + org.quartz-scheduler + quartz + 2.3.0 + + + + + net.sf.ucanaccess + ucanaccess + 4.0.1 + + + + org.apache.kafka + kafka-clients + 0.11.0.0 + + + + org.apache.kafka + kafka_2.11 + 
${kafka.version} + + + org.apache.zookeeper + zookeeper + + + net.sf.jopt-simple + jopt-simple + + + com.huawei.mrs + manager-wc2frm + + + org.apache.kafka + kafka-clients + + + org.xerial.snappy + snappy-java + + + com.huawei.mrs + om-controller-api + + + com.101tec + zkclient + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + org.xerial.snappy + snappy-java + + + net.jpountz.lz4 + lz4 + + + com.huawei.mrs + manager-hadoop-security-crypter + + + + + org.apache.zookeeper + zookeeper + ${zookeeper.version} + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + + + commons-cli + commons-cli + + + log4j + log4j + + + log4j + apache-log4j-extras + + + com.huawei.hadoop.dynalogger + dynalogger + + + io.netty + netty + + + net.java.dev.javacc + javacc + + + + + log4j + log4j + 1.2.17-atlassian-13 + + + org.slf4j + slf4j-log4j12 + 1.7.30 + + + org.apache.kafka + kafka-streams-examples + ${kafka.version} + + + org.apache.kafka + connect-json + + + org.slf4j + slf4j-log4j12 + + + org.apache.kafka + kafka-streams + + + + + org.apache.kafka + kafka-streams + ${kafka.version} + + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.apache.kafka + connect-json + + + + + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + huaweicloudsdk + https://mirrors.huaweicloud.com/repository/maven/huaweicloudsdk/ + + true + + + true + + + + central + Maven Central + https://repo1.maven.org/maven2/ + + + + + + bigdata + http://wlg1.artifactory.cd-cloud-artifact.tools.huawei.com/artifactory/cbu-maven-public/ + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + org.apache.maven.plugins + maven-shade-plugin + 1.2.1 + + + package + + shade + + + + + com.casic.yizhuang.Main + + + + + + + + + + + + + + + src/main/resources + + **/* + + + + + + \ No newline at end of file diff --git 
a/src/META-INF/MANIFEST.MF b/src/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/java/com/casic/yizhuang/Main.java b/src/main/java/com/casic/yizhuang/Main.java new file mode 100644 index 0000000..408006f --- /dev/null +++ b/src/main/java/com/casic/yizhuang/Main.java @@ -0,0 +1,40 @@ +package com.casic.yizhuang; + +import com.casic.yizhuang.core.Server; +import com.casic.yizhuang.kafka.KafkaClient; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Main { + + private static final Logger logger = LoggerFactory.getLogger(Main.class); + public static void main(String[] args) throws Exception { + + // 初始化日志路径 + String path=System.getProperty("user.dir"); + System.setProperty("log.base",path); + System.out.println(path); + + logger.info("Start scheduler"); + try { + new Scheduler().start(); + } catch (SchedulerException e) { + e.printStackTrace(); + } + + System.out.println("Start server"); + logger.info("Start server"); + new Thread(new Server()).start(); + + System.out.println("Start Kafka Consume"); + logger.info("Start Kafka Consume"); + + Producer.send("怎么回事儿"); + new KafkaClient().kafkaDataConsumer(); + } + +} + diff --git a/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java new file mode 100644 index 0000000..f22f604 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/AccessDBUtils.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.access; + +import java.sql.*; + +public class AccessDBUtils { + + private static final String dbURL = "jdbc:ucanaccess://" + + //"D:\\WeChat Files\\wxid_jcvfr68ppkhi11\\FileStorage\\File\\2020-07\\dat.mdb"; + "C:\\Program Files 
(x86)\\WeatherStation\\Data\\Access\\dat.mdb"; + + /* + * 加载驱动 + */ + static { + try { + // Step 1: Loading or registering Oracle JDBC driver class + Class.forName("net.ucanaccess.jdbc.UcanaccessDriver"); + } catch (ClassNotFoundException cnfex) { + System.out.println("Problem in loading or registering MS Access JDBC driver"); + cnfex.printStackTrace(); + } + } + + //建立连接 + public static Connection getConn() { + try { + // Step 2: Opening database connection + // Step 2.A: Create and get connection using DriverManager class + return DriverManager.getConnection(dbURL); + } catch (Exception e) { + System.out.println("AccessDB connection fail"); + e.printStackTrace(); + } + return null; + } + + // 关闭资源 + public static void close(Connection con, PreparedStatement ps, ResultSet rs) { + try { + if (rs != null) + rs.close();// 这里出现异常了,rs关闭了吗?,如果没有怎么解决,ps , con也是一样的。 + } catch (SQLException e) { + e.printStackTrace(); + } finally { + try { + if (ps != null) + ps.close(); + } catch (SQLException e) { + e.printStackTrace(); + } finally { + if (con != null) + try { + con.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + } +} diff --git a/src/main/java/com/casic/yizhuang/access/DBUtils.java b/src/main/java/com/casic/yizhuang/access/DBUtils.java new file mode 100644 index 0000000..56c9649 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/access/DBUtils.java @@ -0,0 +1,147 @@ +package com.casic.yizhuang.access; + +import java.lang.reflect.Field; +import java.sql.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class DBUtils { + /** + * 增加、删除、改 + * + * @param sql sql + * @param params 参数 + * @return 添加结果 + */ + public static boolean update(String sql, List params) throws SQLException { + int result = -1; + Connection conn = null; + PreparedStatement ps = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null;//直接抛异常 + ps = conn.prepareStatement(sql); + int index = 1; + if 
(params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + result = ps.executeUpdate(); + } catch (Exception e) { + e.printStackTrace(); + try { + assert conn != null; + conn.rollback(); + } catch (SQLException e1) { + e1.printStackTrace(); + } + throw e; + } finally { + AccessDBUtils.close(conn, ps, null); + } + return result > 0; + } + + /** + * 查询多条记录 + * + * @param sql sql + * @param params 参数 + * @return 查询结果 + */ + public static List> select(String sql, List params) throws SQLException { + List> list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && !params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int col_len = metaData.getColumnCount(); + Map map = null; + while (rs.next()) { + map = new HashMap<>(); + for (int i = 0; i < col_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + map.put(cols_name, cols_value); + } + list.add(map); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + + /** + * 通过反射机制查询多条记录 + * + * @param sql sql + * @param params 参数 + * @param clazz 类 + * @return 查询结果 + */ + public static List select(String sql, List params, + Class clazz) throws SQLException, NoSuchFieldException, InstantiationException, IllegalAccessException { + List list = new ArrayList<>(); + int index = 1; + Connection conn = null; + PreparedStatement ps = null; + ResultSet rs = null; + try { + conn = AccessDBUtils.getConn(); + assert conn != null; + ps = conn.prepareStatement(sql); + if (params != null && 
!params.isEmpty()) { + for (Object param : params) { + ps.setObject(index++, param); + } + } + rs = ps.executeQuery(); + ResultSetMetaData metaData = rs.getMetaData(); + int cols_len = metaData.getColumnCount(); + T t; + while (rs.next()) { + //通过反射机制创建一个实例 + t = clazz.newInstance(); + for (int i = 0; i < cols_len; i++) { + String cols_name = metaData.getColumnName(i + 1); + Object cols_value = rs.getObject(cols_name); + if (cols_value == null) { + cols_value = ""; + } + Field field = clazz.getDeclaredField(cols_name);//获取对象属性 + field.setAccessible(true); //打开javabean的访问权限 + field.set(t, cols_value); + } + list.add(t); + } + } catch (Exception e) { + e.printStackTrace(); + throw e; + } finally { + AccessDBUtils.close(conn, ps, rs); + } + return list; + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Decoder.java b/src/main/java/com/casic/yizhuang/core/Decoder.java new file mode 100644 index 0000000..9808cb5 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Decoder.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.core; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.Charset; +import java.util.List; + +public class Decoder extends MessageToMessageDecoder { + + private static final Logger logger = LoggerFactory.getLogger(Decoder.class); + + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List out) { + logger.info("receive : " + msg.toString(Charset.defaultCharset())); + out.add(msg.toString(Charset.defaultCharset())); + } + +} diff --git a/src/main/java/com/casic/yizhuang/core/Server.java b/src/main/java/com/casic/yizhuang/core/Server.java new file mode 100644 index 0000000..dfae7e2 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/Server.java @@ -0,0 +1,49 @@ +package com.casic.yizhuang.core; + +import 
io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.codec.LineBasedFrameDecoder; +import io.netty.handler.codec.string.StringDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server implements Runnable { + private static final Logger logger = LoggerFactory.getLogger(Server.class); + + private int port = 11620; + + public void run() { + EventLoopGroup bossGroup = new NioEventLoopGroup(); + EventLoopGroup workerGroup = new NioEventLoopGroup(); + logger.info("server bind port = {}", port); + + ServerBootstrap b = new ServerBootstrap(); + b.group(bossGroup, workerGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_BACKLOG, 1024) + .childHandler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel socketChannel) { + socketChannel.pipeline().addLast(new LineBasedFrameDecoder(1024)); + socketChannel.pipeline().addLast(new StringDecoder()); + socketChannel.pipeline().addLast(new ServerHandler()); + } + }); + + try { + ChannelFuture channelFuture = b.bind(port).sync(); + channelFuture.channel().closeFuture().sync(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + workerGroup.shutdownGracefully(); + bossGroup.shutdownGracefully(); + } + } +} diff --git a/src/main/java/com/casic/yizhuang/core/ServerHandler.java b/src/main/java/com/casic/yizhuang/core/ServerHandler.java new file mode 100644 index 0000000..88ed7cd --- /dev/null +++ b/src/main/java/com/casic/yizhuang/core/ServerHandler.java @@ -0,0 +1,112 @@ +package com.casic.yizhuang.core; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import 
com.casic.yizhuang.json.FlowmeterMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Map; + + +public class ServerHandler extends ChannelInboundHandlerAdapter { + + private static final Logger logger = LoggerFactory.getLogger(ServerHandler.class); + + @Override + public void channelRegistered(ChannelHandlerContext ctx) throws Exception { + super.channelRegistered(ctx); + } + + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + super.channelActive(ctx); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + super.channelRead(ctx, msg); + logger.info("[{}] : msg = {}", ctx.channel().remoteAddress(), msg); + String body = (String) msg; + +// ByteBuf buf = (ByteBuf) msg; +// byte[] req = new byte[buf.readableBytes()]; +// buf.readBytes(req); +// String b = new String(req,"ascii"); +// logger.info("The server receive:", b); + + FlowmeterMessage message = JSON.parseObject(body, FlowmeterMessage.class); + String devcode = message.getDevCode(); + if (devcode.length() == 1) { + devcode = "712019121200" + devcode; + } else if (devcode.length() == 2) { + devcode = "71201912120" + devcode; + } else { + + } + + Class wellInfoClass = WellInfo.class; + //根据设备编号,查询设备点位编号 + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = 
wellInfoList.get(0).getWellcode(); + } + + //获取数据内容 + MBody mBody = message.getMBody(); + List datas = mBody.getDatas(); + if (datas == null || datas.size() <= 0) { + logger.error("The mBody is empty!"); + return; + } + + //获取上传时间 + String logtime = mBody.getLogTime(); + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss"); + Date logTime = sdf.parse(logtime); + //向流量表批量插入数据 + for (Object data : datas) { + Flowmeter flowmeter = JSON.toJavaObject((JSONObject) data, Flowmeter.class); + String standardStatus = standardStatusBuilder(flowmeter); + StandardData standardData = new StandardData(devcode, "Flow", standardStatus, logtime); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_FLOWMETER, devcode, wellcode, flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + } + } + + //构建数据状态 + private String standardStatusBuilder(Flowmeter flowmeter) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getWaterLevel()), "Level"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getInstantFlow()), "InsFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTotalFlow()), "TotFlow")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getFlowVelocity()), "Speed")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(flowmeter.getTemperature()), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + super.exceptionCaught(ctx, cause); + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/json/BigDataMessage.java 
b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java new file mode 100644 index 0000000..9a0812c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/BigDataMessage.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.json; + +public class BigDataMessage { + + private String devID; + private String devType; + private String provider; + private String status; + private String logTime; + + + public String getDevID() { + return devID; + } + + public void setDevID(String devID) { + this.devID = devID; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getProvider() { + return provider; + } + + public void setProvider(String provider) { + this.provider = provider; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java new file mode 100644 index 0000000..e24b043 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/FlowmeterMessage.java @@ -0,0 +1,50 @@ +package com.casic.yizhuang.json; + +public class FlowmeterMessage { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private String ts; + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public String getTs() { + return ts; + } + + public void setTs(String ts) { + this.ts = ts; + } + + public 
MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/MBody.java b/src/main/java/com/casic/yizhuang/json/MBody.java new file mode 100644 index 0000000..bdf1a79 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/MBody.java @@ -0,0 +1,44 @@ +package com.casic.yizhuang.json; + +import java.util.List; + +public class MBody { + + private String bType; + private Integer cell; + private List datas; + private String logTime; + + public String getbType() { + return bType; + } + + public void setbType(String bType) { + this.bType = bType; + } + + public String getLogTime() { + return logTime; + } + + public void setLogTime(String logTime) { + this.logTime = logTime; + } + + + public List getDatas() { + return datas; + } + + public void setDatas(List datas) { + this.datas = datas; + } + + public Integer getCell() { + return cell; + } + + public void setCell(Integer cell) { + this.cell = cell; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/Message.java b/src/main/java/com/casic/yizhuang/json/Message.java new file mode 100644 index 0000000..1582918 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Message.java @@ -0,0 +1,60 @@ +package com.casic.yizhuang.json; + +public class Message { + + private String mType; + private String devType; + private String devCode; + private MBody mBody; + private boolean kafkaDataFlag; + private Long ts; + + //水质的暂时不用这个做标记位 + public boolean getKafkaDataFlag() { + return kafkaDataFlag; + } + + public void setKafkaDataFlag(boolean kafkaDataFlag) { + this.kafkaDataFlag = kafkaDataFlag; + } + + public String getMType() { + return mType; + } + + public void setMType(String mType) { + this.mType = mType; + } + + public String getDevType() { + return devType; + } + + public void setDevType(String devType) { + this.devType = devType; + } + + public String getDevCode() { + return devCode; + } + + 
public void setDevCode(String devCode) { + this.devCode = devCode; + } + + public Long getTs() { + return ts; + } + + public void setTs(Long ts) { + this.ts = ts; + } + + public MBody getMBody() { + return mBody; + } + + public void setMBody(MBody mBody) { + this.mBody = mBody; + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/json/Status.java b/src/main/java/com/casic/yizhuang/json/Status.java new file mode 100644 index 0000000..651007e --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/Status.java @@ -0,0 +1,23 @@ +package com.casic.yizhuang.json; + +public class Status { + private String key; + private Float value; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + + public Float getValue() { + return value; + } + + public void setValue(Float value) { + this.value = value; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Locator.java b/src/main/java/com/casic/yizhuang/json/device/Locator.java new file mode 100644 index 0000000..bf82514 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Locator.java @@ -0,0 +1,32 @@ +package com.casic.yizhuang.json.device; + +public class Locator { + private Float longitude; + private Float latitude; + private String uptime; + + + public Float getLongitude() { + return longitude; + } + + public void setLongitude(Float longitude) { + this.longitude = longitude; + } + + public Float getLatitude() { + return latitude; + } + + public void setLatitude(Float latitude) { + this.latitude = latitude; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/TempHumi.java b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java new file mode 100644 index 0000000..e8b3d4d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/TempHumi.java @@ -0,0 +1,31 @@ +package 
com.casic.yizhuang.json.device; + +public class TempHumi { + private Float temperature; + private Float humidity; + private String uptime; + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getHumidity() { + return humidity; + } + + public void setHumidity(Float humidity) { + this.humidity = humidity; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/WasteGas.java b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java new file mode 100644 index 0000000..8810368 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/WasteGas.java @@ -0,0 +1,67 @@ +package com.casic.yizhuang.json.device; + +public class WasteGas { + private Float h2s; + private Float co; + private Float o2; + private Float ch4; + private Float power; + private Boolean liquidSwitch; + private String uptime; + + public Float getH2S() { + return h2s; + } + + public void setH2S(Float h2s) { + this.h2s = h2s; + } + + public Float getCO() { + return co; + } + + public void setCO(Float co) { + this.co = co; + } + + public Float getO2() { + return o2; + } + + public void setO2(Float o2) { + this.o2 = o2; + } + + public Float getCH4() { + return ch4; + } + + public void setCH4(Float ch4) { + this.ch4 = ch4; + } + + public Float getPower() { + return power; + } + + public void setPower(Float power) { + this.power = power; + } + + public Boolean getLiquidSwitch() { + return liquidSwitch; + } + + public void setLiquidSwitch(Boolean liquidSwitch) { + this.liquidSwitch = liquidSwitch; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/json/device/Well.java b/src/main/java/com/casic/yizhuang/json/device/Well.java new file 
mode 100644 index 0000000..069fbe7 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/json/device/Well.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.json.device; + +public class Well { + private String value; + private String uptime; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getUptime() { + return uptime; + } + + public void setUptime(String uptime) { + this.uptime = uptime; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Consumer.java b/src/main/java/com/casic/yizhuang/kafka/Consumer.java new file mode 100644 index 0000000..f22f9bc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Consumer.java @@ -0,0 +1,156 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Properties; + + +public class Consumer extends ShutdownableThread { + private static final Logger LOG = LoggerFactory.getLogger(Consumer.class); + + private final KafkaConsumer consumer; + + private final String topic; + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + // Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = 
"sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + /** + * 用户自己申请的机机账号keytab文件名称 + */ + private static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + private static final String USER_PRINCIPAL = "kafkauser"; + + /** + * Consumer构造函数 + * + * @param topic 订阅的Topic名称 + */ + public Consumer(String topic) { + super("KafkaConsumerExample", false); + Properties props = initProperties(); + consumer = new KafkaConsumer(props); + this.topic = topic; + // 订阅 + consumer.subscribe(Collections.singletonList(this.topic)); + } + + public static Properties initProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, + kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + 
props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + LOG.error("-------------"+props.toString()); + return props; + } + + + /** + * 订阅Topic的消息处理函数 + */ + public void doWork() { + // 消息消费请求 + ConsumerRecords records = consumer.poll(waitTime); + // 消息处理 + for (ConsumerRecord record : records) { + LOG.info("[ConsumerExample], Received message: (" + record.key() + ", " + record.value() + + ") at offset " + record.offset()); + } + } + + public static void main(String[] args) { + if (LoginUtil.isSecurityModel()) { + try { + LOG.info("Securitymode start."); + + // !!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + LoginUtil.securityPrepare(USER_PRINCIPAL, USER_KEYTAB_FILE); + } catch (IOException e) { + LOG.error("Security prepare failure."); + LOG.error("The IOException occured.", e); + return; + } + LOG.info("Security prepare success."); + } + + Consumer consumerThread = new Consumer(KafkaProperties.REVICE_DATA_TOPIC); + consumerThread.start(); + + // 等到60s后将consumer关闭,实际执行过程中可修改 + try { + Thread.sleep(60000); + } catch (InterruptedException e) { + LOG.info("The InterruptedException occured : {}.", e); + } finally { + consumerThread.shutdown(); + consumerThread.consumer.close(); + } + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java new file mode 100644 index 0000000..a50b3fc --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaClient.java @@ -0,0 +1,233 @@ +package com.casic.yizhuang.kafka; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.casic.yizhuang.json.BigDataMessage; +import com.casic.yizhuang.json.MBody; +import com.casic.yizhuang.json.Message; +import com.casic.yizhuang.json.Status; +import com.casic.yizhuang.json.device.Locator; +import com.casic.yizhuang.json.device.TempHumi; +import 
com.casic.yizhuang.json.device.WasteGas; +import com.casic.yizhuang.json.device.Well; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.netty.Client; +import com.casic.yizhuang.util.Common; +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.*; + +public class KafkaClient { + private static final Logger logger = LoggerFactory.getLogger(KafkaClient.class); + + + private KafkaConsumer consumer; + + public void kafkaDataConsumer() throws Exception { + +// Properties props = new Properties(); +// props.put("bootstrap.servers", "10.10.4.109:21005,10.10.4.110:21005,10.10.4.111:21005"); +// props.put("group.id", "ConsumerXX"); +// props.put("enable.auto.commit", "true"); +// props.put("auto.commit.interval.ms", "1000"); +// props.put("key.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// props.put("value.deserializer", +// "org.apache.kafka.common.serialization.StringDeserializer"); +// KafkaConsumer consumer = new KafkaConsumer<>(props); +// consumer.subscribe(Arrays.asList("MSGQUEUE_8204")); + logger.info("Securitymode start."); + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.consumerInitProperties(); + consumer = new KafkaConsumer(props); + // 订阅 + consumer.subscribe(Collections.singletonList(KafkaProperties.REVICE_DATA_TOPIC)); + + while (true) { + ConsumerRecords records = consumer.poll(100); + for (ConsumerRecord record : records) { + String msg = record.value(); + // msg 
为取得的一条实时数据。消费这条数据,如输出到STDOUT + logger.info("kafka接收数据-----"+msg); + // String msg = "{\"Status\":\"[{\\\"Value\\\":7.7,\\\"Key\\\":\\\"Temp\\\"},{\\\"Value\\\":99.9,\\\"Key\\\":\\\"Humi\\\"},{\\\"Value\\\":100,\\\"Key\\\":\\\"Power\\\"}]\",\"DevType\":\"AirTempHumi\",\"LogTime\":\"2020-03-16 08:47:13\",\"DevID\":\"79WGX7\",\"Provider\":\"KaiNa\"}"; + if (msg.contains("ChangFeng")|| !msg.contains("Status")) { + return; + } + + try { + BigDataMessage message = JSON.parseObject(msg, BigDataMessage.class); + logger.info(message.getDevID()); + + String devId = message.getDevID(); + String logTime = message.getLogTime().replaceAll("-|:| ", ""); + + String strStatus = message.getStatus(); + JSONArray statuses = JSONArray.parseArray(strStatus); + + Map statusMap = new HashMap<>(); + + for (Object data : statuses) { + Status status = JSON.toJavaObject((JSONObject) data, Status.class); + statusMap.put(status.getKey(), status.getValue()); + } + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devId); + + String wellcode = ""; + if (!wellInfoList.isEmpty()) { + wellcode = wellInfoList.get(0).getWellcode(); + } + + String strJson = ""; + MBody mBody = new MBody(); + List datas = new ArrayList<>(); + + Integer cell = statusMap.get("Power").intValue(); + mBody.setLogTime(logTime); + + Message m = new Message(); + + switch (message.getDevType()) { + case "HarmfulGas"://有害气体 + mBody.setbType("WasteGasData"); + + WasteGas wasteGas = new WasteGas(); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setCO(statusMap.get("CO")); + wasteGas.setO2(statusMap.get("O2")); + wasteGas.setCH4(statusMap.get("CH4")); + wasteGas.setH2S(statusMap.get("H2S")); + wasteGas.setUptime(logTime); + + datas.add(wasteGas); + + m.setMType("Data"); + m.setDevType("WasteGas"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new 
Client().send(strJson + "\r\n"); + + break; + case "AirTempHumi"://温湿度 + mBody.setbType("TempHumiData"); + mBody.setCell(cell); + + TempHumi tempHumi = new TempHumi(); + tempHumi.setTemperature(statusMap.get("Temp")); + tempHumi.setHumidity(statusMap.get("Humi")); + tempHumi.setUptime(logTime); + + datas.add(tempHumi); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("TempHumi"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + new Client().send(strJson + "\r\n"); + + break; + case "ManholeCover": + mBody.setbType("WellData"); + + Well well = new Well(); + if (statusMap.get("Status") == 0) { + well.setValue("00"); + well.setUptime(logTime); + } else { + break; + } + + datas.add(well); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Well"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Location": + mBody.setbType("LocatorData"); + + Locator locator = new Locator(); + locator.setLongitude(statusMap.get("Lon")); + locator.setLatitude(statusMap.get("Lat")); + locator.setUptime(logTime); + + datas.add(locator); + mBody.setDatas(datas); + + m.setMType("Data"); + m.setDevType("Locator"); + m.setDevCode(devId); + m.setMBody(mBody); + m.setTs(0L); + m.setKafkaDataFlag(true); + + strJson = JSON.toJSONString(m); + System.out.println(strJson); + + new Client().send(strJson + "\r\n"); + + break; + case "Flow"://流量监测仪 + Flowmeter flowmeter = new Flowmeter(); + flowmeter.setInstantFlow(statusMap.get("InsFlow")); + flowmeter.setTotalFlow(statusMap.get("TotFlow")); + flowmeter.setFlowVelocity(statusMap.get("Speed")); + flowmeter.setWaterLevel(statusMap.get("Level")); + flowmeter.setTemperature(statusMap.get("Temp")); + + new DAO<>().Update(Common.INSERT_FLOWMETER, devId, wellcode, 
flowmeter.getWaterLevel(), flowmeter.getFlowVelocity(), flowmeter.getTemperature(), flowmeter.getInstantFlow(), flowmeter.getTotalFlow(), logTime); + + break; + case "WaterQuality": + msg = msg.replace("DevType", "devType");//凯那水质设备转发 + new Client().send(msg + "\r\n"); + break; + default: + break; + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + } + + } + +} + + diff --git a/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java new file mode 100644 index 0000000..2473897 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/KafkaProperties.java @@ -0,0 +1,149 @@ +package com.casic.yizhuang.kafka; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public final class KafkaProperties +{ + private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class); + + // Topic名称,安全模式下,需要以管理员用户添加当前用户的访问权限 + public final static String SEND_DATA_TOPIC = "TEMPSTORE_8204"; + + public final static String REVICE_DATA_TOPIC = "MSGQUEUE_8204"; + + public final static String ALARM_TOPIC = "MSGQUEUE_8287"; + /** + * 用户自己申请的机机账号keytab文件名称 + */ + public static final String USER_KEYTAB_FILE = "user.keytab"; + + /** + * 用户自己申请的机机账号名称 + */ + public static final String USER_PRINCIPAL = "kafkauser"; + + + private static Properties serverProps = new Properties(); + + private static Properties producerProps = new Properties(); + + private static Properties consumerProps = new Properties(); + + private static Properties clientProps = new Properties(); + + private static KafkaProperties instance = null; + + private KafkaProperties() + { + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + try + { + File proFile = new File(filePath + "producer.properties"); + + if (proFile.exists()) + { + producerProps.load(new FileInputStream(filePath + 
"producer.properties")); + } + + File conFile = new File(filePath + "producer.properties"); + + if (conFile.exists()) + { + consumerProps.load(new FileInputStream(filePath + "consumer.properties")); + } + + File serFile = new File(filePath + "server.properties"); + + if (serFile.exists()) + { + serverProps.load(new FileInputStream(filePath + "server.properties")); + } + + File cliFile = new File(filePath + "client.properties"); + + if (cliFile.exists()) + { + clientProps.load(new FileInputStream(filePath + "client.properties")); + } + } + catch (IOException e) + { + LOG.info("The Exception occured.", e); + } + } + + public synchronized static KafkaProperties getInstance() + { + if (null == instance) + { + instance = new KafkaProperties(); + } + return instance; + } + + /** + * 获取参数值 + * @param key properites的key值 + * @param defValue 默认值 + * @return + */ + public String getValues(String key, String defValue) + { + String rtValue = null; + + if (null == key) + { + LOG.error("key is null"); + } + else + { + rtValue = getPropertiesValue(key); + } + + if (null == rtValue) + { + LOG.warn("KafkaProperties.getValues return null, key is " + key); + rtValue = defValue; + } + + LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue); + + return rtValue; + } + + /** + * 根据key值获取server.properties的值 + * @param key + * @return + */ + private String getPropertiesValue(String key) + { + String rtValue = serverProps.getProperty(key); + + // server.properties中没有,则再向producer.properties中获取 + if (null == rtValue) + { + rtValue = producerProps.getProperty(key); + } + + // producer中没有,则再向consumer.properties中获取 + if (null == rtValue) + { + rtValue = consumerProps.getProperty(key); + } + + // consumer没有,则再向client.properties中获取 + if (null == rtValue) + { + rtValue = clientProps.getProperty(key); + } + + return rtValue; + } +} diff --git a/src/main/java/com/casic/yizhuang/kafka/Producer.java b/src/main/java/com/casic/yizhuang/kafka/Producer.java new file mode 100644 
index 0000000..666e563 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/Producer.java @@ -0,0 +1,45 @@ +package com.casic.yizhuang.kafka; + +import com.casic.yizhuang.util.KafkaUtils; +import com.casic.yizhuang.util.LoginUtil; +import kafka.utils.ShutdownableThread; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer extends Thread { + + private static final Logger LOG = LoggerFactory.getLogger(com.casic.yizhuang.kafka.Producer.class); + + private static KafkaProducer producer; + + public static void send(String content) throws IOException { + //!!注意,安全认证时,需要用户手动修改为自己申请的机机账号 + if (LoginUtil.isSecurityModel()) { + LoginUtil.securityPrepare(KafkaProperties.USER_PRINCIPAL, KafkaProperties.USER_KEYTAB_FILE); + } + Properties props = KafkaUtils.producerInitProperties(); + // 发布 + producer = new KafkaProducer(props); + LOG.info("producer start."); + ProducerRecord record = new ProducerRecord(KafkaProperties.SEND_DATA_TOPIC, "", content); + try { + LOG.info("kafka发送数据-------"+ content); + // 同步发送 + producer.send(record).get(); + } catch (InterruptedException ie) { + LOG.info("The InterruptedException occured : {}.", ie); + } catch (ExecutionException ee) { + LOG.info("The ExecutionException occured : {}.", ee); + }finally { + producer.close(); + } + + } + +} diff --git a/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java new file mode 100644 index 0000000..30cfc2b --- /dev/null +++ b/src/main/java/com/casic/yizhuang/kafka/SimplePartitioner.java @@ -0,0 +1,36 @@ +package com.casic.yizhuang.kafka; + +import org.apache.kafka.clients.producer.Partitioner; +import org.apache.kafka.common.Cluster; + +import java.util.Map; + +public class 
SimplePartitioner implements Partitioner { + + @Override + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + int partition = 0; + String partitionKey = (String) key; + int numPartitions = cluster.partitionsForTopic(topic).size(); + + try { + //指定分区逻辑,也就是key + partition = Integer.parseInt(partitionKey) % numPartitions; + } catch (NumberFormatException ne) { + //如果解析失败,都分配到0分区上 + partition = 0; + } + + return partition; + } + + @Override + public void close() { + + } + + @Override + public void configure(Map map) { + + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Flowmeter.java b/src/main/java/com/casic/yizhuang/model/Flowmeter.java new file mode 100644 index 0000000..57b7261 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Flowmeter.java @@ -0,0 +1,51 @@ +package com.casic.yizhuang.model; + +public class Flowmeter { + + private Float waterLevel;//水位 + private Float flowVelocity;//流速 + private Float temperature;//温度 + private Float instantFlow;//瞬时流量 + private Float totalFlow;//累计流量 + + + public Float getWaterLevel() { + return waterLevel; + } + + public void setWaterLevel(Float waterLevel) { + this.waterLevel = waterLevel; + } + + public Float getFlowVelocity() { + return flowVelocity; + } + + public void setFlowVelocity(Float flowVelocity) { + this.flowVelocity = flowVelocity; + } + + public Float getTemperature() { + return temperature; + } + + public void setTemperature(Float temperature) { + this.temperature = temperature; + } + + public Float getInstantFlow() { + return instantFlow; + } + + public void setInstantFlow(Float instantFlow) { + this.instantFlow = instantFlow; + } + + public Float getTotalFlow() { + return totalFlow; + } + + public void setTotalFlow(Float totalFlow) { + this.totalFlow = totalFlow; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/Hyetometer.java b/src/main/java/com/casic/yizhuang/model/Hyetometer.java new file mode 100644 index 
0000000..ad09475 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/Hyetometer.java @@ -0,0 +1,22 @@ +package com.casic.yizhuang.model; + +public class Hyetometer { + private Float realtimeData;//上一分钟的实时雨量 + private Float cumulativeData;//日累计雨量 + + public Float getRealtimeData() { + return realtimeData; + } + + public void setRealtimeData(Float realtimeData) { + this.realtimeData = realtimeData; + } + + public Float getCumulativeData() { + return cumulativeData; + } + + public void setCumulativeData(Float cumulativeData) { + this.cumulativeData = cumulativeData; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardData.java b/src/main/java/com/casic/yizhuang/model/StandardData.java new file mode 100644 index 0000000..ce0f6cf --- /dev/null +++ b/src/main/java/com/casic/yizhuang/model/StandardData.java @@ -0,0 +1,59 @@ +package com.casic.yizhuang.model; + +public class StandardData { + + private String DevID; + private String DevType; + private String Provider; + private String Status; + private String LogTime; + private String ProviderData; + + public StandardData(String DevID, String DevType, String Status, String LogTime) { + this.DevID = DevID; + this.DevType = DevType; + this.Provider = "Provider-ChangFeng"; + this.Status = Status; + this.LogTime = LogTime; + } + + public String getDevID() { + return DevID; + } + + public void setDevID(String devID) { + DevID = devID; + } + + public String getDevType() { + return DevType; + } + + public void setDevType(String devType) { + DevType = devType; + } + + public String getProvider() { + return Provider; + } + + public void setProvider(String provider) { + Provider = provider; + } + + public String getStatus() { + return Status; + } + + public void setStatus(String status) { + Status = status; + } + + public String getLogTime() { + return LogTime; + } + + public void setLogTime(String logTime) { + LogTime = logTime; + } +} diff --git a/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java 
b/src/main/java/com/casic/yizhuang/model/StandardDataUtils.java

// ==== File: src/main/java/com/casic/yizhuang/model/StandardDataUtils.java ====
package com.casic.yizhuang.model;

// NOTE: the unused fastjson imports (JSON, JSONObject) were removed — nothing in
// this class references them.
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Helpers for building pieces of the standardized platform message.
 */
public class StandardDataUtils {

    /**
     * Builds a single-entry list containing a {"Key": key, "Value": value} map,
     * the shape expected by the platform's status JSON.
     * (Generic types restored from the mangled source.)
     *
     * @param value value to report
     * @param key   key name, e.g. "InsFall"
     * @return a mutable list with exactly one map entry
     */
    public static List<Map<String, Object>> appendListBuilder(String value, String key) {
        List<Map<String, Object>> standardkeyParmList = new ArrayList<>();
        Map<String, Object> standardkeyParm = new HashMap<>();
        standardkeyParm.put("Key", key);
        standardkeyParm.put("Value", value);
        standardkeyParmList.add(standardkeyParm);
        return standardkeyParmList;
    }

    /**
     * Builds a human-readable (Chinese) alarm message with the current timestamp.
     */
    public static String alarmDataBuilder(String devcode, String value, String alarmContent) {
        String standradMsg = "在" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + ","
                + "设备" + devcode + "发生了" + alarmContent + ",数值为" + value;
        return standradMsg;
    }

}

// ==== File: src/main/java/com/casic/yizhuang/model/WellInfo.java ====
package com.casic.yizhuang.model;

/** Well metadata; only the well code is carried here. */
public class WellInfo {
    private String wellcode; // well identifier

    public String getWellcode() {
        return wellcode;
    }

    public void setWellcode(String wellcode) {
        this.wellcode = wellcode;
    }
}

// ==== File: src/main/java/com/casic/yizhuang/mysql/DAO.java ====
package com.casic.yizhuang.mysql;

import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Minimal reflection-based DAO over JDBC.
 *
 * <p>NOT thread-safe: {@code res}/{@code conn}/{@code pre} are instance fields,
 * so each query/update must use its own DAO instance (callers do
 * {@code new DAO<>()} per call).
 *
 * @param <T> the row-mapped bean type (generic parameter restored; callers use
 *            {@code new DAO<>()})
 */
public class DAO<T> {
    ResultSet res = null;
    Connection conn = null;
    PreparedStatement pre = null;

    // Bind positional parameters onto the prepared statement (JDBC is 1-indexed).
    private void setParameter(Object... parameter) {
        for (int i = 0; i < parameter.length; i++) {
            try {
                pre.setObject(i + 1, parameter[i]);
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Runs a SELECT and maps each row onto a new instance of {@code cls} by
     * matching result-set column labels to field names via reflection.
     *
     * @param cls       bean class; must have a no-arg constructor and fields named
     *                  after the selected column labels
     * @param sql       SQL with '?' placeholders
     * @param parameter positional parameter values
     * @return mapped beans; empty list on error (errors are printed, not thrown)
     */
    public List<T> query(Class<T> cls, String sql, String... parameter) {
        List<T> list = new ArrayList<>();
        try {
            conn = DBUtil.getConnection();
            pre = conn.prepareStatement(sql);
            setParameter((Object[]) parameter); // bind parameter values
            res = pre.executeQuery();

            while (res.next()) {
                // FIX: Class.newInstance() is deprecated (it wraps none of the
                // constructor's checked exceptions); use the canonical replacement.
                T obj = cls.getDeclaredConstructor().newInstance();
                setData(cls, obj); // populate fields from the current row
                list.add(obj);
            }

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            DBUtil.closeAll(res, pre, conn);
        }
        return list;
    }

    /**
     * Runs an INSERT/UPDATE/DELETE.
     *
     * @return number of affected rows; 0 on error (errors are printed, not thrown)
     */
    public int Update(String sql, Object... parameter) {
        int num = 0;
        try {
            conn = DBUtil.getConnection();
            pre = conn.prepareStatement(sql);
            setParameter(parameter); // bind parameter values
            num = pre.executeUpdate();

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            DBUtil.closeAll(res, pre, conn);
        }
        return num; // affected row count
    }

    /**
     * Copies the current result-set row into {@code obj}: for each column,
     * the column label must equal a declared field name of {@code cls}.
     * Mapping errors are printed and the row is left partially populated.
     */
    private void setData(Class<T> cls, T obj) {
        try {
            ResultSetMetaData rsmd = res.getMetaData();
            int col = rsmd.getColumnCount();
            for (int i = 1; i <= col; i++) {
                String DBField = rsmd.getColumnLabel(i);
                Field field = cls.getDeclaredField(DBField);
                field.setAccessible(true); // allow private fields
                field.set(obj, res.getObject(i));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}

// ==== File: src/main/java/com/casic/yizhuang/mysql/DBUtil.java ====
package com.casic.yizhuang.mysql;

import
java.sql.*; +import java.util.ResourceBundle; + + +public class DBUtil { + private static String driverClass; + private static String url; + private static String username; + private static String password; + + //静态代码块加载类时执行一次,加载数据库信息文件 + static{ + //用来加载properties文件的数据, (文件时键值对, 名字要完整匹配) + ResourceBundle rb = ResourceBundle.getBundle("jdbc");//这是properties的文件名 + driverClass = rb.getString("driverClass"); + url = rb.getString("url"); + username = rb.getString("username"); + password = rb.getString("password"); + try { + Class.forName(driverClass); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + } + //得到连接的方法 + public static Connection getConnection() throws Exception{ + return DriverManager.getConnection(url,username,password); + } + //关闭资源 + public static void closeAll(ResultSet rs , PreparedStatement pre, Connection conn){ + if (rs != null){ + try { + rs.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (pre != null){ + try { + pre.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + if (conn != null){ + try { + conn.close(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/com/casic/yizhuang/netty/Client.java b/src/main/java/com/casic/yizhuang/netty/Client.java new file mode 100644 index 0000000..2ca4947 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Client.java @@ -0,0 +1,27 @@ +package com.casic.yizhuang.netty; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioSocketChannel; + + +public class Client { + public void send(String sendMsg) throws Exception { + NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup(); + + try { + Bootstrap bootstrap = new Bootstrap(); + bootstrap.group(eventLoopGroup).channel(NioSocketChannel.class) + .handler(new Clientinitializer()); + 
+ ChannelFuture channelFuture = bootstrap.connect("127.0.0.1", 2025).sync(); + Channel channel = channelFuture.channel(); + channel.writeAndFlush(sendMsg); + } finally { + eventLoopGroup.shutdownGracefully(); + } + } +} + diff --git a/src/main/java/com/casic/yizhuang/netty/ClientHandler.java b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java new file mode 100644 index 0000000..244b725 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/ClientHandler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.SimpleChannelInboundHandler; + +public class ClientHandler extends SimpleChannelInboundHandler { + + //接收服务端数据&发送数据 + @Override + protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception { + + System.out.println("客户端接收到的消息: "+msg); + + ctx.writeAndFlush("12232321"); + + //完成通信后关闭连接 + //ctx.close(); + } + + //和服务器建立连接 + @Override + public void channelActive(ChannelHandlerContext ctx) throws Exception { + //ctx.writeAndFlush("在吗!!!!\\r\\n"); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + cause.printStackTrace(); + ctx.close(); + } +} diff --git a/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java new file mode 100644 index 0000000..0c1ea69 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/netty/Clientinitializer.java @@ -0,0 +1,28 @@ +package com.casic.yizhuang.netty; + +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.handler.codec.string.StringDecoder; +import io.netty.handler.codec.string.StringEncoder; +import io.netty.util.CharsetUtil; + +public class Clientinitializer extends 
ChannelInitializer { + + @Override + protected void initChannel(SocketChannel ch) throws Exception { + ChannelPipeline pipeline = ch.pipeline(); + + //数据分包,组包,粘包 + pipeline.addLast(new LengthFieldBasedFrameDecoder(1024,0,1024,0,4)); + //pipeline.addLast(new LengthFieldPrepender(4)); + + pipeline.addLast(new StringDecoder(CharsetUtil.UTF_8)); + pipeline.addLast(new StringEncoder(CharsetUtil.UTF_8)); + + pipeline.addLast(new ClientHandler()); + + } +} diff --git a/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java new file mode 100644 index 0000000..ed61e04 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/ReadAccessJob.java @@ -0,0 +1,80 @@ +package com.casic.yizhuang.quartz; + +import com.alibaba.fastjson.JSON; +import com.casic.yizhuang.access.DBUtils; +import com.casic.yizhuang.kafka.Producer; +import com.casic.yizhuang.model.Flowmeter; +import com.casic.yizhuang.model.StandardData; +import com.casic.yizhuang.model.StandardDataUtils; +import com.casic.yizhuang.model.WellInfo; +import com.casic.yizhuang.mysql.DAO; +import com.casic.yizhuang.util.Common; +import org.quartz.Job; +import org.quartz.JobExecutionContext; + +import java.text.SimpleDateFormat; +import java.util.*; + +public class ReadAccessJob implements Job { + + @Override + public void execute(JobExecutionContext jobExecutionContext) { + + String printTime = new SimpleDateFormat("yy-MM-dd HH-mm-ss").format(new Date()); + System.out.println("PrintWordsJob start at:" + printTime + ", prints: Hello Job-" + new Random().nextInt(100)); + + + try { + for (int i = 1; i <= 2; i++) { + + String sql = "select top 1 * from dCurrent where idfac= " + i + " order by time desc "; + List> list = DBUtils.select(sql, null); + + for (Map map : list) { + String devcode = map.get("idfac").toString(); + switch (devcode) { + case "1": + devcode = "16064235"; + break; + case "2": + devcode = "16064234"; + break; + default: + break; + } + + String 
realTimeData = map.get("e2").toString(); + String cumulativeData = map.get("e3").toString(); + String time = map.get("time").toString(); + + Class wellInfoClass = WellInfo.class; + List wellInfoList = new DAO<>().query(wellInfoClass, Common.SELECT_WELLCODE, devcode); + if (!wellInfoList.isEmpty()) { + String wellcode = wellInfoList.get(0).getWellcode(); + String standardStatus = standardStatusBuilder(realTimeData, cumulativeData); + StandardData standardData = new StandardData(devcode, "RainFall", standardStatus, time); + //向大数据平台转发 + Producer.send(JSON.toJSONString(standardData)); + new DAO<>().Update(Common.INSERT_HYETOMETER, devcode, wellcode, realTimeData, cumulativeData); + } + + } + } + + } catch (Exception e) { + e.printStackTrace(); + } + } + + //构建数据状态,温度格式暂时不知道 + private String standardStatusBuilder(String realTimeData, String cumulativeData) { + List> appendList = StandardDataUtils.appendListBuilder(String.valueOf(realTimeData), "InsFall"); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(cumulativeData), "DayFall")); + appendList.addAll(StandardDataUtils.appendListBuilder(String.valueOf(""), "Temp")); + appendList.addAll(StandardDataUtils.appendListBuilder("", "Power")); + String standardStatus = JSON.toJSONString(appendList); + return standardStatus; + } + +} + diff --git a/src/main/java/com/casic/yizhuang/quartz/Scheduler.java b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java new file mode 100644 index 0000000..be27f8d --- /dev/null +++ b/src/main/java/com/casic/yizhuang/quartz/Scheduler.java @@ -0,0 +1,31 @@ +package com.casic.yizhuang.quartz; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.quartz.*; +import org.quartz.impl.StdSchedulerFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Scheduler { + + + + public void start() throws SchedulerException { + SchedulerFactory schedulerFactory = new 
StdSchedulerFactory(); + org.quartz.Scheduler scheduler = schedulerFactory.getScheduler(); + JobDetail jobDetail = JobBuilder.newJob(ReadAccessJob.class) + .withIdentity("job", "group").build(); + Trigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "triggerGroup") + .startNow() + .withSchedule(SimpleScheduleBuilder.simpleSchedule() + .withIntervalInMinutes(5) + //.withIntervalInSeconds(5) + .repeatForever()).build(); + + scheduler.scheduleJob(jobDetail, trigger); + scheduler.start(); + } +} diff --git a/src/main/java/com/casic/yizhuang/util/Common.java b/src/main/java/com/casic/yizhuang/util/Common.java new file mode 100644 index 0000000..4fe7446 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/Common.java @@ -0,0 +1,12 @@ +package com.casic.yizhuang.util; + +public class Common { + + public static final String INSERT_FLOWMETER = "insert into data_flowmeter (devcode,well_code,WaterLevel,FlowVelocity,Temperature,InstantFlow,TotalFlow,logtime) values(?,?,?,?,?,?,?,?)"; + + public static final String INSERT_HYETOMETER = "insert into data_hyetometer (devcode,well_code,REALTIMEDATA,CUMULATIVEDATA) values(?,?,?,?)"; + + public static final String SELECT_WELLCODE = + "SELECT well_code AS wellcode FROM bus_device d JOIN bus_device_well dw ON d.id=dw.device_id JOIN bus_well_info w ON w.id=dw.well_id WHERE d.devcode=?"; + +} diff --git a/src/main/java/com/casic/yizhuang/util/KafkaUtils.java b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java new file mode 100644 index 0000000..9d3fa9c --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/KafkaUtils.java @@ -0,0 +1,129 @@ +package com.casic.yizhuang.util; + +import com.casic.yizhuang.kafka.KafkaProperties; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +import java.util.Properties; + +public class KafkaUtils { + + + // 一次请求的最大等待时间(Ms) + private final int waitTime = 1000; + + // Broker连接地址 + + // Broker连接地址 + private final static String BOOTSTRAP_SERVER = "bootstrap.servers"; + + 
// Group id + private final static String GROUP_ID = "group.id"; + + // 消息内容使用的反序列化类 + private final static String VALUE_DESERIALIZER = "value.deserializer"; + + // 消息Key值使用的反序列化类 + private final static String KEY_DESERIALIZER = "key.deserializer"; + + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + private final static String SECURITY_PROTOCOL = "security.protocol"; + + // 服务名 + private final static String SASL_MECHANISM = "sasl.mechanism"; + + // 服务名 + private final static String SASL_KERBEROS_SERVICE_NAME = "sasl.kerberos.service.name"; + + // 域名 + private final static String KERBEROS_DOMAIN_NAME = "kerberos.domain.name"; + + // 是否自动提交offset + private final static String ENABLE_AUTO_COMMIT = "enable.auto.commit"; + + // 自动提交offset的时间间隔 + private final static String AUTO_COMMIT_INTERVAL_MS = "auto.commit.interval.ms"; + + // 会话超时时间 + private final static String SESSION_TIMEOUT_MS = "session.timeout.ms"; + + // 客户端ID + private final static String CLIENT_ID = "client.id"; + + // Key序列化类 + private final static String KEY_SERIALIZER = "key.serializer"; + + // Value序列化类 + private final static String VALUE_SERIALIZER = "value.serializer"; + + + // 分区类名 + private final static String PARTITIONER_NAME = "partitioner.class"; + + // 默认发送100条消息 + private final static int MESSAGE_NUM = 100; + + public static Properties consumerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker连接地址 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // Group id + props.put(GROUP_ID, kafkaProc.getValues(GROUP_ID, "DemoConsumer")); + // 是否自动提交offset + props.put(ENABLE_AUTO_COMMIT, kafkaProc.getValues(ENABLE_AUTO_COMMIT, "true")); + // 自动提交offset的时间间隔 + props.put(AUTO_COMMIT_INTERVAL_MS, kafkaProc.getValues(AUTO_COMMIT_INTERVAL_MS,"1000")); + // 会话超时时间 + props.put(SESSION_TIMEOUT_MS, kafkaProc.getValues(SESSION_TIMEOUT_MS, "30000")); + // 消息Key值使用的反序列化类 + props.put(KEY_DESERIALIZER, 
+ kafkaProc.getValues(KEY_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 消息内容使用的反序列化类 + props.put(VALUE_DESERIALIZER, + kafkaProc.getValues(VALUE_DESERIALIZER, "org.apache.kafka.common.serialization.StringDeserializer")); + // 安全协议类型 + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + + props.put(SASL_MECHANISM, "GSSAPI"); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + + return props; + } + + + public static Properties producerInitProperties() { + Properties props = new Properties(); + KafkaProperties kafkaProc = KafkaProperties.getInstance(); + + // Broker地址列表 + props.put(BOOTSTRAP_SERVER, kafkaProc.getValues(BOOTSTRAP_SERVER, "localhost:21007")); + // 客户端ID + props.put(CLIENT_ID, kafkaProc.getValues(CLIENT_ID, "DemoProducer")); + // Key序列化类 + props.put(KEY_SERIALIZER, + kafkaProc.getValues(KEY_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // Value序列化类 + props.put(VALUE_SERIALIZER, + kafkaProc.getValues(VALUE_SERIALIZER, "org.apache.kafka.common.serialization.StringSerializer")); + // 协议类型:当前支持配置为SASL_PLAINTEXT或者PLAINTEXT + props.put(SECURITY_PROTOCOL, kafkaProc.getValues(SECURITY_PROTOCOL, "SASL_PLAINTEXT")); + // 服务名 + props.put(SASL_KERBEROS_SERVICE_NAME, "kafka"); + // 域名 + props.put(KERBEROS_DOMAIN_NAME, kafkaProc.getValues(KERBEROS_DOMAIN_NAME, "hadoop.hadoop.com")); + // 分区类名 + props.put(PARTITIONER_NAME, + kafkaProc.getValues(PARTITIONER_NAME, "com.casic.yizhuang.kafka.SimplePartitioner")); + + System.setProperty("java.security.auth.login.config","D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient.jaas.conf"); + return props; + } + + +} diff --git 
a/src/main/java/com/casic/yizhuang/util/LoginUtil.java b/src/main/java/com/casic/yizhuang/util/LoginUtil.java new file mode 100644 index 0000000..71db976 --- /dev/null +++ b/src/main/java/com/casic/yizhuang/util/LoginUtil.java @@ -0,0 +1,257 @@ +package com.casic.yizhuang.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Properties; + +public class LoginUtil { + private static final Logger LOG = LoggerFactory.getLogger(LoginUtil.class); + + /** + * no JavaDoc + */ + public enum Module { + STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client"); + + private String name; + + private Module(String name) + { + this.name = name; + } + + public String getName() + { + return name; + } + } + + /** + * line operator string + */ + private static final String LINE_SEPARATOR = System.getProperty("line.separator"); + + /** + * jaas file postfix + */ + private static final String JAAS_POSTFIX = ".jaas.conf"; + + /** + * is IBM jdk or not + */ + private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM"); + + /** + * IBM jdk login module + */ + private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required"; + + /** + * oracle jdk login module + */ + private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required"; + + /** + * Zookeeper quorum principal. 
+ */ + public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal"; + + /** + * java security krb5 file path + */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** + * java security login file path + */ + public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config"; + + /** + * 设置jaas.conf文件 + * + * @param principal + * @param keytabPath + * @throws IOException + */ + public static void setJaasFile(String principal, String keytabPath) + throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\kafkaClient"; + String jaasPath = filePath + JAAS_POSTFIX; + + // windows路径下分隔符替换 + jaasPath = jaasPath.replace("\\", "\\\\"); + // 删除jaas文件 + deleteJaasFile(jaasPath); + writeJaasFile(jaasPath, principal, keytabPath); + System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath); + } + + /** + * 设置zookeeper服务端principal + * + * @param zkServerPrincipal + * @throws IOException + */ + public static void setZookeeperServerPrincipal(String zkServerPrincipal) + throws IOException { + System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal); + String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL); + if (ret == null) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null."); + } + if (!ret.equals(zkServerPrincipal)) + { + throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + "."); + } + } + + /** + * 设置krb5文件 + * + * @param krb5ConfFile + * @throws IOException + */ + public static void setKrb5Config(String krb5ConfFile) + throws IOException { + System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile); + String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF); + if (ret == null) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is 
null."); + } + if (!ret.equals(krb5ConfFile)) + { + throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + "."); + } + } + + /** + * 写入jaas文件 + * + * @throws IOException + * 写文件异常 + */ + private static void writeJaasFile(String jaasPath, String principal, String keytabPath) + throws IOException { + FileWriter writer = new FileWriter(new File(jaasPath)); + try + { + writer.write(getJaasConfContext(principal, keytabPath)); + writer.flush(); + } + catch (IOException e) + { + throw new IOException("Failed to create jaas.conf File"); + } + finally + { + writer.close(); + } + } + + private static void deleteJaasFile(String jaasPath) + throws IOException { + File jaasFile = new File(jaasPath); + if (jaasFile.exists()) + { + if (!jaasFile.delete()) + { + throw new IOException("Failed to delete exists jaas file."); + } + } + } + + private static String getJaasConfContext(String principal, String keytabPath) { + Module[] allModule = Module.values(); + StringBuilder builder = new StringBuilder(); + for (Module modlue : allModule) + { + builder.append(getModuleContext(principal, keytabPath, modlue)); + } + return builder.toString(); + } + + private static String getModuleContext(String userPrincipal, String keyTabPath, Module module) { + StringBuilder builder = new StringBuilder(); + if (IS_IBM_JDK) { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("credsType=both").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } else { + builder.append(module.getName()).append(" {").append(LINE_SEPARATOR); + builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR); + builder.append("useKeyTab=true").append(LINE_SEPARATOR); + 
builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR); + builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR); + builder.append("useTicketCache=false").append(LINE_SEPARATOR); + builder.append("storeKey=true").append(LINE_SEPARATOR); + builder.append("debug=true;").append(LINE_SEPARATOR); + builder.append("};").append(LINE_SEPARATOR); + } + + return builder.toString(); + } + + private static final String filePath = "D:\\casic203\\software\\software\\20200616\\yizhuang\\config\\"; + public static void securityPrepare(String principal, String keyTabFile) throws IOException { +// String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources" + File.separator; + String krbFile = filePath + "krb5.conf"; + String userKeyTableFile = filePath + keyTabFile; + // windows路径下分隔符替换 + userKeyTableFile = userKeyTableFile.replace("\\", "\\\\"); + krbFile = krbFile.replace("\\", "\\\\"); + principal+="@HADOOP.COM"; + LoginUtil.setKrb5Config(krbFile); + LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com"); + LoginUtil.setJaasFile(principal, userKeyTableFile); + } + + /** + * Check security mode + * + * @return boolean + */ + public static Boolean isSecurityModel() { + Boolean isSecurity = false; + + String krbFilePath = filePath + "kafkaSecurityMode"; + + Properties securityProps = new Properties(); + // file does not exist. 
+ if (!isFileExists(krbFilePath)) { + return isSecurity; + } + try { + securityProps.load(new FileInputStream(krbFilePath)); + + if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) + { + isSecurity = true; + } + } catch (Exception e) { + LOG.info("The Exception occured : {}.", e); + } + + return true; + } + + /* + * 判断文件是否存在 + */ + private static boolean isFileExists(String fileName) { + File file = new File(fileName); + + return file.exists(); + } +} diff --git a/src/main/resources/77042.jaas.conf b/src/main/resources/77042.jaas.conf new file mode 100644 index 0000000..a86b107 --- /dev/null +++ b/src/main/resources/77042.jaas.conf @@ -0,0 +1,27 @@ +StormClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +KafkaClient { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; +Client { +com.sun.security.auth.module.Krb5LoginModule required +useKeyTab=true +keyTab="D:\\cz\\203\\file\\yizhuang\\src\\main\\resources\\user.keytab" +principal="kafkauser@HADOOP.COM" +useTicketCache=false +storeKey=true +debug=true; +}; diff --git a/src/main/resources/META-INF/MANIFEST.MF b/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..2692c5f --- /dev/null +++ b/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: com.casic.yizhuang.Main + diff --git a/src/main/resources/consumer.properties b/src/main/resources/consumer.properties new file mode 100644 index 0000000..1451c84 --- /dev/null +++ b/src/main/resources/consumer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com 
+group.id = example-group1 +auto.commit.interval.ms = 60000 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties new file mode 100644 index 0000000..5fcbb11 --- /dev/null +++ b/src/main/resources/jdbc.properties @@ -0,0 +1,9 @@ +driverClass = com.mysql.cj.jdbc.Driver +url = jdbc:mysql://192.168.4.218:3306/smartwell_yizhuang?serverTimezone=GMT%2B8 +username = root +password = casic203yz2db + +#url = jdbc:mysql://111.198.10.15:11102/smartwell_yizhuang?serverTimezone=GMT%2B8 + +#username = root +#password = Casic203! \ No newline at end of file diff --git a/src/main/resources/kafkaSecurityMode b/src/main/resources/kafkaSecurityMode new file mode 100644 index 0000000..ed59a5e --- /dev/null +++ b/src/main/resources/kafkaSecurityMode @@ -0,0 +1 @@ +kafka.client.security.mode = yes diff --git a/src/main/resources/krb5.conf b/src/main/resources/krb5.conf new file mode 100644 index 0000000..003c6c7 --- /dev/null +++ b/src/main/resources/krb5.conf @@ -0,0 +1,48 @@ +[kdcdefaults] +kdc_ports = 192.168.65.19:21732 +kdc_tcp_ports = "" + +[libdefaults] +default_realm = HADOOP.COM +kdc_timeout = 2500 +clockskew = 300 +use_dns_lookup = 0 +udp_preference_limit = 1465 +max_retries = 5 +dns_lookup_kdc = false +dns_lookup_realm = false +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +allow_extend_version = false +default_ccache_name = FILE:/tmp//krb5cc_%{uid} + +[realms] +HADOOP.COM = { +kdc = 192.168.65.19:21732 +kdc = 192.168.65.18:21732 +admin_server = 192.168.65.19:21730 +admin_server = 192.168.65.18:21730 +kpasswd_server = 192.168.65.19:21731 +kpasswd_server = 192.168.65.18:21731 +kpasswd_port = 21731 +kadmind_port = 21730 +kadmind_listen = 192.168.65.19:21730 +kpasswd_listen = 192.168.65.19:21731 +renewable = false +forwardable = false +renew_lifetime = 0m +max_renewable_life = 30m +acl_file = 
/opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/kadm5.acl +dict_file = /opt/huawei/Bigdata/common/runtime/security/weakPasswdDic/weakPasswdForKdc.ini +key_stash_file = /opt/huawei/Bigdata/FusionInsight_BASE_8.1.2.2/install/FusionInsight-kerberos-1.18/kerberos/var/krb5kdc/.k5.HADOOP.COM +} + +[domain_realm] +.hadoop.com = HADOOP.COM + +[logging] +kdc = SYSLOG:INFO:DAEMON +admin_server = SYSLOG:INFO:DAEMON +default = SYSLOG:NOTICE:DAEMON diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties new file mode 100644 index 0000000..10e767d --- /dev/null +++ b/src/main/resources/log4j.properties @@ -0,0 +1,37 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +kafka.logs.dir=logs + +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.logger.kafka=ERROR, kafkaAppender + +log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaAppender.File=${kafka.logs.dir}/client.log +log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Turn on all our debugging info +#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender +#log4j.logger.kafka.client.ClientUtils=DEBUG, kafkaAppender +#log4j.logger.kafka.perf=DEBUG, kafkaAppender +#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender +#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG diff --git a/src/main/resources/producer.properties b/src/main/resources/producer.properties new file mode 100644 index 0000000..5e6446a --- /dev/null +++ b/src/main/resources/producer.properties @@ -0,0 +1,5 @@ +security.protocol = SASL_PLAINTEXT +kerberos.domain.name = hadoop.hadoop.com +acks = 1 +bootstrap.servers = 192.168.65.16:21007,192.168.65.15:21007,192.168.65.14:21007 +sasl.kerberos.service.name = kafka diff --git a/src/main/resources/user.keytab b/src/main/resources/user.keytab new file mode 100644 index 0000000..a10b711 --- /dev/null +++ b/src/main/resources/user.keytab Binary files differ